diff --git a/web/app/components/workflow/panel/debug-and-preview/hooks/create-llm-trace-builder.ts b/web/app/components/workflow/panel/debug-and-preview/hooks/create-llm-trace-builder.ts
new file mode 100644
index 0000000000..5e3e238326
--- /dev/null
+++ b/web/app/components/workflow/panel/debug-and-preview/hooks/create-llm-trace-builder.ts
@@ -0,0 +1,124 @@
+import type { IOnDataMoreInfo } from '@/service/base'
+import type { NodeTracing } from '@/types/workflow'
+import {
+  NodeRunningStatus,
+  WorkflowRunningStatus,
+} from '../../../types'
+
+type ChunkMeta = Pick<IOnDataMoreInfo, 'node_id' | 'tool_call_id' | 'tool_name' | 'tool_arguments' | 'tool_icon' | 'tool_icon_dark' | 'tool_error' | 'tool_elapsed_time' | 'model_provider' | 'model_name' | 'model_icon' | 'model_icon_dark' | 'model_usage' | 'model_duration'>
+
+const TRACKED_CHUNK_TYPES = ['model_start', 'model_end', 'tool_call', 'tool_result', 'text', 'thought', 'thought_start', 'thought_end']
+
+export function createLLMTraceBuilder() {
+  return function update(
+    tracing: NodeTracing[] | undefined,
+    chunkType: IOnDataMoreInfo['chunk_type'],
+    message: string,
+    meta: ChunkMeta,
+  ): number {
+    if (!tracing)
+      return -1
+
+    if (!chunkType || !TRACKED_CHUNK_TYPES.includes(chunkType))
+      return -1
+
+    let targetNodeIndex = -1
+    if (meta.node_id) {
+      targetNodeIndex = tracing.findIndex(item => item.node_id === meta.node_id)
+    }
+    if (targetNodeIndex < 0) {
+      for (let i = tracing.length - 1; i >= 0; i--) {
+        if (tracing[i].status === NodeRunningStatus.Running || tracing[i].status === WorkflowRunningStatus.Running) {
+          targetNodeIndex = i
+          break
+        }
+      }
+    }
+
+    if (targetNodeIndex < 0)
+      return -1
+
+    const node = tracing[targetNodeIndex]
+    if (!node.execution_metadata)
+      node.execution_metadata = { llm_trace: [] } as unknown as NodeTracing['execution_metadata']
+    if (!node.execution_metadata!.llm_trace)
+      node.execution_metadata!.llm_trace = []
+
+    const trace = node.execution_metadata!.llm_trace!
+
+    if (chunkType === 'model_start') {
+      trace.push({
+        type: 'model',
+        name: meta.model_name || '',
+        duration: 0,
+        output: { text: null, reasoning: null },
+        provider: meta.model_provider,
+        icon: meta.model_icon,
+        icon_dark: meta.model_icon_dark,
+      })
+    }
+
+    if (chunkType === 'text') {
+      const last = trace[trace.length - 1]
+      if (last?.type === 'model')
+        last.output.text = (last.output.text ?? '') + message
+    }
+
+    if (chunkType === 'thought_start' || chunkType === 'thought' || chunkType === 'thought_end') {
+      const last = trace[trace.length - 1]
+      if (last?.type === 'model')
+        last.output.reasoning = (last.output.reasoning ?? '') + message
+    }
+
+    if (chunkType === 'model_end') {
+      for (let i = trace.length - 1; i >= 0; i--) {
+        if (trace[i].type === 'model') {
+          trace[i].duration = meta.model_duration || 0
+          trace[i].usage = meta.model_usage || null
+          trace[i].status = 'success'
+          break
+        }
+      }
+    }
+
+    if (chunkType === 'tool_call') {
+      const lastModel = trace.findLast(item => item.type === 'model')
+      if (lastModel) {
+        if (!lastModel.output.tool_calls)
+          lastModel.output.tool_calls = []
+        lastModel.output.tool_calls.push({
+          id: meta.tool_call_id || '',
+          name: meta.tool_name || '',
+          arguments: meta.tool_arguments || '',
+        })
+      }
+      trace.push({
+        type: 'tool',
+        name: meta.tool_name || '',
+        duration: 0,
+        output: {
+          id: meta.tool_call_id || null,
+          name: meta.tool_name || null,
+          arguments: meta.tool_arguments || null,
+          output: null,
+        },
+        icon: meta.tool_icon,
+        icon_dark: meta.tool_icon_dark,
+      })
+    }
+
+    if (chunkType === 'tool_result') {
+      for (let i = trace.length - 1; i >= 0; i--) {
+        if (trace[i].type === 'tool') {
+          trace[i].output.output = message
+          trace[i].error = meta.tool_error
+          trace[i].duration = meta.tool_elapsed_time || 0
+          trace[i].status = meta.tool_error ? 'error' : 'success'
+          break
+        }
+      }
+    }
+
+    return targetNodeIndex
+  }
+}
diff --git a/web/app/components/workflow/panel/debug-and-preview/hooks/use-chat-message-sender.ts b/web/app/components/workflow/panel/debug-and-preview/hooks/use-chat-message-sender.ts
index fd955d16f0..e3aeb840be 100644
--- a/web/app/components/workflow/panel/debug-and-preview/hooks/use-chat-message-sender.ts
+++ b/web/app/components/workflow/panel/debug-and-preview/hooks/use-chat-message-sender.ts
@@ -44,6 +44,7 @@ import {
   NodeRunningStatus,
   WorkflowRunningStatus,
 } from '../../../types'
+import { createLLMTraceBuilder } from './create-llm-trace-builder'
 import { createWorkflowEventHandlers } from './use-workflow-event-handlers'
 
 type UseChatMessageSenderParams = {
@@ -216,6 +217,7 @@
     let hasSetResponseId = false
     let toolCallId = ''
     let thoughtId = ''
+    const llmTraceBuilder = createLLMTraceBuilder()
 
     const workflowHandlers = createWorkflowEventHandlers({
       responseItem,
@@ -236,6 +238,8 @@
         messageId,
         taskId,
         chunk_type,
+        node_id,
+        tool_call_id,
         tool_icon,
         tool_icon_dark,
         tool_name,
@@ -243,6 +247,12 @@
         tool_files,
         tool_error,
         tool_elapsed_time,
+        model_provider,
+        model_name,
+        model_icon,
+        model_icon_dark,
+        model_usage,
+        model_duration,
       }: StreamChunkMeta) => {
         if (!isCurrentRun())
           return
@@ -341,6 +351,45 @@
 
         if (messageId)
           responseItem.id = messageId
+        if (responseItem.workflowProcess?.tracing) {
+          const idx = llmTraceBuilder(
+            responseItem.workflowProcess.tracing,
+            chunk_type,
+            message,
+            {
+              node_id,
+              tool_call_id,
+              tool_name,
+              tool_arguments,
+              tool_icon,
+              tool_icon_dark,
+              tool_error,
+              tool_elapsed_time,
+              model_provider,
+              model_name,
+              model_icon,
+              model_icon_dark,
+              model_usage,
+              model_duration,
+            },
+          )
+          if (idx >= 0) {
+            const tracing = responseItem.workflowProcess.tracing
+            const item = tracing[idx]
+            tracing[idx] = {
+              ...item,
+              execution_metadata: {
+                ...item.execution_metadata!,
+                llm_trace: [...(item.execution_metadata?.llm_trace || [])],
+              },
+            }
+            responseItem.workflowProcess = {
+              ...responseItem.workflowProcess,
+              tracing: [...tracing],
+            }
+          }
+        }
+
         updateCurrentQAOnTree({
           placeholderQuestionId,
           questionItem,
@@ -548,6 +597,7 @@
     const url = `/workflow/${workflowRunId}/events?include_state_snapshot=true`
     let toolCallId = ''
     let thoughtId = ''
+    const llmTraceBuilder = createLLMTraceBuilder()
 
     const otherOptions: IOtherOptions = {
       getAbortController: (abortController) => {
@@ -558,6 +608,8 @@
         messageId: msgId,
         taskId,
         chunk_type,
+        node_id,
+        tool_call_id,
         tool_icon,
         tool_icon_dark,
         tool_name,
@@ -565,6 +617,12 @@
         tool_files,
         tool_error,
         tool_elapsed_time,
+        model_provider,
+        model_name,
+        model_icon,
+        model_icon_dark,
+        model_usage,
+        model_duration,
       }: StreamChunkMeta) => {
         updateChatTreeNode(messageId, (responseItem) => {
           if (chunk_type === 'text' || !chunk_type) {
@@ -645,6 +703,45 @@
             }
           }
 
+          if (responseItem.workflowProcess?.tracing) {
+            const idx = llmTraceBuilder(
+              responseItem.workflowProcess.tracing,
+              chunk_type,
+              message,
+              {
+                node_id,
+                tool_call_id,
+                tool_name,
+                tool_arguments,
+                tool_icon,
+                tool_icon_dark,
+                tool_error,
+                tool_elapsed_time,
+                model_provider,
+                model_name,
+                model_icon,
+                model_icon_dark,
+                model_usage,
+                model_duration,
+              },
+            )
+            if (idx >= 0) {
+              const tracing = responseItem.workflowProcess.tracing
+              const item = tracing[idx]
+              tracing[idx] = {
+                ...item,
+                execution_metadata: {
+                  ...item.execution_metadata!,
+                  llm_trace: [...(item.execution_metadata?.llm_trace || [])],
+                },
+              }
+              responseItem.workflowProcess = {
+                ...responseItem.workflowProcess,
+                tracing: [...tracing],
+              }
+            }
+          }
+
           if (msgId)
             responseItem.id = msgId
         })
diff --git a/web/app/components/workflow/run/hooks.ts b/web/app/components/workflow/run/hooks.ts
index 9aa6d253a1..9ed12a200f 100644
--- a/web/app/components/workflow/run/hooks.ts
+++ b/web/app/components/workflow/run/hooks.ts
@@ -85,9 +85,12 @@ export const useLogs = () => {
     setFalse: setShowLLMDetailFalse,
   }] = useBoolean(false)
   const [llmResultList, setLLMResultList] = useState<LLMTraceItem[]>([])
-  const handleShowLLMDetail = useCallback((detail: LLMTraceItem[]) => {
+  const [llmDetailNodeId, setLLMDetailNodeId] = useState('')
+  const handleShowLLMDetail = useCallback((detail: LLMTraceItem[], nodeId?: string) => {
     setShowLLMDetailTrue()
     setLLMResultList(detail)
+    if (nodeId)
+      setLLMDetailNodeId(nodeId)
   }, [setShowLLMDetailTrue, setLLMResultList])
 
   return {
@@ -128,6 +131,7 @@
     setShowLLMDetailFalse,
     llmResultList,
     setLLMResultList,
+    llmDetailNodeId,
     handleShowLLMDetail,
   }
 }
diff --git a/web/app/components/workflow/run/llm-log/llm-log-trigger.tsx b/web/app/components/workflow/run/llm-log/llm-log-trigger.tsx
index 1d65f754b4..94d6d66e24 100644
--- a/web/app/components/workflow/run/llm-log/llm-log-trigger.tsx
+++ b/web/app/components/workflow/run/llm-log/llm-log-trigger.tsx
@@ -8,7 +8,7 @@ import { Thinking } from '@/app/components/base/icons/src/vender/workflow'
 
 type LLMLogTriggerProps = {
   nodeInfo: NodeTracing
-  onShowLLMDetail: (detail: LLMTraceItem[]) => void
+  onShowLLMDetail: (detail: LLMTraceItem[], nodeId?: string) => void
 }
 const LLMLogTrigger = ({
   nodeInfo,
@@ -20,7 +20,7 @@ const LLMLogTrigger = ({
   const handleShowLLMDetail = (e: React.MouseEvent) => {
     e.stopPropagation()
     e.nativeEvent.stopImmediatePropagation()
-    onShowLLMDetail(llmTrace || [])
+    onShowLLMDetail(llmTrace || [], nodeInfo.node_id)
   }
 
   return (
diff --git a/web/app/components/workflow/run/node.tsx b/web/app/components/workflow/run/node.tsx
index 9243554bd4..d5c8d2ec2b 100644
--- a/web/app/components/workflow/run/node.tsx
+++ b/web/app/components/workflow/run/node.tsx
@@ -46,7 +46,7 @@ type Props = {
   onShowLoopDetail?: (detail: NodeTracing[][], loopDurationMap: LoopDurationMap, loopVariableMap: LoopVariableMap) => void
   onShowRetryDetail?: (detail: NodeTracing[]) => void
   onShowAgentOrToolLog?: (detail?: AgentLogItemWithChildren) => void
-  onShowLLMDetail?: (detail: LLMTraceItem[]) => void
+  onShowLLMDetail?: (detail: LLMTraceItem[], nodeId?: string) => void
   notShowIterationNav?: boolean
   notShowLoopNav?: boolean
 }
diff --git a/web/app/components/workflow/run/tracing-panel.tsx b/web/app/components/workflow/run/tracing-panel.tsx
index 6b9993f8a8..e4926aebbc 100644
--- a/web/app/components/workflow/run/tracing-panel.tsx
+++ b/web/app/components/workflow/run/tracing-panel.tsx
@@ -8,6 +8,7 @@ import {
 import * as React from 'react'
 import {
   useCallback,
+  useMemo,
   useState,
 } from 'react'
 import { useTranslation } from 'react-i18next'
@@ -95,9 +96,17 @@ const TracingPanel: FC<TracingPanelProps> = ({
     showLLMDetail,
     setShowLLMDetailFalse,
     llmResultList,
+    llmDetailNodeId,
     handleShowLLMDetail,
   } = useLogs()
 
+  const liveLLMResultList = useMemo(() => {
+    if (!showLLMDetail || !llmDetailNodeId)
+      return llmResultList
+    const node = list.find(n => n.node_id === llmDetailNodeId)
+    return node?.execution_metadata?.llm_trace || llmResultList
+  }, [showLLMDetail, llmDetailNodeId, list, llmResultList])
+
   const renderNode = (node: NodeTracing) => {
     const isParallelFirstNode = !!node.parallelDetail?.isParallelStartNode
     if (isParallelFirstNode) {
@@ -191,7 +200,7 @@
         <SpecialResultPanel
           showLLMDetail={showLLMDetail}
           setShowLLMDetailFalse={setShowLLMDetailFalse}
-          llmResultList={llmResultList}
+          llmResultList={liveLLMResultList}
         />
       )
     }
diff --git a/web/service/base.ts b/web/service/base.ts
index 107920f758..c26ec4533a 100644
--- a/web/service/base.ts
+++ b/web/service/base.ts
@@ -51,7 +51,8 @@ export type IOnDataMoreInfo = {
   messageId: string
   errorMessage?: string
   errorCode?: string
-  chunk_type?: 'text' | 'tool_call' | 'tool_result' | 'thought' | 'thought_start' | 'thought_end'
+  chunk_type?: 'text' | 'tool_call' | 'tool_result' | 'thought' | 'thought_start' | 'thought_end' | 'model_start' | 'model_end'
+  node_id?: string
   tool_call_id?: string
   tool_name?: string
   tool_arguments?: string
@@ -61,6 +62,13 @@
   tool_files?: string[]
   tool_error?: string
   tool_elapsed_time?: number
+
+  model_provider?: string
+  model_name?: string
+  model_icon?: string | IconObject
+  model_icon_dark?: string | IconObject
+  model_usage?: Record<string, any> | null
+  model_duration?: number
 }
 
 export type IOnData = (message: string, isFirstMessage: boolean, moreInfo: IOnDataMoreInfo) => void
@@ -312,6 +320,7 @@
               taskId: bufferObj.task_id,
               messageId: bufferObj.id,
               chunk_type: bufferObj.chunk_type,
+              node_id: bufferObj.node_id,
               tool_call_id: bufferObj.tool_call_id,
               tool_name: bufferObj.tool_name,
               tool_arguments: bufferObj.tool_arguments,
@@ -320,6 +329,12 @@
               tool_files: bufferObj.tool_files,
               tool_error: bufferObj.tool_error,
               tool_elapsed_time: bufferObj.tool_elapsed_time,
+              model_provider: bufferObj.model_provider,
+              model_name: bufferObj.model_name,
+              model_icon: bufferObj.model_icon,
+              model_icon_dark: bufferObj.model_icon_dark,
+              model_usage: bufferObj.model_usage,
+              model_duration: bufferObj.model_duration,
             })
             isFirstMessage = false
           }
diff --git a/web/types/workflow.ts b/web/types/workflow.ts
index 3419659b44..a702216dcf 100644
--- a/web/types/workflow.ts
+++ b/web/types/workflow.ts
@@ -105,6 +105,7 @@ export type LLMTraceItem = {
   icon_dark?: string | IconObject
   error?: string
   status?: 'success' | 'error'
+  usage?: Record<string, any> | null
 }
 
 export type NodeTracing = {