fix: llm logs — build LLM traces from stream chunks and update node tracing live

This commit is contained in:
zxhlyh 2026-03-06 18:04:18 +08:00
parent e0794020f7
commit 52dd4b82e6
8 changed files with 256 additions and 6 deletions

View File

@ -0,0 +1,124 @@
import type { IOnDataMoreInfo } from '@/service/base'
import type { NodeTracing } from '@/types/workflow'
import {
NodeRunningStatus,
WorkflowRunningStatus,
} from '../../../types'
type ChunkMeta = Pick<IOnDataMoreInfo, 'node_id' | 'tool_call_id' | 'tool_name' | 'tool_arguments' | 'tool_icon' | 'tool_icon_dark' | 'tool_error' | 'tool_elapsed_time' | 'model_provider' | 'model_name' | 'model_icon' | 'model_icon_dark' | 'model_usage' | 'model_duration'>
const TRACKED_CHUNK_TYPES = ['model_start', 'model_end', 'tool_call', 'tool_result', 'text', 'thought', 'thought_start', 'thought_end']
export function createLLMTraceBuilder() {
return function update(
tracing: NodeTracing[] | undefined,
chunkType: IOnDataMoreInfo['chunk_type'],
message: string,
meta: ChunkMeta,
): number {
if (!tracing)
return -1
if (!chunkType || !TRACKED_CHUNK_TYPES.includes(chunkType))
return -1
let targetNodeIndex = -1
if (meta.node_id) {
targetNodeIndex = tracing.findIndex(item => item.node_id === meta.node_id)
}
if (targetNodeIndex < 0) {
for (let i = tracing.length - 1; i >= 0; i--) {
if (tracing[i].status === NodeRunningStatus.Running || tracing[i].status === WorkflowRunningStatus.Running) {
targetNodeIndex = i
break
}
}
}
if (targetNodeIndex < 0)
return -1
const node = tracing[targetNodeIndex]
if (!node.execution_metadata)
node.execution_metadata = { llm_trace: [] } as unknown as NodeTracing['execution_metadata']
if (!node.execution_metadata!.llm_trace)
node.execution_metadata!.llm_trace = []
const trace = node.execution_metadata!.llm_trace!
if (chunkType === 'model_start') {
trace.push({
type: 'model',
name: meta.model_name || '',
duration: 0,
output: { text: null, reasoning: null },
provider: meta.model_provider,
icon: meta.model_icon,
icon_dark: meta.model_icon_dark,
})
}
if (chunkType === 'text') {
const last = trace[trace.length - 1]
if (last?.type === 'model')
last.output.text = (last.output.text ?? '') + message
}
if (chunkType === 'thought_start' || chunkType === 'thought' || chunkType === 'thought_end') {
const last = trace[trace.length - 1]
if (last?.type === 'model')
last.output.reasoning = (last.output.reasoning ?? '') + message
}
if (chunkType === 'model_end') {
for (let i = trace.length - 1; i >= 0; i--) {
if (trace[i].type === 'model') {
trace[i].duration = meta.model_duration || 0
trace[i].usage = meta.model_usage || null
trace[i].status = 'success'
break
}
}
}
if (chunkType === 'tool_call') {
const lastModel = trace.findLast(item => item.type === 'model')
if (lastModel) {
if (!lastModel.output.tool_calls)
lastModel.output.tool_calls = []
lastModel.output.tool_calls.push({
id: meta.tool_call_id || '',
name: meta.tool_name || '',
arguments: meta.tool_arguments || '',
})
}
trace.push({
type: 'tool',
name: meta.tool_name || '',
duration: 0,
output: {
id: meta.tool_call_id || null,
name: meta.tool_name || null,
arguments: meta.tool_arguments || null,
output: null,
},
icon: meta.tool_icon,
icon_dark: meta.tool_icon_dark,
})
}
if (chunkType === 'tool_result') {
for (let i = trace.length - 1; i >= 0; i--) {
if (trace[i].type === 'tool') {
trace[i].output.output = message
trace[i].error = meta.tool_error
trace[i].duration = meta.tool_elapsed_time || 0
trace[i].status = meta.tool_error ? 'error' : 'success'
break
}
}
}
return targetNodeIndex
}
}

View File

@ -44,6 +44,7 @@ import {
NodeRunningStatus,
WorkflowRunningStatus,
} from '../../../types'
import { createLLMTraceBuilder } from './create-llm-trace-builder'
import { createWorkflowEventHandlers } from './use-workflow-event-handlers'
type UseChatMessageSenderParams = {
@ -216,6 +217,7 @@ export function useChatMessageSender({
let hasSetResponseId = false
let toolCallId = ''
let thoughtId = ''
const llmTraceBuilder = createLLMTraceBuilder()
const workflowHandlers = createWorkflowEventHandlers({
responseItem,
@ -236,6 +238,8 @@ export function useChatMessageSender({
messageId,
taskId,
chunk_type,
node_id,
tool_call_id,
tool_icon,
tool_icon_dark,
tool_name,
@ -243,6 +247,12 @@ export function useChatMessageSender({
tool_files,
tool_error,
tool_elapsed_time,
model_provider,
model_name,
model_icon,
model_icon_dark,
model_usage,
model_duration,
}: StreamChunkMeta) => {
if (!isCurrentRun())
return
@ -341,6 +351,45 @@ export function useChatMessageSender({
if (messageId)
responseItem.id = messageId
if (responseItem.workflowProcess?.tracing) {
const idx = llmTraceBuilder(
responseItem.workflowProcess.tracing,
chunk_type,
message,
{
node_id,
tool_call_id,
tool_name,
tool_arguments,
tool_icon,
tool_icon_dark,
tool_error,
tool_elapsed_time,
model_provider,
model_name,
model_icon,
model_icon_dark,
model_usage,
model_duration,
},
)
if (idx >= 0) {
const tracing = responseItem.workflowProcess.tracing
const item = tracing[idx]
tracing[idx] = {
...item,
execution_metadata: {
...item.execution_metadata!,
llm_trace: [...(item.execution_metadata?.llm_trace || [])],
},
}
responseItem.workflowProcess = {
...responseItem.workflowProcess,
tracing: [...tracing],
}
}
}
updateCurrentQAOnTree({
placeholderQuestionId,
questionItem,
@ -548,6 +597,7 @@ export function useChatMessageSender({
const url = `/workflow/${workflowRunId}/events?include_state_snapshot=true`
let toolCallId = ''
let thoughtId = ''
const llmTraceBuilder = createLLMTraceBuilder()
const otherOptions: IOtherOptions = {
getAbortController: (abortController) => {
@ -558,6 +608,8 @@ export function useChatMessageSender({
messageId: msgId,
taskId,
chunk_type,
node_id,
tool_call_id,
tool_icon,
tool_icon_dark,
tool_name,
@ -565,6 +617,12 @@ export function useChatMessageSender({
tool_files,
tool_error,
tool_elapsed_time,
model_provider,
model_name,
model_icon,
model_icon_dark,
model_usage,
model_duration,
}: StreamChunkMeta) => {
updateChatTreeNode(messageId, (responseItem) => {
if (chunk_type === 'text' || !chunk_type) {
@ -645,6 +703,45 @@ export function useChatMessageSender({
}
}
if (responseItem.workflowProcess?.tracing) {
const idx = llmTraceBuilder(
responseItem.workflowProcess.tracing,
chunk_type,
message,
{
node_id,
tool_call_id,
tool_name,
tool_arguments,
tool_icon,
tool_icon_dark,
tool_error,
tool_elapsed_time,
model_provider,
model_name,
model_icon,
model_icon_dark,
model_usage,
model_duration,
},
)
if (idx >= 0) {
const tracing = responseItem.workflowProcess.tracing
const item = tracing[idx]
tracing[idx] = {
...item,
execution_metadata: {
...item.execution_metadata!,
llm_trace: [...(item.execution_metadata?.llm_trace || [])],
},
}
responseItem.workflowProcess = {
...responseItem.workflowProcess,
tracing: [...tracing],
}
}
}
if (msgId)
responseItem.id = msgId
})

View File

@ -85,9 +85,12 @@ export const useLogs = () => {
setFalse: setShowLLMDetailFalse,
}] = useBoolean(false)
const [llmResultList, setLLMResultList] = useState<LLMTraceItem[]>([])
const handleShowLLMDetail = useCallback((detail: LLMTraceItem[]) => {
const [llmDetailNodeId, setLLMDetailNodeId] = useState('')
const handleShowLLMDetail = useCallback((detail: LLMTraceItem[], nodeId?: string) => {
setShowLLMDetailTrue()
setLLMResultList(detail)
if (nodeId)
setLLMDetailNodeId(nodeId)
}, [setShowLLMDetailTrue, setLLMResultList])
return {
@ -128,6 +131,7 @@ export const useLogs = () => {
setShowLLMDetailFalse,
llmResultList,
setLLMResultList,
llmDetailNodeId,
handleShowLLMDetail,
}
}

View File

@ -8,7 +8,7 @@ import { Thinking } from '@/app/components/base/icons/src/vender/workflow'
type LLMLogTriggerProps = {
nodeInfo: NodeTracing
onShowLLMDetail: (detail: LLMTraceItem[]) => void
onShowLLMDetail: (detail: LLMTraceItem[], nodeId?: string) => void
}
const LLMLogTrigger = ({
nodeInfo,
@ -20,7 +20,7 @@ const LLMLogTrigger = ({
const handleShowLLMDetail = (e: React.MouseEvent<HTMLButtonElement>) => {
e.stopPropagation()
e.nativeEvent.stopImmediatePropagation()
onShowLLMDetail(llmTrace || [])
onShowLLMDetail(llmTrace || [], nodeInfo.node_id)
}
return (

View File

@ -46,7 +46,7 @@ type Props = {
onShowLoopDetail?: (detail: NodeTracing[][], loopDurationMap: LoopDurationMap, loopVariableMap: LoopVariableMap) => void
onShowRetryDetail?: (detail: NodeTracing[]) => void
onShowAgentOrToolLog?: (detail?: AgentLogItemWithChildren) => void
onShowLLMDetail?: (detail: LLMTraceItem[]) => void
onShowLLMDetail?: (detail: LLMTraceItem[], nodeId?: string) => void
notShowIterationNav?: boolean
notShowLoopNav?: boolean
}

View File

@ -8,6 +8,7 @@ import {
import * as React from 'react'
import {
useCallback,
useMemo,
useState,
} from 'react'
import { useTranslation } from 'react-i18next'
@ -95,9 +96,17 @@ const TracingPanel: FC<TracingPanelProps> = ({
showLLMDetail,
setShowLLMDetailFalse,
llmResultList,
llmDetailNodeId,
handleShowLLMDetail,
} = useLogs()
const liveLLMResultList = useMemo(() => {
if (!showLLMDetail || !llmDetailNodeId)
return llmResultList
const node = list.find(n => n.node_id === llmDetailNodeId)
return node?.execution_metadata?.llm_trace || llmResultList
}, [showLLMDetail, llmDetailNodeId, list, llmResultList])
const renderNode = (node: NodeTracing) => {
const isParallelFirstNode = !!node.parallelDetail?.isParallelStartNode
if (isParallelFirstNode) {
@ -191,7 +200,7 @@ const TracingPanel: FC<TracingPanelProps> = ({
showLLMDetail={showLLMDetail}
setShowLLMDetailFalse={setShowLLMDetailFalse}
llmResultList={llmResultList}
llmResultList={liveLLMResultList}
/>
)
}

View File

@ -51,7 +51,8 @@ export type IOnDataMoreInfo = {
messageId: string
errorMessage?: string
errorCode?: string
chunk_type?: 'text' | 'tool_call' | 'tool_result' | 'thought' | 'thought_start' | 'thought_end'
chunk_type?: 'text' | 'tool_call' | 'tool_result' | 'thought' | 'thought_start' | 'thought_end' | 'model_start' | 'model_end'
node_id?: string
tool_call_id?: string
tool_name?: string
tool_arguments?: string
@ -61,6 +62,13 @@ export type IOnDataMoreInfo = {
tool_files?: string[]
tool_error?: string
tool_elapsed_time?: number
model_provider?: string
model_name?: string
model_icon?: string | IconObject
model_icon_dark?: string | IconObject
model_usage?: Record<string, number | string> | null
model_duration?: number
}
export type IOnData = (message: string, isFirstMessage: boolean, moreInfo: IOnDataMoreInfo) => void
@ -312,6 +320,7 @@ export const handleStream = (
taskId: bufferObj.task_id,
messageId: bufferObj.id,
chunk_type: bufferObj.chunk_type,
node_id: bufferObj.node_id,
tool_call_id: bufferObj.tool_call_id,
tool_name: bufferObj.tool_name,
tool_arguments: bufferObj.tool_arguments,
@ -320,6 +329,12 @@ export const handleStream = (
tool_files: bufferObj.tool_files,
tool_error: bufferObj.tool_error,
tool_elapsed_time: bufferObj.tool_elapsed_time,
model_provider: bufferObj.model_provider,
model_name: bufferObj.model_name,
model_icon: bufferObj.model_icon,
model_icon_dark: bufferObj.model_icon_dark,
model_usage: bufferObj.model_usage,
model_duration: bufferObj.model_duration,
})
isFirstMessage = false
}

View File

@ -105,6 +105,7 @@ export type LLMTraceItem = {
icon_dark?: string | IconObject
error?: string
status?: 'success' | 'error'
usage?: Record<string, number | string> | null
}
export type NodeTracing = {