diff --git a/web/app/components/base/chat/chat-with-history/hooks.tsx b/web/app/components/base/chat/chat-with-history/hooks.tsx
index 0dae241043..8ab0bbfe77 100644
--- a/web/app/components/base/chat/chat-with-history/hooks.tsx
+++ b/web/app/components/base/chat/chat-with-history/hooks.tsx
@@ -43,7 +43,7 @@ import {
import { TransferMethod } from '@/types/app'
import { addFileInfos, sortAgentSorts } from '../../../tools/utils'
import { CONVERSATION_ID_INFO } from '../constants'
-import { buildChatItemTree, buildToolCallsFromHistorySequence, getProcessedSystemVariablesFromUrlParams, getRawInputsFromUrlParams, getRawUserVariablesFromUrlParams } from '../utils'
+import { buildChatItemTree, buildLLMGenerationItemsFromHistorySequence, getProcessedSystemVariablesFromUrlParams, getRawInputsFromUrlParams, getRawUserVariablesFromUrlParams } from '../utils'
function getFormattedChatList(messages: any[]) {
const newChatList: ChatItem[] = []
@@ -59,8 +59,8 @@ function getFormattedChatList(messages: any[]) {
const answerFiles = item.message_files?.filter((file: any) => file.belongs_to === 'assistant') || []
newChatList.push({
id: item.id,
- content: buildToolCallsFromHistorySequence(item).message,
- toolCalls: buildToolCallsFromHistorySequence(item).toolCalls,
+ content: buildLLMGenerationItemsFromHistorySequence(item).message,
+ llmGenerationItems: buildLLMGenerationItemsFromHistorySequence(item).llmGenerationItems,
agent_thoughts: addFileInfos(item.agent_thoughts ? sortAgentSorts(item.agent_thoughts) : item.agent_thoughts, item.message_files),
feedback: item.feedback,
isAnswer: true,
diff --git a/web/app/components/base/chat/chat/answer/generation-content.tsx b/web/app/components/base/chat/chat/answer/generation-content.tsx
new file mode 100644
index 0000000000..4748d48360
--- /dev/null
+++ b/web/app/components/base/chat/chat/answer/generation-content.tsx
@@ -0,0 +1,23 @@
+import type { LLMGenerationItem } from '@/types/workflow'
+import ToolCallItemComponent from '@/app/components/workflow/run/llm-log/tool-call-item'
+
+type GenerationContentProps = {
+ llmGenerationItems: LLMGenerationItem[]
+}
+const GenerationContent = ({
+ llmGenerationItems,
+}: GenerationContentProps) => {
+ return (
+
+ {llmGenerationItems.map((llmGenerationItem: LLMGenerationItem, index: number) => (
+
+ ))}
+
+ )
+}
+
+export default GenerationContent
diff --git a/web/app/components/base/chat/chat/answer/index.tsx b/web/app/components/base/chat/chat/answer/index.tsx
index 9663512e53..d3c95eb213 100644
--- a/web/app/components/base/chat/chat/answer/index.tsx
+++ b/web/app/components/base/chat/chat/answer/index.tsx
@@ -18,10 +18,10 @@ import { cn } from '@/utils/classnames'
import ContentSwitch from '../content-switch'
import AgentContent from './agent-content'
import BasicContent from './basic-content'
+import GenerationContent from './generation-content'
import More from './more'
import Operation from './operation'
import SuggestedQuestions from './suggested-questions'
-import ToolCalls from './tool-calls'
import WorkflowProcessItem from './workflow-process'
type AnswerProps = {
@@ -62,7 +62,7 @@ const Answer: FC = ({
workflowProcess,
allFiles,
message_files,
- toolCalls,
+ llmGenerationItems,
} = item
const hasAgentThoughts = !!agent_thoughts?.length
@@ -114,6 +114,9 @@ const Answer: FC = ({
}, [switchSibling, item.prevSibling, item.nextSibling])
const contentIsEmpty = typeof content === 'string' && content.trim() === ''
+  const generationContentRenderIsUsed = !!llmGenerationItems?.some((genItem) => {
+    return genItem.type === 'tool' || genItem.type === 'thought'
+  })
return (
@@ -157,8 +160,8 @@ const Answer: FC
= ({
)
}
{
- !!toolCalls?.length && (
-
+ generationContentRenderIsUsed && (
+
)
}
{
@@ -169,7 +172,7 @@ const Answer: FC = ({
)
}
{
- !contentIsEmpty && !hasAgentThoughts && (
+ !contentIsEmpty && !hasAgentThoughts && !generationContentRenderIsUsed && (
)
}
diff --git a/web/app/components/base/chat/chat/answer/tool-calls/index.tsx b/web/app/components/base/chat/chat/answer/tool-calls/index.tsx
deleted file mode 100644
index 66118006fe..0000000000
--- a/web/app/components/base/chat/chat/answer/tool-calls/index.tsx
+++ /dev/null
@@ -1,23 +0,0 @@
-import type { ToolCallItem } from '@/types/workflow'
-import ToolCallItemComponent from '@/app/components/workflow/run/llm-log/tool-call-item'
-
-type ToolCallsProps = {
- toolCalls: ToolCallItem[]
-}
-const ToolCalls = ({
- toolCalls,
-}: ToolCallsProps) => {
- return (
-
- {toolCalls.map((toolCall: ToolCallItem, index: number) => (
-
- ))}
-
- )
-}
-
-export default ToolCalls
diff --git a/web/app/components/base/chat/chat/hooks.ts b/web/app/components/base/chat/chat/hooks.ts
index 56f2b44de5..fe0772b41f 100644
--- a/web/app/components/base/chat/chat/hooks.ts
+++ b/web/app/components/base/chat/chat/hooks.ts
@@ -343,8 +343,87 @@ export const useChat = (
tool_elapsed_time,
}: any) => {
if (!isAgentMode) {
- if (chunk_type === 'text')
+ if (chunk_type === 'text') {
responseItem.content = responseItem.content + message
+
+ if (!responseItem.llmGenerationItems)
+ responseItem.llmGenerationItems = []
+
+ const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
+
+ if (isNotCompletedTextItemIndex > -1) {
+ responseItem.llmGenerationItems![isNotCompletedTextItemIndex].text += message
+ }
+ else {
+          const textItemId = uuidV4()
+          responseItem.llmGenerationItems?.push({
+            id: textItemId,
+ type: 'text',
+ text: message,
+ })
+ }
+ }
+
+ if (chunk_type === 'tool_call') {
+ if (!responseItem.llmGenerationItems)
+ responseItem.llmGenerationItems = []
+
+ const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
+ if (isNotCompletedTextItemIndex > -1) {
+ responseItem.llmGenerationItems![isNotCompletedTextItemIndex].textCompleted = true
+ }
+ toolCallId = uuidV4()
+ responseItem.llmGenerationItems?.push({
+ id: toolCallId,
+ type: 'tool',
+ toolName: tool_name,
+ toolArguments: tool_arguments,
+ toolIcon: tool_icon,
+ toolIconDark: tool_icon_dark,
+ })
+ }
+
+ if (chunk_type === 'tool_result') {
+ const currentToolCallIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === toolCallId) ?? -1
+
+ if (currentToolCallIndex > -1) {
+ responseItem.llmGenerationItems![currentToolCallIndex].toolError = tool_error
+ responseItem.llmGenerationItems![currentToolCallIndex].toolDuration = tool_elapsed_time
+ responseItem.llmGenerationItems![currentToolCallIndex].toolFiles = tool_files
+ responseItem.llmGenerationItems![currentToolCallIndex].toolOutput = message
+ }
+ }
+
+ if (chunk_type === 'thought_start') {
+ if (!responseItem.llmGenerationItems)
+ responseItem.llmGenerationItems = []
+
+ const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
+ if (isNotCompletedTextItemIndex > -1) {
+ responseItem.llmGenerationItems![isNotCompletedTextItemIndex].textCompleted = true
+ }
+ thoughtId = uuidV4()
+ responseItem.llmGenerationItems?.push({
+ id: thoughtId,
+ type: 'thought',
+ thoughtOutput: '',
+ })
+ }
+
+ if (chunk_type === 'thought') {
+ const currentThoughtIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === thoughtId) ?? -1
+ if (currentThoughtIndex > -1) {
+ responseItem.llmGenerationItems![currentThoughtIndex].thoughtOutput += message
+ }
+ }
+
+ if (chunk_type === 'thought_end') {
+ const currentThoughtIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === thoughtId) ?? -1
+ if (currentThoughtIndex > -1) {
+ responseItem.llmGenerationItems![currentThoughtIndex].thoughtOutput += message
+ responseItem.llmGenerationItems![currentThoughtIndex].thoughtCompleted = true
+ }
+ }
}
else {
const lastThought = responseItem.agent_thoughts?.[responseItem.agent_thoughts?.length - 1]
@@ -352,57 +431,6 @@ export const useChat = (
lastThought.thought = lastThought.thought + message // need immer setAutoFreeze
}
- if (chunk_type === 'tool_call') {
- if (!responseItem.toolCalls)
- responseItem.toolCalls = []
- toolCallId = uuidV4()
- responseItem.toolCalls?.push({
- id: toolCallId,
- type: 'tool',
- toolName: tool_name,
- toolArguments: tool_arguments,
- toolIcon: tool_icon,
- toolIconDark: tool_icon_dark,
- })
- }
-
- if (chunk_type === 'tool_result') {
- const currentToolCallIndex = responseItem.toolCalls?.findIndex(item => item.id === toolCallId) ?? -1
-
- if (currentToolCallIndex > -1) {
- responseItem.toolCalls![currentToolCallIndex].toolError = tool_error
- responseItem.toolCalls![currentToolCallIndex].toolDuration = tool_elapsed_time
- responseItem.toolCalls![currentToolCallIndex].toolFiles = tool_files
- responseItem.toolCalls![currentToolCallIndex].toolOutput = message
- }
- }
-
- if (chunk_type === 'thought_start') {
- if (!responseItem.toolCalls)
- responseItem.toolCalls = []
- thoughtId = uuidV4()
- responseItem.toolCalls.push({
- id: thoughtId,
- type: 'thought',
- thoughtOutput: '',
- })
- }
-
- if (chunk_type === 'thought') {
- const currentThoughtIndex = responseItem.toolCalls?.findIndex(item => item.id === thoughtId) ?? -1
- if (currentThoughtIndex > -1) {
- responseItem.toolCalls![currentThoughtIndex].thoughtOutput += message
- }
- }
-
- if (chunk_type === 'thought_end') {
- const currentThoughtIndex = responseItem.toolCalls?.findIndex(item => item.id === thoughtId) ?? -1
- if (currentThoughtIndex > -1) {
- responseItem.toolCalls![currentThoughtIndex].thoughtOutput += message
- responseItem.toolCalls![currentThoughtIndex].thoughtCompleted = true
- }
- }
-
if (messageId && !hasSetResponseId) {
questionItem.id = `question-${messageId}`
responseItem.id = messageId
diff --git a/web/app/components/base/chat/chat/type.ts b/web/app/components/base/chat/chat/type.ts
index 2e07e287a0..3864adfa75 100644
--- a/web/app/components/base/chat/chat/type.ts
+++ b/web/app/components/base/chat/chat/type.ts
@@ -2,7 +2,7 @@ import type { FileEntity } from '@/app/components/base/file-uploader/types'
import type { TypeWithI18N } from '@/app/components/header/account-setting/model-provider-page/declarations'
import type { InputVarType } from '@/app/components/workflow/types'
import type { Annotation, MessageRating } from '@/models/log'
-import type { FileResponse, IconObject, ToolCallItem } from '@/types/workflow'
+import type { FileResponse, IconObject, LLMGenerationItem } from '@/types/workflow'
export type MessageMore = {
time: string
@@ -104,7 +104,7 @@ export type IChatItem = {
siblingIndex?: number
prevSibling?: string
nextSibling?: string
- toolCalls?: ToolCallItem[]
+ llmGenerationItems?: LLMGenerationItem[]
}
export type Metadata = {
diff --git a/web/app/components/base/chat/utils.ts b/web/app/components/base/chat/utils.ts
index 958190e7b2..b66253c2f4 100644
--- a/web/app/components/base/chat/utils.ts
+++ b/web/app/components/base/chat/utils.ts
@@ -1,6 +1,6 @@
import type { ChatMessageRes, IChatItem } from './chat/type'
import type { ChatItem, ChatItemInTree } from './types'
-import type { ToolCallItem } from '@/types/workflow'
+import type { LLMGenerationItem } from '@/types/workflow'
import { v4 as uuidV4 } from 'uuid'
import { UUID_NIL } from './constants'
@@ -234,18 +234,18 @@ function getThreadMessages(tree: ChatItemInTree[], targetMessageId?: string): Ch
return ret
}
-const buildToolCallsFromHistorySequence = (message: ChatMessageRes): {
- toolCalls: ToolCallItem[]
+const buildLLMGenerationItemsFromHistorySequence = (message: ChatMessageRes): {
+ llmGenerationItems: LLMGenerationItem[]
message: string
} => {
const { answer, generation_detail } = message
if (!generation_detail) {
- return { toolCalls: [], message: answer || '' }
+ return { llmGenerationItems: [], message: answer || '' }
}
const { reasoning_content = [], tool_calls = [], sequence = [] } = generation_detail
- const toolCalls: ToolCallItem[] = []
+ const llmGenerationItems: LLMGenerationItem[] = []
let answerMessage = ''
sequence.forEach((segment) => {
@@ -260,7 +260,7 @@ const buildToolCallsFromHistorySequence = (message: ChatMessageRes): {
case 'reasoning': {
const reasoning = reasoning_content[segment.index]
if (reasoning) {
- toolCalls.push({
+ llmGenerationItems.push({
id: uuidV4(),
type: 'thought',
thoughtOutput: reasoning,
@@ -272,7 +272,7 @@ const buildToolCallsFromHistorySequence = (message: ChatMessageRes): {
case 'tool_call': {
const toolCall = tool_calls[segment.index]
if (toolCall) {
- toolCalls.push({
+ llmGenerationItems.push({
id: uuidV4(),
type: 'tool',
toolName: toolCall.name,
@@ -288,12 +288,12 @@ const buildToolCallsFromHistorySequence = (message: ChatMessageRes): {
}
})
- return { toolCalls, message: answerMessage || '' }
+ return { llmGenerationItems, message: answerMessage || '' }
}
export {
buildChatItemTree,
- buildToolCallsFromHistorySequence,
+ buildLLMGenerationItemsFromHistorySequence,
getLastAnswer,
getProcessedInputsFromUrlParams,
getProcessedSystemVariablesFromUrlParams,
diff --git a/web/app/components/workflow/panel/chat-record/index.tsx b/web/app/components/workflow/panel/chat-record/index.tsx
index f02350c314..0420266e67 100644
--- a/web/app/components/workflow/panel/chat-record/index.tsx
+++ b/web/app/components/workflow/panel/chat-record/index.tsx
@@ -12,7 +12,7 @@ import {
} from 'react'
import { useStore as useAppStore } from '@/app/components/app/store'
import Chat from '@/app/components/base/chat/chat'
-import { buildChatItemTree, buildToolCallsFromHistorySequence, getThreadMessages } from '@/app/components/base/chat/utils'
+import { buildChatItemTree, buildLLMGenerationItemsFromHistorySequence, getThreadMessages } from '@/app/components/base/chat/utils'
import { getProcessedFilesFromResponse } from '@/app/components/base/file-uploader/utils'
import Loading from '@/app/components/base/loading'
import { fetchConversationMessages } from '@/service/debug'
@@ -38,8 +38,8 @@ function getFormattedChatList(messages: ChatMessageRes[]) {
const answerFiles = item.message_files?.filter((file: any) => file.belongs_to === 'assistant') || []
res.push({
id: item.id,
- content: buildToolCallsFromHistorySequence(item).message,
- toolCalls: buildToolCallsFromHistorySequence(item).toolCalls,
+ content: buildLLMGenerationItemsFromHistorySequence(item).message,
+ llmGenerationItems: buildLLMGenerationItemsFromHistorySequence(item).llmGenerationItems,
feedback: item.feedback,
isAnswer: true,
citation: item.metadata?.retriever_resources,
diff --git a/web/app/components/workflow/panel/debug-and-preview/hooks/use-chat-message-sender.ts b/web/app/components/workflow/panel/debug-and-preview/hooks/use-chat-message-sender.ts
index 163c0797af..c313e31058 100644
--- a/web/app/components/workflow/panel/debug-and-preview/hooks/use-chat-message-sender.ts
+++ b/web/app/components/workflow/panel/debug-and-preview/hooks/use-chat-message-sender.ts
@@ -158,14 +158,37 @@ export function useChatMessageSender({
}) => {
if (!isCurrentRun())
return
- if (chunk_type === 'text')
+ if (chunk_type === 'text') {
responseItem.content = responseItem.content + message
+ if (!responseItem.llmGenerationItems)
+ responseItem.llmGenerationItems = []
+
+ const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
+
+ if (isNotCompletedTextItemIndex > -1) {
+ responseItem.llmGenerationItems![isNotCompletedTextItemIndex].text += message
+ }
+ else {
+        const textItemId = uuidV4()
+        responseItem.llmGenerationItems?.push({
+          id: textItemId,
+ type: 'text',
+ text: message,
+ })
+ }
+ }
+
if (chunk_type === 'tool_call') {
- if (!responseItem.toolCalls)
- responseItem.toolCalls = []
+ if (!responseItem.llmGenerationItems)
+ responseItem.llmGenerationItems = []
+
+ const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
+ if (isNotCompletedTextItemIndex > -1) {
+ responseItem.llmGenerationItems![isNotCompletedTextItemIndex].textCompleted = true
+ }
toolCallId = uuidV4()
- responseItem.toolCalls?.push({
+ responseItem.llmGenerationItems?.push({
id: toolCallId,
type: 'tool',
toolName: tool_name,
@@ -176,21 +199,26 @@ export function useChatMessageSender({
}
if (chunk_type === 'tool_result') {
- const currentToolCallIndex = responseItem.toolCalls?.findIndex(item => item.id === toolCallId) ?? -1
+ const currentToolCallIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === toolCallId) ?? -1
if (currentToolCallIndex > -1) {
- responseItem.toolCalls![currentToolCallIndex].toolError = tool_error
- responseItem.toolCalls![currentToolCallIndex].toolDuration = tool_elapsed_time
- responseItem.toolCalls![currentToolCallIndex].toolFiles = tool_files
- responseItem.toolCalls![currentToolCallIndex].toolOutput = message
+ responseItem.llmGenerationItems![currentToolCallIndex].toolError = tool_error
+ responseItem.llmGenerationItems![currentToolCallIndex].toolDuration = tool_elapsed_time
+ responseItem.llmGenerationItems![currentToolCallIndex].toolFiles = tool_files
+ responseItem.llmGenerationItems![currentToolCallIndex].toolOutput = message
}
}
if (chunk_type === 'thought_start') {
- if (!responseItem.toolCalls)
- responseItem.toolCalls = []
+ if (!responseItem.llmGenerationItems)
+ responseItem.llmGenerationItems = []
+
+ const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
+ if (isNotCompletedTextItemIndex > -1) {
+ responseItem.llmGenerationItems![isNotCompletedTextItemIndex].textCompleted = true
+ }
thoughtId = uuidV4()
- responseItem.toolCalls.push({
+ responseItem.llmGenerationItems?.push({
id: thoughtId,
type: 'thought',
thoughtOutput: '',
@@ -198,17 +226,17 @@ export function useChatMessageSender({
}
if (chunk_type === 'thought') {
- const currentThoughtIndex = responseItem.toolCalls?.findIndex(item => item.id === thoughtId) ?? -1
+ const currentThoughtIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === thoughtId) ?? -1
if (currentThoughtIndex > -1) {
- responseItem.toolCalls![currentThoughtIndex].thoughtOutput += message
+ responseItem.llmGenerationItems![currentThoughtIndex].thoughtOutput += message
}
}
if (chunk_type === 'thought_end') {
- const currentThoughtIndex = responseItem.toolCalls?.findIndex(item => item.id === thoughtId) ?? -1
+ const currentThoughtIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === thoughtId) ?? -1
if (currentThoughtIndex > -1) {
- responseItem.toolCalls![currentThoughtIndex].thoughtOutput += message
- responseItem.toolCalls![currentThoughtIndex].thoughtCompleted = true
+ responseItem.llmGenerationItems![currentThoughtIndex].thoughtOutput += message
+ responseItem.llmGenerationItems![currentThoughtIndex].thoughtCompleted = true
}
}
@@ -245,6 +273,10 @@ export function useChatMessageSender({
if (errorMessage) {
responseItem.content = errorMessage
responseItem.isError = true
+ responseItem.llmGenerationItems?.forEach((item) => {
+ if (item.type === 'text')
+ item.isError = true
+ })
updateCurrentQAOnTree({
placeholderQuestionId,
questionItem,
diff --git a/web/app/components/workflow/run/llm-log/llm-result-panel.tsx b/web/app/components/workflow/run/llm-log/llm-result-panel.tsx
index 56687050a9..6e3f6e1353 100644
--- a/web/app/components/workflow/run/llm-log/llm-result-panel.tsx
+++ b/web/app/components/workflow/run/llm-log/llm-result-panel.tsx
@@ -2,8 +2,8 @@
import type { FC } from 'react'
import type {
+ LLMGenerationItem,
LLMTraceItem,
- ToolCallItem,
} from '@/types/workflow'
import {
RiArrowLeftLine,
@@ -63,7 +63,7 @@ const LLMResultPanel: FC = ({
{
formattedList.map((item, index) => (
-
+
))
}
diff --git a/web/app/components/workflow/run/llm-log/tool-call-item.tsx b/web/app/components/workflow/run/llm-log/tool-call-item.tsx
index e3e5802655..0fcfcdf1b5 100644
--- a/web/app/components/workflow/run/llm-log/tool-call-item.tsx
+++ b/web/app/components/workflow/run/llm-log/tool-call-item.tsx
@@ -1,4 +1,4 @@
-import type { ToolCallItem } from '@/types/workflow'
+import type { LLMGenerationItem } from '@/types/workflow'
import {
RiArrowDownSLine,
} from '@remixicon/react'
@@ -6,6 +6,7 @@ import { useState } from 'react'
import { useTranslation } from 'react-i18next'
import AppIcon from '@/app/components/base/app-icon'
import { Thinking } from '@/app/components/base/icons/src/vender/workflow'
+import { Markdown } from '@/app/components/base/markdown'
import BlockIcon from '@/app/components/workflow/block-icon'
import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor'
import { CodeLanguage } from '@/app/components/workflow/nodes/code/types'
@@ -14,7 +15,7 @@ import { cn } from '@/utils/classnames'
type ToolCallItemComponentProps = {
className?: string
- payload: ToolCallItem
+ payload: LLMGenerationItem
}
const ToolCallItemComponent = ({
className,
@@ -22,6 +23,19 @@ const ToolCallItemComponent = ({
}: ToolCallItemComponentProps) => {
const { t } = useTranslation()
const [expand, setExpand] = useState(false)
+
+ if (payload.type === 'text') {
+ return (
+
+ )
+ }
+
return (