fix: ensure sub-graph modal syncs immediately when applying generated code.
This commit is contained in:
zhsama 2026-02-05 06:06:38 +08:00
parent 52b34b1fdb
commit a2380c4fd3
4 changed files with 85 additions and 15 deletions

View File

@ -14,6 +14,7 @@ from core.llm_generator.context_models import (
)
from core.llm_generator.entities import RuleCodeGeneratePayload, RuleGeneratePayload, RuleStructuredOutputPayload
from core.llm_generator.output_models import (
CodeNodeOutputItem,
CodeNodeStructuredOutput,
InstructionModifyOutput,
SuggestedQuestionsOutput,
@ -479,8 +480,10 @@ class LLMGenerator:
model_parameters=model_parameters,
)
response_payload = response.model_dump()
response_payload["outputs"] = cls._format_code_outputs(response.outputs)
return {
**response.model_dump(),
**response_payload,
"code_language": language,
"error": "",
}
@ -503,6 +506,20 @@ class LLMGenerator:
"error": error,
}
@classmethod
def _format_code_outputs(cls, outputs: Sequence[CodeNodeOutputItem]) -> dict[str, dict[str, str]]:
    """Normalize code outputs to a stable mapping for frontend consumers.

    The LLM structured output uses an array to satisfy strict-mode schemas, but the
    frontend expects a name-to-type mapping for Code node outputs.

    :param outputs: structured output items produced by the LLM.
    :return: mapping of output name to ``{"type": <type string>}``. Items with an
        empty name are dropped; a later item with a duplicate name overwrites an
        earlier one, matching the loop-assignment behavior.
    """
    return {
        item.name: {"type": str(item.type)}
        for item in outputs
        if item.name
    }
@classmethod
def generate_suggested_questions(
cls,
@ -557,10 +574,14 @@ class LLMGenerator:
completion_params = model_config.get("completion_params", {}) if model_config else {}
try:
response = invoke_llm_with_pydantic_model(provider=model_instance.provider, model_schema=model_schema,
model_instance=model_instance, prompt_messages=prompt_messages,
output_model=SuggestedQuestionsOutput,
model_parameters=completion_params)
response = invoke_llm_with_pydantic_model(
provider=model_instance.provider,
model_schema=model_schema,
model_instance=model_instance,
prompt_messages=prompt_messages,
output_model=SuggestedQuestionsOutput,
model_parameters=completion_params,
)
return {"questions": response.questions, "error": ""}

View File

@ -1,7 +1,7 @@
import type { Var, Variable } from '../../types'
import type { CodeNodeType, OutputVar } from './types'
import { produce } from 'immer'
import { useCallback, useEffect, useState } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import {
useNodesReadOnly,
} from '@/app/components/workflow/hooks'
@ -56,9 +56,28 @@ const useConfig = (id: string, payload: CodeNodeType) => {
setInputs,
})
const [outputKeyOrders, setOutputKeyOrders] = useState<string[]>(() => Object.keys(payload.outputs || {}))
const outputKeyOrdersRef = useRef<string[]>(Object.keys(payload.outputs || {}))
const outputKeyOrders = (() => {
const outputKeys = inputs.outputs ? Object.keys(inputs.outputs) : []
if (outputKeys.length === 0) {
if (outputKeyOrdersRef.current.length > 0)
outputKeyOrdersRef.current = []
return [] as string[]
}
const nextOutputKeyOrders = outputKeyOrdersRef.current.filter(key => outputKeys.includes(key))
outputKeys.forEach((key) => {
if (!nextOutputKeyOrders.includes(key))
nextOutputKeyOrders.push(key)
})
outputKeyOrdersRef.current = nextOutputKeyOrders
return nextOutputKeyOrders
})()
const syncOutputKeyOrders = useCallback((outputs: OutputVar) => {
setOutputKeyOrders(Object.keys(outputs))
outputKeyOrdersRef.current = Object.keys(outputs)
}, [])
const handleOutputKeyOrdersChange = useCallback((newOutputKeyOrders: string[]) => {
outputKeyOrdersRef.current = newOutputKeyOrders
}, [])
useEffect(() => {
const outputKeys = inputs.outputs ? Object.keys(inputs.outputs) : []
@ -174,7 +193,7 @@ const useConfig = (id: string, payload: CodeNodeType) => {
inputs,
setInputs,
outputKeyOrders,
onOutputKeyOrdersChange: setOutputKeyOrders,
onOutputKeyOrdersChange: handleOutputKeyOrdersChange,
})
const filterVar = useCallback((varPayload: Var) => {

View File

@ -179,7 +179,7 @@ const ContextGenerateModal = forwardRef<ContextGenerateModalHandle, Props>(({
outputs: nextOutputs,
variables: nextVariables,
},
})
}, { sync: true })
if (closeOnApply)
handleCloseModal()

View File

@ -48,6 +48,7 @@ const SubGraphModal: FC<SubGraphModalProps> = (props) => {
const workflowNodes = useWorkflowStore(state => state.nodes)
const workflowEdges = useReactFlowStore(state => state.edges)
const setControlPromptEditorRerenderKey = useWorkflowStore(state => state.setControlPromptEditorRerenderKey)
const setWorkflowNodes = useWorkflowStore(state => state.setNodes)
const { handleSyncWorkflowDraft, doSyncWorkflowDraft } = useNodesSyncDraft()
const configsMap = useHooksStore(state => state.configsMap)
const { getBeforeNodesInSameBranch } = useWorkflow()
@ -134,8 +135,9 @@ const SubGraphModal: FC<SubGraphModalProps> = (props) => {
}
})
setNodes(nextNodes)
handleSyncWorkflowDraft()
}, [handleSyncWorkflowDraft, paramKey, reactflowStore, toolNodeId])
setWorkflowNodes(nextNodes)
handleSyncWorkflowDraft(true)
}, [handleSyncWorkflowDraft, paramKey, reactflowStore, setWorkflowNodes, toolNodeId])
useEffect(() => {
if (!toolParam || (toolParam.type && toolParam.type !== VarKindType.nested_node))
@ -179,7 +181,7 @@ const SubGraphModal: FC<SubGraphModalProps> = (props) => {
const ensureAssembleOutputs = (payload: CodeNodeType) => {
const outputs = payload.outputs || {}
if (outputs.result)
if (Object.keys(outputs).length > 0)
return payload
return {
...payload,
@ -193,6 +195,20 @@ const SubGraphModal: FC<SubGraphModalProps> = (props) => {
}
}
const resolveAssembleOutputSelector = (rawSelector: unknown, outputKeys: string[]) => {
  // No outputs exist, so there is no key to retarget the selector to.
  if (outputKeys.length === 0)
    return null
  // Normalize to a [key, ...path] shape, dropping a leading extractor-node-id segment.
  const selectorPath = !Array.isArray(rawSelector)
    ? []
    : (rawSelector[0] === extractorNodeId ? rawSelector.slice(1) : rawSelector)
  const currentKey = selectorPath[0]
  // 'result' is the conventional Code-node output name; otherwise take the first key.
  const preferredKey = outputKeys.includes('result') ? 'result' : outputKeys[0]
  const nextKey = outputKeys.includes(currentKey) ? currentKey : preferredKey
  // null signals "no rewrite needed": the current key is already valid, or no
  // replacement key could be chosen at all.
  if (!nextKey || nextKey === currentKey)
    return null
  return [nextKey, ...selectorPath.slice(1)]
}
const userPromptText = isAgentVariant
? getUserPromptText((extractorNodeData.data as LLMNodeType).prompt_template)
: ''
@ -223,6 +239,19 @@ const SubGraphModal: FC<SubGraphModalProps> = (props) => {
return node
const currentParam = toolData.tool_parameters[paramKey]
const baseNestedConfig = currentParam.nested_node_config ?? nestedNodeConfig
let nextNestedConfig = baseNestedConfig
if (!isAgentVariant) {
const outputKeys = Object.keys((extractorNodeData.data as CodeNodeType).outputs || {})
const nextSelector = resolveAssembleOutputSelector(baseNestedConfig?.output_selector, outputKeys)
if (nextSelector) {
nextNestedConfig = {
...baseNestedConfig,
extractor_node_id: baseNestedConfig?.extractor_node_id || extractorNodeId,
output_selector: nextSelector,
}
}
}
return {
...node,
data: {
@ -233,7 +262,7 @@ const SubGraphModal: FC<SubGraphModalProps> = (props) => {
...currentParam,
type: VarKindType.nested_node,
value: nextValue,
nested_node_config: currentParam.nested_node_config ?? nestedNodeConfig,
nested_node_config: nextNestedConfig,
},
},
},
@ -242,8 +271,9 @@ const SubGraphModal: FC<SubGraphModalProps> = (props) => {
return node
})
setNodes(nextNodes)
setWorkflowNodes(nextNodes)
setControlPromptEditorRerenderKey(Date.now())
}, [assemblePlaceholder, extractorNodeId, getUserPromptText, isAgentVariant, nestedNodeConfig, paramKey, reactflowStore, resolvedAgentNodeId, setControlPromptEditorRerenderKey, toolNodeId])
}, [assemblePlaceholder, extractorNodeId, getUserPromptText, isAgentVariant, nestedNodeConfig, paramKey, reactflowStore, resolvedAgentNodeId, setControlPromptEditorRerenderKey, setWorkflowNodes, toolNodeId])
return (
<Transition appear show={isOpen} as={Fragment}>