chore: fix type issues

This commit is contained in:
Stream 2026-01-28 06:43:08 +08:00
parent 9d287647c1
commit a571b3abb2
No known key found for this signature in database
GPG Key ID: 0D403F5A24E1C78B
3 changed files with 10 additions and 7 deletions

View File

@@ -70,8 +70,8 @@ class ContextGeneratePayload(BaseModel):
model_config_data: dict[str, Any] = Field(..., alias="model_config", description="Model configuration")
available_vars: list[AvailableVarPayload] = Field(..., description="Available variables from upstream nodes")
parameter_info: ParameterInfoPayload = Field(..., description="Target parameter metadata from the frontend")
code_context: CodeContextPayload | None = Field(
default=None, description="Existing code node context for incremental generation"
code_context: CodeContextPayload = Field(
description="Existing code node context for incremental generation"
)
@@ -81,8 +81,8 @@ class SuggestedQuestionsPayload(BaseModel):
language: str = Field(
default="English", description="Language for generated questions (e.g. English, Chinese, Japanese)"
)
model_config_data: dict[str, Any] | None = Field(
default=None,
model_config_data: dict[str, Any] = Field(
default_factory=dict,
alias="model_config",
description="Model configuration (optional, uses system default if not provided)",
)

View File

@@ -730,6 +730,8 @@ Generate {language} code to extract/transform available variables for the target
raise ValueError("Workflow not found for the given app model.")
last_run = workflow_service.get_node_last_run(app_model=app, workflow=workflow, node_id=node_id)
try:
if not last_run:
raise ValueError()
node_type = last_run.node_type
except Exception:
try:

View File

@@ -1402,12 +1402,12 @@ class LLMNode(Node[LLMNodeData]):
# Create typed NodeData from dict
typed_node_data = LLMNodeData.model_validate(node_data)
prompt_template = typed_node_data.prompt_template
prompt_template: (Sequence[LLMNodeChatModelMessage | PromptMessageContext] |
LLMNodeCompletionModelPromptTemplate) = typed_node_data.prompt_template
variable_selectors = []
prompt_context_selectors: list[Sequence[str]] = []
if isinstance(prompt_template, list):
for prompt in prompt_template:
prompt: LLMNodeChatModelMessage | PromptMessageContext
if isinstance(prompt, LLMNodeChatModelMessage) and prompt.edition_type == "jinja2":
variable_template_parser = VariableTemplateParser(template=prompt.text)
variable_selectors.extend(variable_template_parser.extract_variable_selectors())
@@ -1453,10 +1453,11 @@ class LLMNode(Node[LLMNodeData]):
if isinstance(prompt_template, list):
for prompt in prompt_template:
prompt: LLMNodeChatModelMessage | PromptMessageContext
if isinstance(prompt, LLMNodeChatModelMessage) and prompt.edition_type == "jinja2":
enable_jinja = True
break
if isinstance(prompt, PromptMessageContext):
prompt_context_selectors.append(prompt.value_selector)
else:
prompt_template: LLMNodeCompletionModelPromptTemplate
enable_jinja = True