This commit is contained in:
felix 2026-03-24 07:32:21 +00:00 committed by GitHub
commit 232ae3f15d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 7 additions and 12 deletions

View File

@@ -73,7 +73,6 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
_task_state: EasyUITaskState
_application_generate_entity: Union[ChatAppGenerateEntity, CompletionAppGenerateEntity, AgentChatAppGenerateEntity]
_precomputed_event_type: StreamEvent | None = None
def __init__(
self,
@@ -346,15 +345,10 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
self._task_state.llm_result.message.content = current_content
if isinstance(event, QueueLLMChunkEvent):
# Determine the event type once, on first LLM chunk, and reuse for subsequent chunks
if not hasattr(self, "_precomputed_event_type") or self._precomputed_event_type is None:
self._precomputed_event_type = self._message_cycle_manager.get_message_event_type(
message_id=self._message_id
)
yield self._message_cycle_manager.message_to_stream_response(
answer=cast(str, delta_text),
message_id=self._message_id,
event_type=self._precomputed_event_type,
event_type=StreamEvent.MESSAGE,
)
else:
yield self._agent_message_to_stream_response(

View File

@@ -133,10 +133,8 @@ class TestEasyUIBasedGenerateTaskPipelineProcessStreamResponse:
pipeline._task_state = mock_task_state
return pipeline
def test_get_message_event_type_called_once_when_first_llm_chunk_arrives(
self, pipeline, mock_message_cycle_manager
):
"""Expect get_message_event_type to be called when processing the first LLM chunk event."""
def test_get_message_event_type_not_called_for_llm_text_chunks(self, pipeline, mock_message_cycle_manager):
"""Expect LLM text chunks to always use MESSAGE event type."""
# Setup a minimal LLM chunk event
chunk = Mock()
chunk.delta.message.content = "hi"
@@ -151,7 +149,10 @@ class TestEasyUIBasedGenerateTaskPipelineProcessStreamResponse:
list(pipeline._process_stream_response(publisher=None, trace_manager=None))
# Assert
mock_message_cycle_manager.get_message_event_type.assert_called_once_with(message_id="test-message-id")
mock_message_cycle_manager.get_message_event_type.assert_not_called()
mock_message_cycle_manager.message_to_stream_response.assert_called_once_with(
answer="hi", message_id="test-message-id", event_type=StreamEvent.MESSAGE
)
def test_llm_chunk_event_with_text_content(self, pipeline, mock_message_cycle_manager, mock_task_state):
"""Test handling of LLM chunk events with text content."""