diff --git a/apps/application/chat_pipeline/step/chat_step/impl/base_chat_step.py b/apps/application/chat_pipeline/step/chat_step/impl/base_chat_step.py
index f668cdee7d4..7fa4f249ac9 100644
--- a/apps/application/chat_pipeline/step/chat_step/impl/base_chat_step.py
+++ b/apps/application/chat_pipeline/step/chat_step/impl/base_chat_step.py
@@ -76,10 +76,12 @@ def event_content(response,
     all_text = ''
     reasoning_content = ''
     try:
+        response_reasoning_content = False
         for chunk in response:
             reasoning_chunk = reasoning.get_reasoning_content(chunk)
             content_chunk = reasoning_chunk.get('content')
             if 'reasoning_content' in chunk.additional_kwargs:
+                response_reasoning_content = True
                 reasoning_content_chunk = chunk.additional_kwargs.get('reasoning_content', '')
             else:
                 reasoning_content_chunk = reasoning_chunk.get('reasoning_content')
@@ -95,6 +97,21 @@ def event_content(response,
                                                                          'node_type': 'ai-chat-node',
                                                                          'real_node_id': 'ai-chat-node',
                                                                          'reasoning_content': reasoning_content_chunk if reasoning_content_enable else ''})
+        reasoning_chunk = reasoning.get_end_reasoning_content()
+        all_text += reasoning_chunk.get('content')
+        reasoning_content_chunk = ""
+        if not response_reasoning_content:
+            reasoning_content_chunk = reasoning_chunk.get(
+                'reasoning_content')
+        yield manage.get_base_to_response().to_stream_chunk_response(chat_id, str(chat_record_id), 'ai-chat-node',
+                                                                     [], reasoning_chunk.get('content'),
+                                                                     False,
+                                                                     0, 0, {'node_is_end': False,
+                                                                            'view_type': 'many_view',
+                                                                            'node_type': 'ai-chat-node',
+                                                                            'real_node_id': 'ai-chat-node',
+                                                                            'reasoning_content'
+                                                                            : reasoning_content_chunk if reasoning_content_enable else ''})
         # 获取token
         if is_ai_chat:
             try:
@@ -276,11 +293,13 @@ def execute_block(self, message_list: List[BaseMessage],
             response_token = 0
         write_context(self, manage, request_token, response_token, chat_result.content)
         reasoning_result = reasoning.get_reasoning_content(chat_result)
-        content = reasoning_result.get('content')
+        reasoning_result_end = reasoning.get_end_reasoning_content()
+        content = reasoning_result.get('content') + reasoning_result_end.get('content')
         if 'reasoning_content' in chat_result.response_metadata:
             reasoning_content = chat_result.response_metadata.get('reasoning_content', '')
         else:
-            reasoning_content = reasoning_result.get('reasoning_content')
+            reasoning_content = reasoning_result.get('reasoning_content') + reasoning_result_end.get(
+                'reasoning_content')
         post_response_handler.handler(chat_id, chat_record_id, paragraph_list, problem_text,
                                       chat_result.content, manage, self, padding_problem_text, client_id,
                                       reasoning_content=reasoning_content if reasoning_content_enable else '')
diff --git a/apps/application/flow/step_node/ai_chat_step_node/impl/base_chat_node.py b/apps/application/flow/step_node/ai_chat_step_node/impl/base_chat_node.py
index dc73887eb1b..d72d93b4fa8 100644
--- a/apps/application/flow/step_node/ai_chat_step_node/impl/base_chat_node.py
+++ b/apps/application/flow/step_node/ai_chat_step_node/impl/base_chat_node.py
@@ -55,10 +55,12 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
                                                                          'reasoning_content_start': ''})
     reasoning = Reasoning(model_setting.get('reasoning_content_start', ''),
                           model_setting.get('reasoning_content_end', ''))
+    response_reasoning_content = False
     for chunk in response:
         reasoning_chunk = reasoning.get_reasoning_content(chunk)
         content_chunk = reasoning_chunk.get('content')
         if 'reasoning_content' in chunk.additional_kwargs:
+            response_reasoning_content = True
             reasoning_content_chunk = chunk.additional_kwargs.get('reasoning_content', '')
         else:
             reasoning_content_chunk = reasoning_chunk.get('reasoning_content')
@@ -69,6 +71,16 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
         yield {'content': content_chunk,
                'reasoning_content': reasoning_content_chunk if model_setting.get('reasoning_content_enable',
                                                                                  False) else ''}
+
+    reasoning_chunk = reasoning.get_end_reasoning_content()
+    answer += reasoning_chunk.get('content')
+    reasoning_content_chunk = ""
+    if not response_reasoning_content:
+        reasoning_content_chunk = reasoning_chunk.get(
+            'reasoning_content')
+    yield {'content': reasoning_chunk.get('content'),
+           'reasoning_content': reasoning_content_chunk if model_setting.get('reasoning_content_enable',
+                                                                             False) else ''}
     _write_context(node_variable, workflow_variable, node, workflow, answer, reasoning_content)
 
 
@@ -86,11 +98,12 @@ def write_context(node_variable: Dict, workflow_variable: Dict, node: INode, wor
                                                                          'reasoning_content_start': ''})
     reasoning = Reasoning(model_setting.get('reasoning_content_start'), model_setting.get('reasoning_content_end'))
     reasoning_result = reasoning.get_reasoning_content(response)
-    content = reasoning_result.get('content')
+    reasoning_result_end = reasoning.get_end_reasoning_content()
+    content = reasoning_result.get('content') + reasoning_result_end.get('content')
     if 'reasoning_content' in response.response_metadata:
         reasoning_content = response.response_metadata.get('reasoning_content', '')
     else:
-        reasoning_content = reasoning_result.get('reasoning_content')
+        reasoning_content = reasoning_result.get('reasoning_content') + reasoning_result_end.get('reasoning_content')
     _write_context(node_variable, workflow_variable, node, workflow, content, reasoning_content)
 
 
diff --git a/apps/application/flow/tools.py b/apps/application/flow/tools.py
index a4e0d5a5837..dfbf69b3593 100644
--- a/apps/application/flow/tools.py
+++ b/apps/application/flow/tools.py
@@ -32,6 +32,17 @@ def __init__(self, reasoning_content_start, reasoning_content_end):
         self.reasoning_content_is_end = False
         self.reasoning_content_chunk = ""
 
+    def get_end_reasoning_content(self):
+        if not self.reasoning_content_is_start and not self.reasoning_content_is_end:
+            r = {'content': self.all_content, 'reasoning_content': ''}
+            self.reasoning_content_chunk = ""
+            return r
+        if self.reasoning_content_is_start and not self.reasoning_content_is_end:
+            r = {'content': '', 'reasoning_content': self.reasoning_content_chunk}
+            self.reasoning_content_chunk = ""
+            return r
+        return {'content': '', 'reasoning_content': ''}
+
     def get_reasoning_content(self, chunk):
         # 如果没有开始思考过程标签那么就全是结果
         if self.reasoning_content_start_tag is None or len(self.reasoning_content_start_tag) == 0:
@@ -60,8 +71,7 @@ def get_reasoning_content(self, chunk):
             return {'content': chunk.content, 'reasoning_content': ''}
         # 是否包含结束
         if reasoning_content_end_tag_prefix_index > -1:
-            if len(
-                    self.reasoning_content_chunk) - reasoning_content_end_tag_prefix_index > self.reasoning_content_end_tag_len:
+            if len(self.reasoning_content_chunk) - reasoning_content_end_tag_prefix_index >= self.reasoning_content_end_tag_len:
                 reasoning_content_end_tag_index = self.reasoning_content_chunk.find(self.reasoning_content_end_tag)
                 if reasoning_content_end_tag_index > -1:
                     reasoning_content_chunk = self.reasoning_content_chunk[0:reasoning_content_end_tag_index]
diff --git a/ui/src/api/type/application.ts b/ui/src/api/type/application.ts
index c5858acbe1e..6e5bd0b0700 100644
--- a/ui/src/api/type/application.ts
+++ b/ui/src/api/type/application.ts
@@ -158,12 +158,15 @@ export class ChatRecordManage {
   get_run_node() {
     if (
       this.write_node_info &&
-      (this.write_node_info.current_node.buffer.length > 0 ||
+      (this.write_node_info.current_node.reasoning_content_buffer.length > 0 ||
+        this.write_node_info.current_node.buffer.length > 0 ||
         !this.write_node_info.current_node.is_end)
     ) {
       return this.write_node_info
     }
-    const run_node = this.node_list.filter((item) => item.buffer.length > 0 || !item.is_end)[0]
+    const run_node = this.node_list.filter(
+      (item) => item.reasoning_content_buffer.length > 0 || item.buffer.length > 0 || !item.is_end
+    )[0]
     if (run_node) {
       const index = this.node_list.indexOf(run_node)