From 67e54ed3f0ae89db3c2b03b0f3d886c335ac4fcc Mon Sep 17 00:00:00 2001
From: shaohuzhang1
Date: Wed, 19 Feb 2025 12:05:38 +0800
Subject: [PATCH] fix: Tokens cannot be obtained from the model dialogue

---
 apps/setting/models_provider/impl/base_chat_open_ai.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/apps/setting/models_provider/impl/base_chat_open_ai.py b/apps/setting/models_provider/impl/base_chat_open_ai.py
index d00002ed749..6ad1baac6c7 100644
--- a/apps/setting/models_provider/impl/base_chat_open_ai.py
+++ b/apps/setting/models_provider/impl/base_chat_open_ai.py
@@ -52,7 +52,8 @@ def _stream(
             run_manager: Optional[CallbackManagerForLLMRun] = None,
             **kwargs: Any,
     ) -> Iterator[ChatGenerationChunk]:
-
+        kwargs["stream"] = True
+        kwargs["stream_options"] = {"include_usage": True}
         """Set default stream_options."""
         stream_usage = self._should_stream_usage(kwargs.get('stream_usage'), **kwargs)
         # Note: stream_options is not a valid parameter for Azure OpenAI.
@@ -63,7 +64,6 @@ def _stream(
 
         if stream_usage:
             kwargs["stream_options"] = {"include_usage": stream_usage}
-        kwargs["stream"] = True
         payload = self._get_request_payload(messages, stop=stop, **kwargs)
         default_chunk_class: Type[BaseMessageChunk] = AIMessageChunk
         base_generation_info = {}
@@ -107,9 +107,6 @@ def _stream(
 
                 continue
             # custom code
-            if generation_chunk.message.usage_metadata is not None:
-                self.usage_metadata = generation_chunk.message.usage_metadata
-            # custom code
             if len(chunk['choices']) > 0 and 'reasoning_content' in chunk['choices'][0]['delta']:
                 generation_chunk.message.additional_kwargs["reasoning_content"] = chunk['choices'][0]['delta'][
                     'reasoning_content']
@@ -121,6 +118,9 @@ def _stream(
                     generation_chunk.text, chunk=generation_chunk, logprobs=logprobs
                 )
             is_first_chunk = False
+            # custom code
+            if generation_chunk.message.usage_metadata is not None:
+                self.usage_metadata = generation_chunk.message.usage_metadata
             yield generation_chunk
 
     def _create_chat_result(self,