@@ -22,8 +22,11 @@ class OpenaiToResponse(BaseToResponse):
22
22
def to_block_response(self, chat_id, chat_record_id, content, is_end, completion_tokens, prompt_tokens,
                      other_params: dict = None,
                      _status=status.HTTP_200_OK):
    """Build a non-streaming (block) OpenAI-style chat completion HTTP response.

    Args:
        chat_id: Conversation identifier, echoed into the choice.
        chat_record_id: Chat record id, used as the completion ``id``.
        content: Assistant message text for the single choice.
        is_end: Unused in the block variant; kept for interface parity with
            ``to_stream_chunk_response``.
        completion_tokens: Token count of the generated completion.
        prompt_tokens: Token count of the prompt.
        other_params: Optional extras; ``reasoning_content`` is forwarded to the
            choice when present (empty string otherwise).
        _status: HTTP status code for the response (defaults to 200).

    Returns:
        JsonResponse wrapping the serialized ``ChatCompletion`` dict.
    """
    if other_params is None:
        # Use a None sentinel instead of a mutable {} default shared across calls.
        other_params = {}
    data = ChatCompletion(
        id=chat_record_id,
        choices=[
            BlockChoice(finish_reason='stop', index=0, chat_id=chat_id,
                        reasoning_content=other_params.get('reasoning_content', ""),
                        message=ChatCompletionMessage(role='assistant', content=content))],
        # Fix: the OpenAI API defines ``created`` as a Unix timestamp (seconds since
        # epoch); the previous ``datetime.datetime.now().second`` produced only 0-59.
        created=int(datetime.datetime.now().timestamp()),
        model='', object='chat.completion',
        usage=CompletionUsage(completion_tokens=completion_tokens,
                              prompt_tokens=prompt_tokens,
                              # NOTE(review): this tail of the usage call fell outside
                              # the visible diff hunk; total assumed to be
                              # prompt + completion per the OpenAI usage schema — confirm.
                              total_tokens=prompt_tokens + completion_tokens)).dict()
    return JsonResponse(data=data, status=_status)
34
37
35
- def to_stream_chunk_response (self , chat_id , chat_record_id , node_id , up_node_id_list , content , is_end , completion_tokens ,
38
+ def to_stream_chunk_response (self , chat_id , chat_record_id , node_id , up_node_id_list , content , is_end ,
39
+ completion_tokens ,
36
40
prompt_tokens , other_params : dict = None ):
41
+ if other_params is None :
42
+ other_params = {}
37
43
chunk = ChatCompletionChunk (id = chat_record_id , model = '' , object = 'chat.completion.chunk' ,
38
- created = datetime .datetime .now ().second ,choices = [
39
- Choice (delta = ChoiceDelta (content = content , chat_id = chat_id ), finish_reason = 'stop' if is_end else None ,
44
+ created = datetime .datetime .now ().second , choices = [
45
+ Choice (delta = ChoiceDelta (content = content , reasoning_content = other_params .get ('reasoning_content' , "" ),
46
+ chat_id = chat_id ),
47
+ finish_reason = 'stop' if is_end else None ,
40
48
index = 0 )],
41
49
usage = CompletionUsage (completion_tokens = completion_tokens ,
42
50
prompt_tokens = prompt_tokens ,
0 commit comments