-
Notifications
You must be signed in to change notification settings - Fork 11
/
llm_api.py
46 lines (39 loc) · 1.59 KB
/
llm_api.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""=================================================
@PROJECT_NAME: agent_example
@File : llm_api.py
@Author : Liuyz
@Date : 2024/6/28 17:03
@Function:
@Modify History:
@Copyright:Copyright(c) 2024-2026. All Rights Reserved
=================================================="""
# 业务空间模型调用请参考文档传入workspace信息: https://help.aliyun.com/document_detail/2746874.html
import random
from http import HTTPStatus
from dashscope import Generation
def call_stream_with_messages():
    """Stream a chat completion from the DashScope Generation API and print it.

    Sends a single user prompt (a recipe request, in Chinese) to the
    'qwen1.5-110b-chat' model with streaming enabled, prints each streamed
    response object, accumulates the assistant message content, and finally
    prints the full assembled answer.  API errors are reported to stdout
    rather than raised.

    Returns:
        None.  All output goes to stdout.
    """
    messages = [
        {'role': 'user', 'content': '用萝卜、土豆、茄子做饭,给我个菜谱'}]
    responses = Generation.call(
        'qwen1.5-110b-chat',
        messages=messages,
        seed=random.randint(1, 10000),  # optional random seed; API default is 1234
        result_format='message',  # set the result to be "message" format.
        stream=True,
        # FIX: the original passed `output_in_full=True`, which is not a
        # DashScope parameter.  In the default (full) streaming mode each
        # chunk carries the cumulative text so far, so the `+=` accumulation
        # below would duplicate content.  `incremental_output=True` makes
        # each chunk carry only the newly generated delta.
        incremental_output=True  # get streaming output incrementally
    )
    full_content = ''
    for response in responses:
        if response.status_code == HTTPStatus.OK:
            # Each incremental chunk contributes only its new delta text.
            full_content += response.output.choices[0]['message']['content']
            print(response)
        else:
            print('Request id: %s, Status code: %s, error code: %s, error message: %s' % (
                response.request_id, response.status_code,
                response.code, response.message
            ))
    print('Full content: \n' + full_content)
# Script entry point: run the streaming demo only when executed directly,
# not when this module is imported.
if __name__ == '__main__':
    call_stream_with_messages()