-
Notifications
You must be signed in to change notification settings - Fork 115
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
ad7c611
commit f54e567
Showing
2 changed files
with
91 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
# EmoLLM——MetaGPT | ||
|
||
## 使用 | ||
|
||
```bash | ||
pip install -r requirements.txt | ||
pip install . | ||
``` | ||
|
||
## key 配置 | ||
|
||
为确保项目正常运行,请在项目内新建`.env`文件,并在其中设置你的API密钥,你可以根据下列例子写入对应的 key,即可成功运行调用,目前默认使用 zhipuai,你可以仅写入`ZHIPUAI_API_KEY`即可使用。 | ||
|
||
```bash | ||
OPENAI_API_KEY= | ||
OPENAI_API_BASE= | ||
ZHIPUAI_API_KEY= | ||
BAIDU_API_KEY= | ||
OPENAI_API_MODEL= | ||
``` | ||
|
||
## MetaGPT Version | ||
|
||
https://gitee.com/jujimeizuo/meta-gpt-tianji |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,67 @@ | ||
from dotenv import load_dotenv | ||
load_dotenv() | ||
|
||
import asyncio | ||
import os | ||
import erniebot | ||
from zhipuai import ZhipuAI | ||
from metagpt.logs import logger | ||
|
||
|
||
|
||
class BaiduAPI:
    """Thin async wrapper around Baidu ERNIE chat completions.

    Stateless: assumes `erniebot.api_type` and `erniebot.access_token`
    have already been configured globally by the caller.
    """

    def __init__(self):
        # Nothing to hold per-instance; configuration lives on the
        # erniebot module itself.
        pass

    async def _aask(self, prompt, stream=False, model="ernie-4.0", top_p=0.95):
        """Send *prompt* as a single user message and return the reply text."""
        chat_kwargs = dict(
            model=model,
            messages=[{"role": "user", "content": prompt}],
            top_p=top_p,
            stream=stream,
        )
        reply = erniebot.ChatCompletion.create(**chat_kwargs)
        return reply.result
|
||
|
||
class ZhipuAPI:
    """Thin async wrapper around a ZhipuAI client for single-prompt chat calls."""

    def __init__(self, glm=None):
        # A configured ZhipuAI client is mandatory; fail fast without one.
        if glm is None:
            raise RuntimeError("ZhipuApi is Error!")
        self.glm = glm

    async def _aask(self, prompt, stream=False, model="glm-3-turbo", top_p=0.95):
        """Send *prompt* as a single user message and return the reply text."""
        request = dict(
            model=model,
            messages=[{"role": "user", "content": prompt}],
            top_p=top_p,
            stream=stream,
        )
        completion = self.glm.chat.completions.create(**request)
        return completion.choices[0].message.content
|
||
|
||
class LLMAPI:
    """Facade that picks an LLM backend (ZhipuAI or Baidu ERNIE) from env vars.

    Selection order: ZHIPUAI_API_KEY first, then BAIDU_API_KEY.
    Raises RuntimeError if neither variable holds a non-empty value.
    """

    def __init__(self):
        self.llm_api = None

        # Select the backend from the environment. Use .get() so a missing
        # variable falls through instead of raising KeyError — the original
        # `os.environ["..."] is not None` check could never be False (a
        # present key is never None) and crashed when the key was absent,
        # making the final RuntimeError unreachable. Truthiness also skips
        # empty values such as a bare `ZHIPUAI_API_KEY=` line in .env.
        zhipu_key = os.environ.get("ZHIPUAI_API_KEY")
        baidu_key = os.environ.get("BAIDU_API_KEY")
        if zhipu_key:
            glm = ZhipuAI(api_key=zhipu_key)
            self.llm_api = ZhipuAPI(glm=glm)
        elif baidu_key:
            erniebot.api_type = "aistudio"
            erniebot.access_token = baidu_key
            self.llm_api = BaiduAPI()
        else:
            raise RuntimeError("No api_key found!")

    # TODO: the default model should depend on the selected backend
    # (api_type), not always assume zhipuai's "glm-3-turbo".
    async def _aask(self, prompt, stream=False, model="glm-3-turbo", top_p=0.95):
        """Delegate *prompt* to the selected backend and return its reply text."""
        logger.info("call llm_api, response is below")
        rsp = await self.llm_api._aask(prompt, stream=stream, model=model, top_p=top_p)
        return rsp
|
||
|
||
if __name__ == "__main__":
    # Manual smoke test: send one greeting through whichever backend
    # LLMAPI selected from the environment and print the reply.
    client = LLMAPI()
    answer = asyncio.run(client._aask("你好啊"))
    print("result", answer)