mgx ZTEai adaptor

qwe12369 committed Dec 24, 2024
1 parent 4954729 commit d37bcb1

Showing 3 changed files with 69 additions and 0 deletions.
1 change: 1 addition & 0 deletions metagpt/configs/llm_config.py
@@ -38,6 +38,7 @@ class LLMType(Enum):
    OPENROUTER = "openrouter"
    BEDROCK = "bedrock"
    ARK = "ark"  # https://www.volcengine.com/docs/82379/1263482#python-sdk
    ZTEAI = "zte"

    def __missing__(self, key):
        return self.OPENAI
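Since LLMType is a value-backed Enum, the new member resolves directly from the string used in configuration; a quick check:

    from metagpt.configs.llm_config import LLMType

    assert LLMType("zte") is LLMType.ZTEAI
    assert LLMType.ZTEAI.value == "zte"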
3 changes: 3 additions & 0 deletions metagpt/provider/__init__.py
@@ -19,6 +19,8 @@
from metagpt.provider.anthropic_api import AnthropicLLM
from metagpt.provider.bedrock_api import BedrockLLM
from metagpt.provider.ark_api import ArkLLM
from metagpt.provider.zteai_api import ZTEaiLLM


__all__ = [
    "GeminiLLM",
@@ -34,4 +36,5 @@
    "AnthropicLLM",
    "BedrockLLM",
    "ArkLLM",
    "ZTEaiLLM",
]
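Exporting ZTEaiLLM here, together with the @register_provider decorator in the new module below, is what lets the provider factory resolve LLMType.ZTEAI at runtime. A rough sketch of that lookup path, assuming the create_llm_instance helper in metagpt.provider.llm_provider_registry (not part of this diff) and the usual LLMConfig field names:

    from metagpt.configs.llm_config import LLMConfig, LLMType
    from metagpt.provider.llm_provider_registry import create_llm_instance

    # Assumed helper: create_llm_instance dispatches on config.api_type via the registry.
    config = LLMConfig(api_type=LLMType.ZTEAI, api_key="your-api-key", app_id="your-app-id")
    llm = create_llm_instance(config)  # expected to return a ZTEaiLLM instance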
65 changes: 65 additions & 0 deletions metagpt/provider/zteai_api.py
@@ -0,0 +1,65 @@
import json
from typing import Optional, Union

import aiohttp

from metagpt.configs.llm_config import LLMConfig, LLMType
from metagpt.const import LLM_API_TIMEOUT, USE_CONFIG_TIMEOUT
from metagpt.provider.base_llm import BaseLLM
from metagpt.provider.llm_provider_registry import register_provider


async def ZTEAI(query_text, app_id, api_key, emp_no, token, model_type) -> str:
    """Call the ZTE AI open platform chat endpoint and return the generated text."""
    url = "https://rdcloud.zte.com.cn/zte-studio-ai-platform/openapi/v1/chat"
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {app_id}-{api_key}",
        "X-Emp-No": emp_no,
        "X-Auth-Value": token,
    }
    data = {
        "chatUuid": "",
        "chatName": "",
        "stream": False,
        "keep": True,
        "text": query_text,
        "model": model_type,
    }
    async with aiohttp.ClientSession() as session:
        async with session.post(url, headers=headers, json=data) as response:
            raw = await response.text()
    return json.loads(raw)["bo"]["result"]


@register_provider(LLMType.ZTEAI)
class ZTEaiLLM(BaseLLM):
    """LLM provider for the ZTE AI open platform."""

    def __init__(self, config: LLMConfig):
        self.config = config

    async def ask(self, msg: str, timeout=USE_CONFIG_TIMEOUT) -> str:
        # Credentials come from the LLM config: app_id/api_key build the
        # Authorization header, domain carries the employee number (X-Emp-No),
        # and access_key is the auth token (X-Auth-Value).
        rsp = await ZTEAI(
            msg,
            self.config.app_id,
            self.config.api_key,
            self.config.domain,
            self.config.access_key,
            self.config.model,
        )
        return rsp

    async def aask(
        self,
        msg: str,
        system_msgs: Optional[list[str]] = None,
        format_msgs: Optional[list[dict[str, str]]] = None,
        images: Optional[Union[str, list[str]]] = None,
        timeout=USE_CONFIG_TIMEOUT,
    ) -> str:
        # system_msgs, format_msgs and images are accepted for interface
        # compatibility but are not forwarded to the ZTE AI endpoint.
        return await self.ask(msg, timeout=self.get_timeout(timeout))

    async def _achat_completion(self, messages: list[dict], timeout=USE_CONFIG_TIMEOUT):
        """Dummy implementation of the abstract method in the base class."""

    async def acompletion(self, messages: list[dict], timeout=USE_CONFIG_TIMEOUT):
        """Dummy implementation of the abstract method in the base class."""
        return []

    async def _achat_completion_stream(self, messages: list[dict], timeout: int = USE_CONFIG_TIMEOUT) -> str:
        """Dummy implementation of the abstract method in the base class."""

    async def acompletion_text(self, messages: list[dict], stream=False, timeout=USE_CONFIG_TIMEOUT) -> str:
        """Dummy implementation of the abstract method in the base class."""
        return ""

    def get_timeout(self, timeout: int) -> int:
        return timeout or LLM_API_TIMEOUT
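A minimal usage sketch of the new adaptor, assuming the LLMConfig fields read above (app_id, api_key, domain, access_key, model) are populated and that api_type is how LLMConfig selects the provider, as with the other adaptors; all values are placeholders:

    import asyncio

    from metagpt.configs.llm_config import LLMConfig, LLMType
    from metagpt.provider.zteai_api import ZTEaiLLM


    async def main():
        config = LLMConfig(
            api_type=LLMType.ZTEAI,
            api_key="your-api-key",
            app_id="your-app-id",
            domain="your-employee-number",   # sent as the X-Emp-No header
            access_key="your-auth-token",    # sent as the X-Auth-Value header
            model="your-model-name",
        )
        llm = ZTEaiLLM(config)
        print(await llm.aask("Hello, who are you?"))


    asyncio.run(main())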
