Fix: readme (#225)
Fix readme:
1. Replace modelscope-agent-7b with qwen-72b.
2. Remove the modelscope_ prefix from tool names.
3. Fix the remote URL returned by tools.

---------

Co-authored-by: suluyan.sly <suluyan.sly@alibaba-inc.com>
suluyana and suluyan.sly authored Jan 2, 2024
1 parent 5e91bbe commit 0a88717
Showing 27 changed files with 342 additions and 76 deletions.
19 changes: 11 additions & 8 deletions README.md
@@ -62,37 +62,40 @@ To use modelscope-agent, all you need is to instantiate an `AgentExecutor` object

```Python
import os

from modelscope.utils.config import Config
from modelscope_agent.llm import LLMFactory
from modelscope_agent.agent import AgentExecutor
- from modelscope_agent.prompt import MSPromptGenerator

# get cfg from file, refer the example in config folder
model_cfg_file = os.getenv('MODEL_CONFIG_FILE', 'config/cfg_model_template.json')
model_cfg = Config.from_file(model_cfg_file)
tool_cfg_file = os.getenv('TOOL_CONFIG_FILE', 'config/cfg_tool_template.json')
tool_cfg = Config.from_file(tool_cfg_file)


# instantiation LLM
- model_name = 'modelscope-agent-7b'
- llm = LLMFactory.build_llm(model_name, model_cfg)
+ model_name = 'qwen-72b'

- # prompt generator
- prompt_generator = MSPromptGenerator()
+ print('To use the qwen-72b model, you need to enter a DashScope token, which can be obtained as follows: 1. Register and log in at https://dashscope.aliyun.com 2. Open the Model Square, select Tongyi Qianwen 72B and apply for access; approval is expected to take about half a day')
+ os.environ['DASHSCOPE_API_KEY'] = input()

+ llm = LLMFactory.build_llm(model_name, model_cfg)

# instantiation agent
- agent = AgentExecutor(llm, tool_cfg, prompt_generator=prompt_generator)

+ agent = AgentExecutor(llm, tool_cfg)
```

- Single-step & Multi-step tool-use

```Python
# Single-step tool-use
- agent.run('使用地址识别模型,从下面的地址中找到省市区等元素,地址:浙江杭州市江干区九堡镇三村村一区', remote=True)
+ agent.run("I want to see cute kittens", remote=True)

# Multi-step tool-use
+ print('The built-in voice generation and video generation capabilities are deployed in ModelScope. You need to enter a ModelScope token, which can be obtained here: https://modelscope.cn/my/myaccesstoken')
+ os.environ['MODELSCOPE_API_TOKEN'] = input()

agent.reset()
agent.run('写一篇关于Vision Pro VR眼镜的20字宣传文案,并用女声读出来,同时生成个视频看看', remote=True)
```
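A note for readers following the updated quick start: the same flow can be run non-interactively by exporting the key in advance. The sketch below assumes the `LLMFactory`/`AgentExecutor` API shown in the diff and reads `DASHSCOPE_API_KEY` from the environment instead of calling `input()`.

```Python
import os

from modelscope.utils.config import Config
from modelscope_agent.agent import AgentExecutor
from modelscope_agent.llm import LLMFactory

# Assumption: DASHSCOPE_API_KEY was exported in the shell, so the interactive
# input() prompt from the README is not needed here.
if 'DASHSCOPE_API_KEY' not in os.environ:
    raise RuntimeError('export DASHSCOPE_API_KEY before running this example')

model_cfg = Config.from_file(
    os.getenv('MODEL_CONFIG_FILE', 'config/cfg_model_template.json'))
tool_cfg = Config.from_file(
    os.getenv('TOOL_CONFIG_FILE', 'config/cfg_tool_template.json'))

llm = LLMFactory.build_llm('qwen-72b', model_cfg)
agent = AgentExecutor(llm, tool_cfg)

# Single-step tool use against the remote ModelScope inference API.
agent.run('I want to see cute kittens', remote=True)
```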
19 changes: 12 additions & 7 deletions README_CN.md
@@ -66,10 +66,10 @@ The Notebook environment is easy to use; you only need to follow the steps below (note: currently

```Python
import os

from modelscope.utils.config import Config
from modelscope_agent.llm import LLMFactory
from modelscope_agent.agent import AgentExecutor
- from modelscope_agent.prompt import MSPromptGenerator

# get cfg from file, refer the example in config folder
model_cfg_file = os.getenv('MODEL_CONFIG_FILE', 'config/cfg_model_template.json')
@@ -78,23 +78,28 @@ tool_cfg_file = os.getenv('TOOL_CONFIG_FILE', 'config/cfg_tool_template.json')
tool_cfg = Config.from_file(tool_cfg_file)

# instantiation LLM
- model_name = 'modelscope-agent-7b'
- llm = LLMFactory.build_llm(model_name, model_cfg)
+ model_name = 'qwen-72b'

+ print('To use the qwen-72b model, you need to enter a DashScope token, which can be obtained as follows: 1. Register and log in at https://dashscope.aliyun.com 2. Open the Model Square, select Tongyi Qianwen 72B and apply for access; approval is expected to take about half a day')
+ os.environ['DASHSCOPE_API_KEY'] = input()

- # prompt generator
- prompt_generator = MSPromptGenerator()
+ llm = LLMFactory.build_llm(model_name, model_cfg)

# instantiation agent
- agent = AgentExecutor(llm, tool_cfg, prompt_generator=prompt_generator)

+ agent = AgentExecutor(llm, tool_cfg)
```

- Single-step & multi-step tool use

```Python
# Single-step tool-use
- agent.run('使用地址识别模型,从下面的地址中找到省市区等元素,地址:浙江杭州市江干区九堡镇三村村一区', remote=True)
+ agent.run("I want to see cute kittens", remote=True)

# Multi-step tool-use
+ print('The built-in voice generation and video generation capabilities are deployed in ModelScope. You need to enter a ModelScope token, which can be obtained here: https://modelscope.cn/my/myaccesstoken')
+ os.environ['MODELSCOPE_API_TOKEN'] = input()

agent.reset()
agent.run('写一篇关于Vision Pro VR眼镜的20字宣传文案,并用女声读出来,同时生成个视频看看', remote=True)
```
3 changes: 2 additions & 1 deletion apps/agentfabric/builder_prompt.py
@@ -58,4 +58,5 @@ def __init__(self,
**kwargs):
super().__init__(
system_template=system_template,
- custom_starter_messages=custom_starter_messages)
+ custom_starter_messages=custom_starter_messages,
+ **kwargs)
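The one-line change above forwards `**kwargs` to the parent constructor so extra keyword arguments are no longer silently dropped. A self-contained sketch of the pattern, with hypothetical class names rather than the real modelscope-agent classes:

```Python
class BasePromptGenerator:
    def __init__(self, system_template='', **kwargs):
        self.system_template = system_template
        # Keep any extra options (e.g. length constraints) for later use.
        self.extra_options = kwargs


class BuilderPromptGenerator(BasePromptGenerator):
    def __init__(self, system_template='', custom_starter_messages=None, **kwargs):
        # Forwarding **kwargs keeps base-class keyword arguments working even
        # when the subclass does not know about them.
        super().__init__(system_template=system_template, **kwargs)
        self.custom_starter_messages = custom_starter_messages


gen = BuilderPromptGenerator(system_template='You are a helper.', language='zh')
print(gen.extra_options)  # {'language': 'zh'}
```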
16 changes: 8 additions & 8 deletions apps/agentfabric/config/tool_config.json
@@ -44,46 +44,46 @@
"is_active": true,
"use": false
},
"modelscope_text-address": {
"text-address": {
"name": "地址解析",
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/mgeo_geographic_elements_tagging_chinese_base",
"use": false,
"is_active": true,
"is_remote_tool": true
},
"modelscope_text-ner": {
"text-ner": {
"name": "命名实体识别",
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/nlp_raner_named-entity-recognition_chinese-base-cmeee",
"use": false,
"is_active": true,
"is_remote_tool": true
},
"modelscope_speech-generation": {
"speech-generation": {
"name": "语音生成",
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/speech_sambert-hifigan_tts_zh-cn_16k",
"use": false,
"is_active": false,
"is_active": true,
"is_remote_tool": true
},
"modelscope_video-generation": {
"video-generation": {
"name": "视频生成",
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/text-to-video-synthesis",
"use": false,
"is_active": true,
"is_remote_tool": true
},
"modelscope_text-translation-en2zh": {
"text-translation-en2zh": {
"name": "英译中",
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/nlp_csanmt_translation_en2zh",
"use": false,
"is_active": false,
"is_remote_tool": true
},
"modelscope_text-translation-zh2en": {
"text-translation-zh2en": {
"name": "中译英",
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/nlp_csanmt_translation_zh2en",
"use": false,
"is_active": false,
"is_active": true,
"is_remote_tool": true
}
}
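Since the `modelscope_` prefix is gone, code that still looks tools up by the old keys will miss them. A hedged sketch of how a config consumer could tolerate both spellings during migration; the helper name is illustrative and not part of modelscope-agent:

```Python
import json


def load_active_tools(path='apps/agentfabric/config/tool_config.json'):
    """Return active tool entries, accepting both old and new key names."""
    with open(path, encoding='utf-8') as f:
        cfg = json.load(f)
    tools = {}
    for name, entry in cfg.items():
        # Normalise legacy keys such as 'modelscope_text-address' to 'text-address'.
        key = name[len('modelscope_'):] if name.startswith('modelscope_') else name
        if entry.get('is_active'):
            tools[key] = entry
    return tools


for name, entry in load_active_tools().items():
    print(name, entry.get('url', '<local tool>'))
```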
3 changes: 2 additions & 1 deletion apps/agentfabric/custom_prompt.py
@@ -99,7 +99,8 @@ def __init__(
assistant_template=assistant_template,
sep=sep,
llm=llm,
- length_constraint=length_constraint)
+ length_constraint=length_constraint,
+ **kwargs)

def _parse_role_config(self, config: dict):
prompt = 'You are playing as an AI-Agent, '
2 changes: 2 additions & 0 deletions apps/agentfabric/user_core.py
@@ -38,6 +38,8 @@ def init_user_chatbot_agent(uuid_str=''):
builder_cfg, model_cfg, tool_cfg, available_tool_list, plugin_cfg, available_plugin_list = parse_configuration(
uuid_str)
# set top_p and stop_words for role play
+ if 'generate_cfg' not in model_cfg[builder_cfg.model]:
+     model_cfg[builder_cfg.model]['generate_cfg'] = dict()
model_cfg[builder_cfg.model]['generate_cfg']['top_p'] = 0.5
model_cfg[builder_cfg.model]['generate_cfg']['stop'] = 'Observation'

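The two added lines guard against a model entry that has no `generate_cfg` section before `top_p` and `stop` are written into it. The same idea in isolation, shown with `dict.setdefault` on a plain dict rather than the real `Config` object:

```Python
model_cfg = {'qwen-72b': {'type': 'dashscope'}}  # no 'generate_cfg' yet
model_name = 'qwen-72b'

# Equivalent to the added guard: create the nested section only if it is missing.
generate_cfg = model_cfg[model_name].setdefault('generate_cfg', {})
generate_cfg['top_p'] = 0.5
generate_cfg['stop'] = 'Observation'

print(model_cfg[model_name]['generate_cfg'])
# {'top_p': 0.5, 'stop': 'Observation'}
```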
16 changes: 16 additions & 0 deletions config/cfg_model_template.json
@@ -9,6 +9,10 @@
"debug": false
}
},
"gpt-3.5-turbo": {
"type": "custom_llm",
"model": "gpt-3.5-turbo"
},
"qwen_plus": {
"type": "dashscope",
"model": "qwen-plus",
@@ -20,6 +24,18 @@
"debug": false
}
},
"qwen-max": {
"type": "dashscope",
"model": "qwen-max",
"length_constraint": {
"knowledge": 4000,
"input": 6000
},
"generate_cfg": {
"use_raw_prompt": true,
"top_p": 0.8
}
},
"custom_llm": {
"type": "custom_llm"
},
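With the `gpt-3.5-turbo` and `qwen-max` entries added, a model is selected by its key in this file. A minimal sketch of reading the new `qwen-max` entry, assuming the same `Config`/`LLMFactory` usage shown in the README:

```Python
import os

from modelscope.utils.config import Config
from modelscope_agent.llm import LLMFactory

model_cfg = Config.from_file(
    os.getenv('MODEL_CONFIG_FILE', 'config/cfg_model_template.json'))

# Inspect the newly added qwen-max entry before building the LLM.
print(model_cfg['qwen-max']['generate_cfg']['top_p'])           # 0.8
print(model_cfg['qwen-max']['length_constraint']['knowledge'])  # 4000

# Building a dashscope-backed LLM assumes DASHSCOPE_API_KEY is set.
llm = LLMFactory.build_llm('qwen-max', model_cfg)
```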
35 changes: 21 additions & 14 deletions config/cfg_tool_template.json
@@ -1,35 +1,35 @@
{
"modelscope_text-address": {
"text-address": {
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/mgeo_geographic_elements_tagging_chinese_base",
"use": true
"use": false
},
"modelscope_text-ner": {
"text-ner": {
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/nlp_raner_named-entity-recognition_chinese-base-cmeee",
"use": true
"use": false
},
"modelscope_text-ie": {
"text-ie": {
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/nlp_structbert_siamese-uie_chinese-base",
"use": true
"use": false
},
"modelscope_speech-generation": {
"speech-generation": {
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/speech_sambert-hifigan_tts_zh-cn_16k",
"use": true
},
"modelscope_video-generation": {
"video-generation": {
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/text-to-video-synthesis",
"use": true
},
"modelscope_image-chat": {
"image-chat": {
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/multi-modal_mplug_owl_multimodal-dialogue_7b",
"use": true
"use": false
},
"modelscope_text-translation-en2zh": {
"text-translation-en2zh": {
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/nlp_csanmt_translation_en2zh",
"use": true
"use": false
},
"modelscope_text-translation-zh2en": {
"text-translation-zh2en": {
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/damo/nlp_csanmt_translation_zh2en",
"use": true
"use": false
},
"image_gen": {
"url": "https://api-inference.modelscope.cn/api-inference/v1/models/AI-ModelScope/stable-diffusion-xl-base-1.0",
@@ -38,6 +38,13 @@
"use_safetensors": true
}
},
"code_interpreter": {
"name": "Code Interpreter",
"is_active": true,
"use": false,
"is_remote_tool": false,
"max_output": 2000
},
"amap_weather": {
"use": false,
"token": "need to be filled when you use weather"
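The new `code_interpreter` entry is a local tool (`is_remote_tool: false`) with a `max_output` budget of 2000. How that budget is enforced is not part of this diff; the helper below is a hypothetical sketch of the kind of truncation such a setting suggests:

```Python
def truncate_tool_output(output: str, max_output: int = 2000) -> str:
    """Hypothetical helper: clip long tool output to the configured budget."""
    if len(output) <= max_output:
        return output
    # Keep the head of the output and mark the truncation explicitly.
    return output[:max_output] + '\n... [output truncated]'


long_result = 'x' * 5000
print(len(truncate_tool_output(long_result)))  # 2000 plus the truncation marker
```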
13 changes: 7 additions & 6 deletions modelscope_agent/constant.py
@@ -2,12 +2,13 @@
MRKLActionParser, MsActionParser,
OpenAiFunctionsActionParser)
from modelscope_agent.prompt import (ChatGLMPromptGenerator, MessagesGenerator,
- MrklPromptGenerator, MSPromptGenerator)
+ MrklPromptGenerator, MSPromptGenerator,
+ QwenPromptGenerator)

DEFAULT_MODEL_CONFIG = {
'qwen': {
'en': {
- 'prompt_generator': MrklPromptGenerator,
+ 'prompt_generator': QwenPromptGenerator,
'action_parser': MRKLActionParser
},
'zh': {
@@ -24,11 +25,11 @@
'action_parser': ChatGLMActionParser
},
'gpt': {
- 'prompt_generator': MrklPromptGenerator,
- 'action_parser': MRKLActionParser
+ 'prompt_generator': 'MessagesGenerator',
+ 'action_parser': 'OpenAiFunctionsActionParser'
},
'openai': {
- 'prompt_generator': MrklPromptGenerator,
- 'action_parser': MRKLActionParser
+ 'prompt_generator': 'MessagesGenerator',
+ 'action_parser': 'OpenAiFunctionsActionParser'
}
}
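Note that the `gpt` and `openai` entries now hold registry-name strings while the other entries hold classes. How the agent resolves this mix is not shown in the diff; one hedged way to handle it, using the `prompt_generators` registry exported by `modelscope_agent.prompt`, is:

```Python
from modelscope_agent.prompt import prompt_generators


def resolve_prompt_generator(entry):
    """Accept either a class or a registry-name string from DEFAULT_MODEL_CONFIG."""
    if isinstance(entry, str):
        return prompt_generators[entry]
    return entry


# resolve_prompt_generator('MessagesGenerator') and
# resolve_prompt_generator(QwenPromptGenerator) both return a generator class.
```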
2 changes: 0 additions & 2 deletions modelscope_agent/output_wrapper.py
@@ -71,15 +71,13 @@ def __init__(self, image) -> None:
if os.path.isfile(image):
self._path = image
else:
- origin_image = image
self._path = self.get_remote_file(image, 'png')
try:
image = Image.open(self._path)
self._raw_data = image
except FileNotFoundError:
# Image store in remote server when use remote mode
raise FileNotFoundError(f'Invalid path: {image}')
- self._path = origin_image
else:
if not isinstance(image, Image.Image):
image = Image.fromarray(image.astype(np.uint8))
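The two deleted lines stopped the wrapper from overwriting `self._path` with the original remote URL after the file had already been downloaded, which is what "fix the remote URL returned by tools" in the commit message refers to. A simplified sketch of the corrected flow; `get_remote_file` is left as a placeholder and this is not the full class:

```Python
import os

from PIL import Image


class ImageWrapperSketch:
    """Simplified sketch: keep the local download path instead of the remote URL."""

    def __init__(self, image: str) -> None:
        if os.path.isfile(image):
            self._path = image
        else:
            # Download the remote file once and keep pointing at the local copy.
            self._path = self.get_remote_file(image, 'png')
            try:
                self._raw_data = Image.open(self._path)
            except FileNotFoundError:
                raise FileNotFoundError(f'Invalid path: {image}')
            # The old code reset self._path back to the remote URL here,
            # which broke later consumers expecting a local file.

    def get_remote_file(self, url: str, suffix: str) -> str:
        # Placeholder for the real download helper used by modelscope-agent.
        raise NotImplementedError
```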
4 changes: 3 additions & 1 deletion modelscope_agent/prompt/__init__.py
@@ -3,12 +3,14 @@
from .mrkl_prompt import MrklPromptGenerator
from .ms_prompt import MSPromptGenerator
from .prompt import PromptGenerator
+ from .qwen_prompt import QwenPromptGenerator
from .raw_prompt_builder import build_raw_prompt

prompt_generators = {
'ChatGLMPromptGenerator': ChatGLMPromptGenerator,
'MessagesGenerator': MessagesGenerator,
'MrklPromptGenerator': MrklPromptGenerator,
'MSPromptGenerator': MSPromptGenerator,
- 'PromptGenerator': PromptGenerator
+ 'PromptGenerator': PromptGenerator,
+ 'QwenPromptGenerator': QwenPromptGenerator
}
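With `QwenPromptGenerator` registered here, it can be resolved by its string name as well as imported directly. A short usage sketch; the commented `AgentExecutor` call is illustrative only:

```Python
from modelscope_agent.prompt import QwenPromptGenerator, prompt_generators

# The same class is now reachable both by direct import and by registry name.
assert prompt_generators['QwenPromptGenerator'] is QwenPromptGenerator

# An explicit generator can still be handed to the agent if the defaults from
# modelscope_agent/constant.py are not wanted, e.g.:
#   agent = AgentExecutor(llm, tool_cfg, prompt_generator=QwenPromptGenerator())
```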
3 changes: 2 additions & 1 deletion modelscope_agent/prompt/chatglm3_prompt.py
@@ -33,7 +33,8 @@ def __init__(self,
exec_template=exec_template,
assistant_template=assistant_template,
sep=sep,
- length_constraint=length_constraint)
+ length_constraint=length_constraint,
+ **kwargs)

def get_tool_str(self, tool_list):
tool_json = json.loads('['
3 changes: 2 additions & 1 deletion modelscope_agent/prompt/messages_prompt.py
@@ -43,7 +43,8 @@ def __init__(self,
exec_template=exec_template,
assistant_template=assistant_template,
sep=sep,
- length_constraint=length_constraint)
+ length_constraint=length_constraint,
+ **kwargs)
self.custom_starter_messages = kwargs.get('custom_starter_messages',
None)

3 changes: 2 additions & 1 deletion modelscope_agent/prompt/mrkl_prompt.py
@@ -57,7 +57,8 @@ def __init__(self,
assistant_template=assistant_template,
sep=sep,
llm=llm,
- length_constraint=length_constraint)
+ length_constraint=length_constraint,
+ **kwargs)

def init_prompt(self, task, tool_list, knowledge_list, **kwargs):
if len(self.history) == 0:
3 changes: 2 additions & 1 deletion modelscope_agent/prompt/ms_prompt.py
@@ -32,4 +32,5 @@ def __init__(self,
exec_template=exec_template,
assistant_template=assistant_template,
sep=sep,
- length_constraint=length_constraint)
+ length_constraint=length_constraint,
+ **kwargs)