From 1d690416e6f93837900677049fd7c88c2a761e07 Mon Sep 17 00:00:00 2001 From: wangchongshi Date: Fri, 14 Jun 2024 15:56:54 +0800 Subject: [PATCH] improvement: clarify the `max_tokens` overflow error message in prompt_util. --- agentuniverse/base/util/prompt_util.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/agentuniverse/base/util/prompt_util.py b/agentuniverse/base/util/prompt_util.py index e85a8ab8..cc0b0612 100644 --- a/agentuniverse/base/util/prompt_util.py +++ b/agentuniverse/base/util/prompt_util.py @@ -164,8 +164,8 @@ def process_llm_token(agent_llm: LLM, lc_prompt_template, profile: dict, planner input_tokens = agent_llm.max_context_length() - agent_llm.max_tokens if input_tokens <= 0: - raise Exception("The `max_tokens` in the llm configuration is the maximum output number of tokens, " - "the current `max_tokens` is greater than the context length of the LLM model.") + raise Exception("The current output max tokens limit is greater than or equal to the context length of the LLM model; " + "please adjust it by editing the `max_tokens` parameter in the llm yaml.") if prompt_tokens <= input_tokens: return