From 71c54a6799ab67cd2c5c44e22ce274819681d32f Mon Sep 17 00:00:00 2001 From: coolCatalyst Date: Wed, 29 Nov 2023 23:54:39 +0100 Subject: [PATCH] fix(api): fixed issue with name function and improved prompt (#1759) # Description Please include a summary of the changes and the related issue. Please also include relevant motivation and context. ## Checklist before requesting a review Please delete options that are not relevant. - [ ] My code follows the style guidelines of this project - [ ] I have performed a self-review of my code - [ ] I have commented hard-to-understand areas - [ ] I have ideally added tests that prove my fix is effective or that my feature works - [ ] New and existing unit tests pass locally with my changes - [ ] Any dependent changes have been merged ## Screenshots (if appropriate): --- backend/llm/api_brain_qa.py | 30 +++++++++++++++------------ backend/llm/utils/make_api_request.py | 7 ------- 2 files changed, 17 insertions(+), 20 deletions(-) diff --git a/backend/llm/api_brain_qa.py b/backend/llm/api_brain_qa.py index d2352d84..81636acb 100644 --- a/backend/llm/api_brain_qa.py +++ b/backend/llm/api_brain_qa.py @@ -53,7 +53,13 @@ async def make_completion( messages, functions, brain_id: UUID, - ): + recursive_count = 0, + ): + if recursive_count > 5: + yield "🧠🧠" + yield "The assistant is having issues and took more than 5 calls to the API. Please try again later or try another instruction." 
+ return + yield "🧠🧠" response = completion( model=self.model, @@ -71,22 +77,18 @@ async def make_completion( } for chunk in response: finish_reason = chunk.choices[0].finish_reason - if finish_reason == "stop": break if "function_call" in chunk.choices[0].delta and chunk.choices[0].delta["function_call"]: - if "name" in chunk.choices[0].delta["function_call"]: - function_call["name"] = chunk.choices[0].delta["function_call"][ - "name" - ] - if "arguments" in chunk.choices[0].delta["function_call"]: + if chunk.choices[0].delta["function_call"].name: + function_call["name"] = chunk.choices[0].delta["function_call"].name + if chunk.choices[0].delta["function_call"].arguments: function_call["arguments"] += chunk.choices[0].delta[ "function_call" - ]["arguments"] + ].arguments elif finish_reason == "function_call": try: - logger.info(f"Function call: {function_call}") arguments = json.loads(function_call["arguments"]) except Exception: @@ -104,18 +106,20 @@ async def make_completion( status_code=400, detail=f"Error while calling API: {e}", ) - + + function_name = function_call["name"] messages.append( { "role": "function", - "name": str(brain_id), - "content": api_call_response, + "name": function_call["name"], + "content": f"The function {function_name} was called and gave the following answer: (data from function) {api_call_response} (end of data from function). Don't call this function again unless there was an error or it is extremely necessary and asked specifically by the user.", } ) async for value in self.make_completion( messages=messages, functions=functions, brain_id=brain_id, + recursive_count=recursive_count + 1, ): yield value @@ -142,7 +146,7 @@ async def generate_stream(self, chat_id: UUID, question: ChatQuestion): if not brain: raise HTTPException(status_code=404, detail="Brain not found") - prompt_content = "You'are a helpful assistant which can call APIs. Feel free to call the API when you need to. Don't force APIs call, do it when necessary. 
If it seems like you should call the API and there are missing parameters, ask user for them." + prompt_content = "You are a helpful assistant that can access functions to help answer questions. If there is information missing in the question, you can ask follow-up questions to get more information from the user. Once all the information is available, you can call the function to get the answer." if self.prompt_to_use: prompt_content += self.prompt_to_use.content diff --git a/backend/llm/utils/make_api_request.py b/backend/llm/utils/make_api_request.py index 874e4083..86ce0a9c 100644 --- a/backend/llm/utils/make_api_request.py +++ b/backend/llm/utils/make_api_request.py @@ -30,13 +30,6 @@ def get_api_call_response_as_text( headers=headers or None, data=json.dumps(params) or None, ) - logger.info("🔥🔥🔥🔥🔥") - logger.info(f"API call response: {response.text}") - logger.info(f'API call search params: {search_params}') - logger.info(f"API call response status code: {response.status_code}") - logger.info(f"API call url with search params: {api_url_with_search_params}") - logger.info(f"API call headers: {headers}") - logger.info(f"API Response: {response.text}") return response.text except Exception as e: logger.error(f"Error calling API: {e}")