Skip to content

Commit

Permalink
fix(api): fixed issue with name function and improved prompt (#1759)
Browse files Browse the repository at this point in the history
# Description

Please include a summary of the changes and the related issue. Please
also include relevant motivation and context.

## Checklist before requesting a review

Please delete options that are not relevant.

- [ ] My code follows the style guidelines of this project
- [ ] I have performed a self-review of my code
- [ ] I have commented hard-to-understand areas
- [ ] I have ideally added tests that prove my fix is effective or that
my feature works
- [ ] New and existing unit tests pass locally with my changes
- [ ] Any dependent changes have been merged

## Screenshots (if appropriate):
  • Loading branch information
coolCatalyst committed Nov 29, 2023
1 parent 66350ab commit 71c54a6
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 20 deletions.
30 changes: 17 additions & 13 deletions backend/llm/api_brain_qa.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,13 @@ async def make_completion(
messages,
functions,
brain_id: UUID,
):
recursive_count = 0,
):
if recursive_count > 5:
yield "🧠<Deciding what to do>🧠"
yield "The assistant is having issues and took more than 5 calls to the API. Please try again later or an other instruction."
return

yield "🧠<Deciding what to do>🧠"
response = completion(
model=self.model,
Expand All @@ -71,22 +77,18 @@ async def make_completion(
}
for chunk in response:
finish_reason = chunk.choices[0].finish_reason

if finish_reason == "stop":
break
if "function_call" in chunk.choices[0].delta and chunk.choices[0].delta["function_call"]:
if "name" in chunk.choices[0].delta["function_call"]:
function_call["name"] = chunk.choices[0].delta["function_call"][
"name"
]
if "arguments" in chunk.choices[0].delta["function_call"]:
if chunk.choices[0].delta["function_call"].name:
function_call["name"] = chunk.choices[0].delta["function_call"].name
if chunk.choices[0].delta["function_call"].arguments:
function_call["arguments"] += chunk.choices[0].delta[
"function_call"
]["arguments"]
].arguments

elif finish_reason == "function_call":
try:
logger.info(f"Function call: {function_call}")
arguments = json.loads(function_call["arguments"])

except Exception:
Expand All @@ -104,18 +106,20 @@ async def make_completion(
status_code=400,
detail=f"Error while calling API: {e}",
)


function_name = function_call["name"]
messages.append(
{
"role": "function",
"name": str(brain_id),
"content": api_call_response,
"name": function_call["name"],
"content": f"The function {function_name} was called and gave The following answer:(data from function) {api_call_response} (end of data from function). Don't call this function again unless there was an error or extremely necessary and asked specifically by the user.",
}
)
async for value in self.make_completion(
messages=messages,
functions=functions,
brain_id=brain_id,
recursive_count=recursive_count + 1,
):
yield value

Expand All @@ -142,7 +146,7 @@ async def generate_stream(self, chat_id: UUID, question: ChatQuestion):
if not brain:
raise HTTPException(status_code=404, detail="Brain not found")

prompt_content = "You'are a helpful assistant which can call APIs. Feel free to call the API when you need to. Don't force APIs call, do it when necessary. If it seems like you should call the API and there are missing parameters, ask user for them."
prompt_content = "You are a helpful assistant that can access functions to help answer questions. If there are information missing in the question, you can ask follow up questions to get more information to the user. Once all the information is available, you can call the function to get the answer."

if self.prompt_to_use:
prompt_content += self.prompt_to_use.content
Expand Down
7 changes: 0 additions & 7 deletions backend/llm/utils/make_api_request.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,13 +30,6 @@ def get_api_call_response_as_text(
headers=headers or None,
data=json.dumps(params) or None,
)
logger.info("🔥🔥🔥🔥🔥")
logger.info(f"API call response: {response.text}")
logger.info(f'API call search params: {search_params}')
logger.info(f"API call response status code: {response.status_code}")
logger.info(f"API call url with search params: {api_url_with_search_params}")
logger.info(f"API call headers: {headers}")
logger.info(f"API Response: {response.text}")
return response.text
except Exception as e:
logger.error(f"Error calling API: {e}")
Expand Down

0 comments on commit 71c54a6

Please sign in to comment.