Merge pull request #848 from wsbao/wsbao
Fixed a few more bugs in using the OpenAI API, Azure OpenAI API, and OpenAI-compatible custom APIs
KillianLucas authored Dec 24, 2023
2 parents cb0023c + f3dc9a2 commit 52b1ba0
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions interpreter/terminal_interface/start_terminal_interface.py
@@ -123,7 +123,7 @@ def start_terminal_interface(interpreter):
     {
         "name": "api_version",
         "nickname": "av",
-        "help_text": "optionally set the API key for your llm calls (this will override environment variables)",
+        "help_text": "optionally set the API version for your llm calls (this will override environment variables)",
         "type": str,
         "attribute": {"object": interpreter.llm, "attr_name": "api_version"},
     },
@@ -584,7 +584,8 @@ def start_terminal_interface(interpreter):
     # If we've set a custom api base, we want it to be sent in an openai compatible way.
     # So we need to tell LiteLLM to do this by changing the model name:
     if interpreter.llm.api_base:
-        if not interpreter.llm.model.lower().startswith("openai/"):
+        if not interpreter.llm.model.lower().startswith("openai/") and \
+            not interpreter.llm.model.lower().startswith("azure/"):
             interpreter.llm.model = "openai/" + interpreter.llm.model
 
     # If --conversations is used, run conversation_navigator
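
The second hunk is the routing fix: LiteLLM chooses how to call a custom endpoint from the model-name prefix, so when a custom api_base is set the model is prefixed with "openai/" to force OpenAI-compatible requests, but a model that already carries an "azure/" prefix must keep it or Azure-specific handling is lost. Below is a minimal standalone sketch of that logic; the helper name normalize_model_name and the example endpoints are hypothetical and not part of the repository.

from typing import Optional

def normalize_model_name(model: str, api_base: Optional[str]) -> str:
    # Sketch of the prefix logic in the diff above (hypothetical helper).
    # With a custom api_base, LiteLLM is told to treat the endpoint as
    # OpenAI-compatible by prefixing the model name with "openai/",
    # unless the model already has an explicit provider prefix such as
    # "openai/" or "azure/" (the case this commit fixes).
    if api_base and not model.lower().startswith(("openai/", "azure/")):
        return "openai/" + model
    return model

# Azure models keep their prefix even when a custom api_base is set,
# while unprefixed models get routed as OpenAI-compatible.
assert normalize_model_name("azure/gpt-4", "https://example.openai.azure.com") == "azure/gpt-4"
assert normalize_model_name("my-local-model", "http://localhost:8080/v1") == "openai/my-local-model"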
