Skip to content

Commit

Permalink
Use a different model to guess pip package names and respond
Browse files Browse the repository at this point in the history
If we finetune a model, it will learn to always respond with Python code
surrounded by backticks, so it won't be able to produce pip package names
well. We need to use a different model for that task.
  • Loading branch information
jakethekoenig committed Feb 16, 2024
1 parent dd37f90 commit 3cd5066
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 1 deletion.
2 changes: 2 additions & 0 deletions src/rawdog/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
"llm_api_key": None,
"llm_base_url": None,
"llm_model": "gpt-4-turbo-preview",
    "pip_model": None,
"llm_custom_provider": None,
"llm_temperature": 1.0,
"retries": 2,
Expand All @@ -21,6 +22,7 @@
setting_descriptions = {
"retries": "If the script fails, retry this many times before giving up.",
"leash": "Print the script before executing and prompt for confirmation.",
"pip_model": "The model to use to get package name from import name.",
}


Expand Down
9 changes: 8 additions & 1 deletion src/rawdog/llm_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,14 @@ def add_message(self, role: str, content: str):

def get_python_package(self, import_name: str):
base_url = self.config.get("llm_base_url")
model = self.config.get("llm_model")
model = self.config.get("pip_model")
llm_model = self.config.get("llm_model")
if model is None:
if "ft:" in llm_model or "rawdog" in llm_model or "abante" in llm_model:
model = "gpt-3.5-turbo"
else:
model = llm_model

custom_llm_provider = self.config.get("llm_custom_provider")

messages = [
Expand Down

0 comments on commit 3cd5066

Please sign in to comment.