Skip to content

Commit

Permalink
add caching of openrouter model details
Browse files Browse the repository at this point in the history
  • Loading branch information
JV committed Aug 15, 2023
1 parent 56f6ae0 commit fa5e4bd
Showing 1 changed file with 6 additions and 2 deletions.
8 changes: 6 additions & 2 deletions aider/models/openrouter.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import tiktoken
from .model import Model

cached_model_details = None


class OpenRouterModel(Model):
def __init__(self, name, openai):
Expand All @@ -19,8 +21,10 @@ def __init__(self, name, openai):
self.tokenizer = tiktoken.get_encoding("cl100k_base")

        # Cache the model list data (module-level) to speed up using multiple models
available_models = openai.Model.list().data
found = next((details for details in available_models if details.get('id') == name), None)
global cached_model_details
if cached_model_details == None:
cached_model_details = openai.Model.list().data
found = next((details for details in cached_model_details if details.get('id') == name), None)

if found:
self.max_context_tokens = int(found.context_length)
Expand Down

0 comments on commit fa5e4bd

Please sign in to comment.