Update error message and change argument parsing
davidmezzetti committed Dec 9, 2023
1 parent 3dab4d8 commit a515d30
Showing 2 changed files with 3 additions and 3 deletions.
src/python/txtai/pipeline/llm/litellm.py: 2 changes (1 addition, 1 deletion)
@@ -46,7 +46,7 @@ def __init__(self, path, template=None, **kwargs):
         super().__init__(path, template, **kwargs)
 
         if not LITELLM:
-            raise ImportError('LiteLLM is not available - install "llm" extra to enable')
+            raise ImportError('LiteLLM is not available - install "pipeline" extra to enable')
 
         # Register prompt template
         self.register(path)
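For context on the first change: the LITELLM flag checked above typically comes from a guarded import at the top of the module. The sketch below is an assumption about that pattern (only the flag name and the updated message text come from the diff; require_litellm is a hypothetical helper); the new message points users at the "pipeline" extra, e.g. pip install txtai[pipeline].

    # Minimal sketch, assuming the usual try/except import guard that sets LITELLM
    try:
        import litellm  # noqa: F401  (assumed import target)
        LITELLM = True
    except ImportError:
        LITELLM = False

    def require_litellm():
        # Mirrors the check in LiteLLM.__init__ with the updated message
        if not LITELLM:
            raise ImportError('LiteLLM is not available - install "pipeline" extra to enable')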
src/python/txtai/pipeline/llm/llama.py: 4 changes (2 additions, 2 deletions)
@@ -40,13 +40,13 @@ def __init__(self, path, template=None, **kwargs):
         super().__init__(path, template, **kwargs)
 
         if not LLAMA_CPP:
-            raise ImportError('llama.cpp is not available - install "llm" extra to enable')
+            raise ImportError('llama.cpp is not available - install "pipeline" extra to enable')
 
         # Check if this is a local path, otherwise download from the HF Hub
         path = path if os.path.exists(path) else self.download(path)
 
         # Create llama.cpp instance
-        self.llm = Llama(path, verbose=kwargs.get("verbose", False), **kwargs)
+        self.llm = Llama(path, verbose=kwargs.pop("verbose", False), **kwargs)
 
     def execute(self, texts, maxlength, **kwargs):
         results = []
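The second change in llama.py swaps kwargs.get for kwargs.pop when reading verbose. My reading of why (not stated in the commit): get leaves "verbose" in kwargs, so it is forwarded again through **kwargs and the constructor receives the keyword twice. The standalone sketch below reproduces that with a stand-in function (target is hypothetical; it only mimics a callable such as llama_cpp.Llama that accepts verbose plus **kwargs).

    # Stand-in for a constructor that accepts a verbose keyword and **kwargs
    def target(path, verbose=False, **kwargs):
        return path, verbose, kwargs

    kwargs = {"verbose": True, "n_ctx": 2048}

    # With kwargs.get, "verbose" stays in kwargs and is also expanded by **kwargs:
    #   target("model.gguf", verbose=kwargs.get("verbose", False), **kwargs)
    #   -> TypeError: target() got multiple values for keyword argument 'verbose'

    # With kwargs.pop, "verbose" is removed before **kwargs is expanded, so the call succeeds:
    print(target("model.gguf", verbose=kwargs.pop("verbose", False), **kwargs))

Evaluation order makes this work: call arguments are evaluated left to right, so the pop runs, and removes "verbose", before **kwargs is unpacked.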
