Commit

Merge pull request langchain-ai#13 from shane-huang/llm-gpu
fix breaking change in interface
shane-huang authored Apr 25, 2024
2 parents 60e6ff6 + 2d842f0 commit dbccebd
Showing 2 changed files with 8 additions and 4 deletions.
6 changes: 4 additions & 2 deletions libs/community/langchain_community/llms/bigdl_llm.py
@@ -22,10 +22,11 @@ class BigdlLLM(IpexLLM):
     def from_model_id(
         cls,
         model_id: str,
+        model_kwargs: Optional[dict] = None,
+        *,
         tokenizer_id: Optional[str] = None,
         load_in_4bit: bool = True,
         load_in_low_bit: Optional[str] = None,
-        model_kwargs: Optional[dict] = None,
         **kwargs: Any,
     ) -> LLM:
         """
@@ -105,8 +106,9 @@ def from_model_id(
     def from_model_id_low_bit(
         cls,
         model_id: str,
-        tokenizer_id: Optional[str] = None,
         model_kwargs: Optional[dict] = None,
+        *,
+        tokenizer_id: Optional[str] = None,
         **kwargs: Any,
     ) -> LLM:
         """
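The change in both hunks follows the same pattern: model_kwargs goes back to the second positional slot, and the newer options move behind a bare *, which makes them keyword-only. Below is a minimal standalone sketch of that pattern; the function body, model id, and option values are placeholders, not the library code.

from typing import Any, Optional

def from_model_id(
    model_id: str,
    model_kwargs: Optional[dict] = None,
    *,  # parameters after this marker can only be passed by keyword
    tokenizer_id: Optional[str] = None,
    load_in_4bit: bool = True,
    load_in_low_bit: Optional[str] = None,
    **kwargs: Any,
) -> None:
    # Placeholder body; the real classmethod loads a model and returns an LLM.
    print(model_id, model_kwargs, tokenizer_id, load_in_4bit, load_in_low_bit)

# The pre-existing positional calling style keeps working:
from_model_id("some/model-id", {"max_length": 64})

# The newer options have to be named, so a positionally passed model_kwargs
# dict can no longer be captured by one of them:
from_model_id("some/model-id", {"max_length": 64}, load_in_low_bit="sym_int4")

Reading the diff this way, a dict passed as the second positional argument previously bound to tokenizer_id rather than model_kwargs, which appears to be the breaking change the commit message refers to.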
6 changes: 4 additions & 2 deletions libs/community/langchain_community/llms/ipex_llm.py
@@ -41,10 +41,11 @@ class Config:
     def from_model_id(
         cls,
         model_id: str,
+        model_kwargs: Optional[dict] = None,
+        *,
         tokenizer_id: Optional[str] = None,
         load_in_4bit: bool = True,
         load_in_low_bit: Optional[str] = None,
-        model_kwargs: Optional[dict] = None,
         **kwargs: Any,
     ) -> LLM:
         """
@@ -82,8 +83,9 @@ def from_model_id
     def from_model_id_low_bit(
         cls,
         model_id: str,
-        tokenizer_id: Optional[str] = None,
         model_kwargs: Optional[dict] = None,
+        *,
+        tokenizer_id: Optional[str] = None,
         **kwargs: Any,
     ) -> LLM:
         """
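For callers of the exported classes, the effect would look something like the snippet below. This is illustrative only: the model id, generation kwargs, and low-bit value are placeholders, and running it requires the ipex-llm runtime and a local copy of the model.

from langchain_community.llms import IpexLLM

# Positional model_kwargs, the calling style from before the GPU/low-bit
# options were added, keeps working:
llm = IpexLLM.from_model_id(
    "lmsys/vicuna-7b-v1.5",
    {"temperature": 0.3, "max_length": 64},
)

# The newer options now have to be passed by keyword:
llm_low_bit = IpexLLM.from_model_id(
    "lmsys/vicuna-7b-v1.5",
    {"temperature": 0.3, "max_length": 64},
    load_in_low_bit="sym_int4",  # example value only
)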
