-
Notifications
You must be signed in to change notification settings - Fork 6
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: Use Taskprocessing TextToText provider as LLM
Signed-off-by: Marcel Klehr <mklehr@gmx.net>
- Loading branch information
1 parent
b24ef9c
commit 78cd111
Showing
8 changed files
with
95 additions
and
4 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,83 @@ | ||
import json | ||
import time | ||
from typing import Any, Dict, List, Optional | ||
|
||
from nc_py_api import Nextcloud | ||
from langchain_core.callbacks.manager import CallbackManagerForLLMRun | ||
from langchain_core.language_models.llms import LLM | ||
|
||
def get_model_for(model_type: str, model_config: dict):
    """Return a model instance for the requested model type, or None.

    Args:
        model_type: Kind of model requested; only ``'llm'`` is supported.
        model_config: Configuration mapping; ``None`` disables the model.

    Returns:
        A ``CustomLLM`` when ``model_type`` is ``'llm'`` and a config is
        provided, otherwise ``None``.
    """
    if model_config is None:
        return None
    return CustomLLM() if model_type == 'llm' else None
|
||
class CustomLLM(LLM):
    """A custom LLM that delegates generation to Nextcloud's TaskProcessing
    text2text provider.

    The prompt is scheduled as a ``core:text2text`` task via the OCS API and
    the task is polled every 5 seconds until it reaches a terminal state.
    """

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Run the LLM on the given input.

        Args:
            prompt: The prompt to generate from.
            stop: Stop words; not forwarded to the remote provider (ignored).
            run_manager: Callback manager for the run (unused here).
            **kwargs: Arbitrary additional keyword arguments (unused here).

        Returns:
            The model output as a string; does not include the prompt.

        Raises:
            RuntimeError: If the TaskProcessing task ends in ``STATUS_FAILED``.
        """
        nc = Nextcloud()

        # Schedule an asynchronous text2text task with the provider.
        response = nc.ocs("POST", "/ocs/v1.php/taskprocessing/schedule", json={
            "type": "core:text2text",
            "appId": "context_chat_backend",
            "input": {
                "input": prompt
            }
        })

        task_id = response["task"]["id"]

        # Poll until the task reaches a terminal state.
        # NOTE(review): there is no overall timeout — a task stuck in a
        # non-terminal state blocks this call forever; consider bounding
        # the number of polls.
        while response['task']['status'] not in ('STATUS_SUCCESSFUL', 'STATUS_FAILED'):
            time.sleep(5)
            response = nc.ocs("GET", f"/ocs/v1.php/taskprocessing/task/{task_id}")

        if response['task']['status'] == 'STATUS_FAILED':
            raise RuntimeError('Nextcloud TaskProcessing Task failed')

        return response['task']['output']['output']

    @property
    def _identifying_params(self) -> Dict[str, Any]:
        """Return a dictionary of identifying parameters."""
        return {
            # The model name allows users to specify custom token counting
            # rules in LLM monitoring applications (e.g., in LangSmith users
            # can provide per token pricing for their model and monitor
            # costs for the given LLM.)
            "model_name": "NextcloudTextToTextProvider",
        }

    @property
    def _llm_type(self) -> str:
        """Get the type of language model used by this chat model. Used for logging purposes only."""
        # Fixed typo: was "nc_texttotetx".
        return "nc_texttotext"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters