From b8d8acce5c4f5688e87b103399c63b0551707385 Mon Sep 17 00:00:00 2001
From: wangzhihong
Date: Tue, 3 Sep 2024 11:38:44 +0800
Subject: [PATCH] fix some bugs and update version to v0.2.2 (#211)

---
 lazyllm/cli/run.py                  | 4 ++--
 lazyllm/tools/rag/retriever.py      | 5 ++++-
 lazyllm/tools/webpages/webmodule.py | 8 +++++++-
 pyproject.toml                      | 2 +-
 4 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/lazyllm/cli/run.py b/lazyllm/cli/run.py
index c124d84e..0f480089 100644
--- a/lazyllm/cli/run.py
+++ b/lazyllm/cli/run.py
@@ -10,13 +10,13 @@ def chatbot(llm):
     lazyllm.WebModule(llm, port=range(20000, 25000)).start().wait()
 
 
-def rag(llm, documents):
+def rag(llm, docpath):
     import lazyllm
     from lazyllm import pipeline, parallel, bind, SentenceSplitter, Document, Retriever, Reranker
 
     prompt = ('You will play the role of an AI Q&A assistant and complete a dialogue task. In this '
               'task, you need to provide your answer based on the given context and question.')
-    documents = Document(dataset_path="rag_master", embed=lazyllm.OnlineEmbeddingModule(), create_ui=False)
+    documents = Document(dataset_path=docpath, embed=lazyllm.OnlineEmbeddingModule(), create_ui=False)
     documents.create_node_group(name="sentences", transform=SentenceSplitter, chunk_size=1024, chunk_overlap=100)
 
     with pipeline() as ppl:
diff --git a/lazyllm/tools/rag/retriever.py b/lazyllm/tools/rag/retriever.py
index 7fc9d72e..84ea885a 100644
--- a/lazyllm/tools/rag/retriever.py
+++ b/lazyllm/tools/rag/retriever.py
@@ -1,4 +1,4 @@
-from lazyllm import ModuleBase
+from lazyllm import ModuleBase, pipeline
 from .store import DocNode
 from typing import List
 
@@ -25,6 +25,9 @@ def __init__(
         self.topk = topk
         self.similarity_kw = kwargs # kw parameters
 
+    def _get_post_process_tasks(self):
+        return pipeline(lambda *a: self('Test Query'))
+
     def forward(self, query: str) -> List[DocNode]:
         return self.doc.forward(
             func_name="retrieve",
diff --git a/lazyllm/tools/webpages/webmodule.py b/lazyllm/tools/webpages/webmodule.py
index 01696b65..f691ad02 100644
--- a/lazyllm/tools/webpages/webmodule.py
+++ b/lazyllm/tools/webpages/webmodule.py
@@ -16,7 +16,7 @@ import platform
 
 import lazyllm
-from lazyllm import LOG, globals, FileSystemQueue, OnlineChatModule, TrainableModule, ForkProcess
+from lazyllm import LOG, globals, FileSystemQueue, OnlineChatModule, TrainableModule, ForkProcess, pipeline
 from ...module.module import ModuleBase
 
 
@@ -356,6 +356,9 @@ def _update(self, *, mode=None, recursive=True):
             self._work()
         return self
 
+    def _get_post_process_tasks(self):
+        return pipeline(self._print_url)
+
     def wait(self):
         if hasattr(self, 'p'):
             return self.p.join()
@@ -381,3 +384,6 @@ def _verify_port_access(self, port):
         with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
             result = s.connect_ex(('localhost', port))
             return result != 0
+
+    def _print_url(self):
+        LOG.success(f'LazyLLM webmodule launched successfully: Running on local URL: {self.url}', flush=True)
diff --git a/pyproject.toml b/pyproject.toml
index 615d590f..0b75307f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "lazyllm"
-version = "0.2.1"
+version = "0.2.2"
 description = "A Low-code Development Tool For Building Multi-agent LLMs Applications."
 authors = ["wangzhihong "]
 license = "Apache-2.0 license"
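
Usage sketch for the run.py change: with this patch the RAG dataset path is supplied by the caller
instead of being hard-coded to "rag_master". Below is a minimal sketch of driving the patched helper
directly, assuming lazyllm v0.2.2 is installed, that the function is importable as lazyllm.cli.run.rag,
and that online chat and embedding credentials are configured; '/path/to/rag_master' is a placeholder
directory, not a path from this repository.

    import lazyllm
    from lazyllm.cli.run import rag            # helper whose signature changed above

    # Assumption: OnlineChatModule picks up API credentials from the environment.
    llm = lazyllm.OnlineChatModule()
    rag(llm, docpath='/path/to/rag_master')    # dataset path is now an argument,
                                               # no longer fixed to "rag_master"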