diff --git a/build/lib/lyzr/voicebot/voicebot.py b/build/lib/lyzr/voicebot/voicebot.py
index 712c31f..13f18f6 100644
--- a/build/lib/lyzr/voicebot/voicebot.py
+++ b/build/lib/lyzr/voicebot/voicebot.py
@@ -74,10 +74,70 @@ def text_to_notes(self, text):
                 )
 
         # The system message acts as the prompt for the AI.
-        system_message = "You are an expert in taking down notes as bullet points and summarizing big conversations. You make sure no detail is left out."
+        system_message = '''You are an Expert NOTE-TAKER and SUMMARIZER. Your task is to CAPTURE and CONDENSE large conversations into precise bullet points, ensuring that NO DETAIL is overlooked.
+
+Here's your step-by-step guide:
+
+1. LISTEN attentively to the conversation, focusing on identifying the MAIN POINTS and supporting details.
+2. WRITE down KEYWORDS and PHRASES as bullet points in REAL TIME, making sure to include all relevant information.
+3. ORGANIZE your notes by categorizing them under thematic HEADINGS for clarity and ease of reference.
+4. REVIEW your bullet points for COMPLETENESS, ensuring you have captured all necessary aspects of the conversation.
+5. SUMMARIZE each section of your notes into concise statements that reflect the essence of the discussion.
+6. COMPILE these summaries into a coherent narrative or overview document, maintaining logical flow and coherence.
+7. EDIT this document for PRECISION, removing any redundancies while preserving the integrity of the information conveyed.
+
+Remember, I'm going to tip $300K for a BETTER SOLUTION!
+
+Now Take a Deep Breath.'''
+
+        # Format the user's message that will be sent to the model.
+        user_message = text
+        self.model.set_messages(
+            model_prompts=[
+                {"role": "system", "text": system_message},
+                {"role": "user", "text": user_message},
+            ]
+        )
+        # Use the LLM instance to communicate with OpenAI's API.
+        response = self.model.run()
+
+        # Parse the response to extract the notes.
+        notes = response.choices[0].message.content
+
+        return notes
+
+    def summarize(self, text):
+        if self.model.model_name != "gpt-4":
+            if self.model.model_type == "openai":
+                self.model = get_model(
+                    api_key=self.api_key,
+                    model_type=self.model.model_type,
+                    model_name="gpt-3.5-turbo",
+                )
+            else:
+                raise ValueError(
+                    "The summarize function only works with OpenAI's 'gpt-4' model."
+                )
+
+        # The system message acts as the prompt for the AI.
+        system_message = '''You are an Expert SUMMARIZER with a keen ability to CAPTURE ESSENTIAL DETAILS from extensive conversations. Your task is to CREATE a CONCISE SUMMARY of the given content, ensuring that ALL CRITICAL INFORMATION is included.
+
+Here's your step-by-step guide:
+
+1. CAREFULLY READ through the entire conversation to fully understand the context and main points.
+2. IDENTIFY and HIGHLIGHT the KEY THEMES, decisions, questions, and any action items discussed in the conversation.
+3. ORGANIZE these points into a LOGICAL STRUCTURE that reflects the progression of the conversation.
+4. WRITE a CLEAR and COHERENT summary that seamlessly integrates all significant details without superfluous information.
+5. REVIEW your summary to VERIFY that it accurately represents the original conversation and includes all pertinent data.
+
+You MUST ensure that no important detail is left out of your summary.
+
+Remember, I'm going to tip $300K for a BETTER SOLUTION!
+
+Now Take a Deep Breath.'''
 
         # Format the user's message that will be sent to the model.
-        user_message = f"Here is my conversation: {text}. Can you create bullet-point notes for this?"
+        user_message = text
         self.model.set_messages(
             model_prompts=[
                 {"role": "system", "text": system_message},
@@ -91,3 +151,4 @@ def text_to_notes(self, text):
         notes = response.choices[0].message.content
 
         return notes
+
\ No newline at end of file
diff --git a/dist/lyzr-0.1.30-py3-none-any.whl b/dist/lyzr-0.1.30-py3-none-any.whl
new file mode 100644
index 0000000..a15185e
Binary files /dev/null and b/dist/lyzr-0.1.30-py3-none-any.whl differ
diff --git a/dist/lyzr-0.1.30.tar.gz b/dist/lyzr-0.1.30.tar.gz
new file mode 100644
index 0000000..91be0fb
Binary files /dev/null and b/dist/lyzr-0.1.30.tar.gz differ
diff --git a/lyzr.egg-info/PKG-INFO b/lyzr.egg-info/PKG-INFO
index 796206b..98e631d 100644
--- a/lyzr.egg-info/PKG-INFO
+++ b/lyzr.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lyzr
-Version: 0.1.29
+Version: 0.1.30
 Home-page: 
 Author: lyzr
 Classifier: Programming Language :: Python :: 3
@@ -8,30 +8,8 @@ Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Requires-Python: >=3.8.1, <3.12
 Description-Content-Type: text/markdown
-License-File: LICENSE.txt
-Requires-Dist: asyncio
-Requires-Dist: nest_asyncio
-Requires-Dist: openai==1.3.4
-Requires-Dist: litellm==1.2.0
-Requires-Dist: llama-index==0.9.4
-Requires-Dist: langchain==0.0.339
-Requires-Dist: python-dotenv>=1.0.0
-Requires-Dist: beautifulsoup4==4.12.2
-Requires-Dist: pandas==2.0.2
-Requires-Dist: weaviate-client==3.25.3
-Requires-Dist: llmsherpa
 Provides-Extra: data-analyzr
-Requires-Dist: scikit-learn==1.4.0; extra == "data-analyzr"
-Requires-Dist: statsmodels==0.14.1; extra == "data-analyzr"
-Requires-Dist: chromadb==0.4.22; extra == "data-analyzr"
-Requires-Dist: tabulate==0.9.0; extra == "data-analyzr"
-Requires-Dist: pmdarima==2.0.4; extra == "data-analyzr"
-Requires-Dist: openpyxl==3.1.2; extra == "data-analyzr"
-Requires-Dist: matplotlib==3.8.2; extra == "data-analyzr"
-Requires-Dist: redshift_connector==2.0.918; extra == "data-analyzr"
-Requires-Dist: mysql-connector-python==8.2.0; extra == "data-analyzr"
-Requires-Dist: psycopg2-binary==2.9.9; extra == "data-analyzr"
-Requires-Dist: snowflake-connector-python==3.6.0; extra == "data-analyzr"
+License-File: LICENSE.txt

 Lyzr Logo
@@ -79,7 +57,7 @@ Lyzr SDKs helps you build all your favorite GenAI SaaS products as enterprise ap
 
 ## Key Features
 
-- **Lyzr’s Pre-built Agents**: Deploy in minutes
+- **Lyzr’s Pre-built Agents**: Deploy in minutes
   - Chat agent
   - Knowledge search
   - RAG powered apps
@@ -104,7 +82,7 @@ Lyzr SDKs helps you build all your favorite GenAI SaaS products as enterprise ap
 
 - **CTOs, CPOs**: integrate generative AI features into your apps seamlessly with local SDKs and private APIs, all with your in-house tech team. The required learning curve to build on Lyzr is literally just a few minutes.
 
-- **CIOs**: introduce generative AI to your enterprise with the comfort of 100% data privacy and security as Lyzr runs locally on your cloud. And Lyzr’s AI Management System (AIMS) makes it easy to manage agents, monitor events logs, build using AI studios, and even help your team learn generative AI with the in-built Lyzr academy.
+- **CIOs**: introduce generative AI to your enterprise with the comfort of 100% data privacy and security as Lyzr runs locally on your cloud. And Lyzr’s AI Management System (AIMS) makes it easy to manage agents, monitor event logs, build using AI studios, and even help your team learn generative AI with the in-built Lyzr academy.
 
 ## Links
 
@@ -175,7 +153,7 @@ from lyzr import ChatBot
 my_chatbot = ChatBot.pdf_chat(input_files=["pdf_file_path"])
 ```
 
-4. That’s it. Just query and start chatting with your chatbot.
+4. That’s it. Just query and start chatting with your chatbot.
 
 ```python
 response = chatbot.chat("Your question here")
diff --git a/setup.py b/setup.py
index 288dd16..cfa6d04 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@
 setup(
     name="lyzr",
-    version="0.1.29",
+    version="0.1.30",
     author="lyzr",
     description="",
     long_description=open("README.md").read(),
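
For reviewers who want to see the new surface area in one place, here is a minimal usage sketch of the two methods this patch touches. Only `text_to_notes`, `summarize`, and their reliance on `self.model` / `self.api_key` come from the diff itself; the `VoiceBot` class name, its `api_key` constructor argument, and the sample transcript are illustrative assumptions, not confirmed by this patch.

```python
# Minimal usage sketch (assumptions: the class is exported as lyzr.VoiceBot
# and accepts an OpenAI key via `api_key`; neither is confirmed by this diff).
from lyzr import VoiceBot

vb = VoiceBot(api_key="sk-...")  # hypothetical constructor signature

transcript = (
    "Alice: We should cut the 0.1.30 release this week. "
    "Bob: Agreed, once the new summarize method lands and the docs are updated."
)

# Reworked in this patch: sends the expanded note-taking system prompt plus
# the raw transcript, and returns the model's bullet-point notes.
notes = vb.text_to_notes(transcript)

# Added in this patch: same request flow as text_to_notes, but with a
# summarization system prompt; returns a prose summary of the conversation.
summary = vb.summarize(transcript)

print(notes)
print(summary)
```

Note that with this change both methods pass the transcript verbatim as the user message (`user_message = text`), so all prompt framing now lives in the system message.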