diff --git a/lionagi/core/direct/__init__.py b/lionagi/core/direct/__init__.py index 555f2ea87..28565896d 100644 --- a/lionagi/core/direct/__init__.py +++ b/lionagi/core/direct/__init__.py @@ -1,9 +1,11 @@ from .predict import predict from .select import select from .score import score +from .vote import vote __all__ = [ "predict", "select", "score", + "vote" ] diff --git a/lionagi/core/direct/select.py b/lionagi/core/direct/select.py index 894c50098..6468ad328 100644 --- a/lionagi/core/direct/select.py +++ b/lionagi/core/direct/select.py @@ -17,7 +17,6 @@ def __init__( self, sentence=None, choices=None, - num_choices=1, instruction=None, reason=False, confidence_score=False, @@ -27,7 +26,7 @@ def __init__( self.sentence = sentence self.choices = choices - self.task = f"select {num_choices} item(s), from provided choices {choices}." + self.task = f"select 1 item, from provided choices {choices}." if instruction: self.task += f"objetive {instruction}." @@ -41,7 +40,6 @@ def __init__( async def select( sentence, choices=None, - num_choices=1, instruction=None, confidence_score=False, reason=False, @@ -79,7 +77,6 @@ async def select( _template = SelectTemplate( sentence=sentence, choices=choices, - num_choices=num_choices, instruction=instruction, confidence_score=confidence_score, reason=reason, @@ -97,6 +94,7 @@ async def select( ) ans = _template.answer + if ans not in _template.choices: _template.answer = StringMatch.choose_most_similar(ans, _template.choices) diff --git a/lionagi/core/direct/vote.py b/lionagi/core/direct/vote.py index 638af362b..0dd2f7f3f 100644 --- a/lionagi/core/direct/vote.py +++ b/lionagi/core/direct/vote.py @@ -1,10 +1,11 @@ from lionagi.libs import func_call import numpy as np +from .predict import predict from .score import score # for example, directive=predict -async def vote(sentence, directive, num_generations=5, num_output=1, num_scorer=5, score_range=(0,100), num_digit=2, scorer_instruction=None, **kwargs): +async def vote(sentence, 
directive=predict, num_generations=5, num_output=1, num_scorer=5, score_range=(0,100), num_digit=2, scorer_instruction=None, **kwargs): async def _inner(i): out_ = await directive(sentence, **kwargs) diff --git a/notebooks/lion_direct.ipynb b/notebooks/lion_direct.ipynb index f9a57f0d9..13a48c322 100644 --- a/notebooks/lion_direct.ipynb +++ b/notebooks/lion_direct.ipynb @@ -6,6 +6,7 @@ "metadata": {}, "outputs": [], "source": [ + "from IPython.display import Markdown\n", "from lionagi import direct" ] }, @@ -32,8 +33,11 @@ "outputs": [ { "data": { + "text/markdown": [ + "The shark tries to escape into deeper waters." + ], "text/plain": [ - "'The big white shark tries to swim faster to escape.'" + "" ] }, "execution_count": 3, @@ -42,7 +46,8 @@ } ], "source": [ - "await direct.predict(sentence)" + "out_ = await direct.predict(sentence)\n", + "Markdown(out_.answer)" ] }, { @@ -52,10 +57,11 @@ "outputs": [ { "data": { + "text/markdown": [ + "The shark tries to swim away as fast as it can. The blue whale continues the pursuit, showcasing its surprising agility." + ], "text/plain": [ - "{'answer': 'The big white shark tries to swim faster to escape, but the blue whale is persistent in its pursuit.',\n", - " 'reason': 'Predatory or competitive interactions in the ocean often involve chase sequences, where the predator pursues its target persistently, and the prey tries to escape.',\n", - " 'confidence_score': 0.75}" + "" ] }, "execution_count": 4, @@ -64,20 +70,14 @@ } ], "source": [ - "await direct.predict(\n", + "out_ = await direct.predict(\n", " sentence,\n", " num_sentences=2,\n", " reason=True,\n", - " confidence_score=True,\n", " retry_kwargs={\"timeout\": 15},\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 2. 
select" + ")\n", + "\n", + "Markdown(out_.answer)" ] }, { @@ -87,8 +87,11 @@ "outputs": [ { "data": { + "text/markdown": [ + "Based on the given sentence, it's logical to predict that the shark would attempt to escape the threat posed by the larger blue whale. The continuation of the chase by the blue whale emphasizes the size and speed capabilities of blue whales, despite their massive size." + ], "text/plain": [ - "'deep ocean'" + "" ] }, "execution_count": 5, @@ -96,40 +99,66 @@ "output_type": "execute_result" } ], + "source": [ + "Markdown(out_.reason)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2. select" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "deep ocean\n" + ] + } + ], "source": [ "choices = [\"funny\", \"catch\", \"apple\", \"deep ocean\"]\n", "\n", - "await direct.select(sentence, choices=choices)" + "out_ = await direct.select(sentence, choices=choices)\n", + "print(out_.answer)" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 7, "metadata": {}, "outputs": [ { "data": { + "text/markdown": [ + "deep ocean" + ], "text/plain": [ - "{'answer': 'deep ocean',\n", - " 'reason': \"The context involves a blue whale and a big white shark, which are both deep-sea creatures. 
The 'deep ocean' choice is directly related to their natural habitat, making it an intriguing and fitting selection for the setup.\",\n", - " 'confidence_score': '0.95'}" + "" ] }, - "execution_count": 6, + "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "await direct.select(\n", + "out_ = await direct.select(\n", " sentence,\n", " choices=choices,\n", " num_choices=2,\n", " objective=\"find most weird for the setup\",\n", - " reason=True,\n", - " confidence_score=True,\n", " temperature=0.45,\n", - ")" + ")\n", + "\n", + "Markdown(out_.answer)" ] }, { @@ -141,118 +170,200 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 8, "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "1" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "5.0\n" + ] } ], "source": [ - "await direct.score(sentence)" + "out_ = await direct.score(sentence)\n", + "print(out_.answer)" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 9, "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "{'score': 85.0,\n", - " 'reason': \"It's unusual for a blue whale, which primarily feeds on tiny krill, to chase a big white shark, as this behavior deviates significantly from their known peaceful and solitary nature.\",\n", - " 'confidence_score': 0.9}" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "75.0\n", + "The scenario of a blue whale chasing a big white shark is quite unusual and not typical behavior observed in nature, as blue whales primarily feed on tiny krill and are not known to chase sharks. 
This makes the situation relatively high on the weirdness scale.\n" + ] } ], "source": [ - "await direct.score(\n", + "out_ = await direct.score(\n", " sentence,\n", " instruction=\"rate weirdness\",\n", " reason=True,\n", " score_range=(1, 100),\n", " num_digit=1,\n", - " confidence_score=True,\n", - ")" + ")\n", + "\n", + "print(out_.answer)\n", + "print(out_.reason)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### 4. sentiment" + "### 4. vote" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 10, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'neutral'" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ - "await direct.sentiment(sentence)" + "from lionagi.core.direct import vote, predict\n", + "\n", + "sentence = \"\"\"\n", + "Why did the blue whale and the big shark start dancing at the bar? Because they heard the bartender was serving \"sea-riously\" good fin-tunes! And then they stopped dancing... 
because the octopus DJ said it was time for a squid break!\n", + "\"\"\"" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 11, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "0.5" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ - "await direct.sentiment(sentence, to_type=\"num\")" + "# this code made 10 generations of \"predict\" - 10 LLM calls\n", + "# each answer got scored by 10 scorers, - 100 LLM calls\n", + "# let's check the top 5 generations\n", + "\n", + "out_ = await vote(\n", + " sentence, \n", + " directive=predict, \n", + " num_sentences=3, \n", + " num_generations=10, \n", + " num_output=5, \n", + " num_scorer=10, \n", + " score_range=(0,10), \n", + " num_digit=2, \n", + " scorer_instruction=\"rate humor highly critically\",\n", + " temperature=0.4\n", + ")" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 12, "metadata": {}, "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Score 7.5\n" + ] + }, { "data": { + "text/markdown": [ + "So, the blue whale and the big shark decided to take a break and order some krill cocktails. Meanwhile, the octopus DJ started playing some clam jams, getting the whole bar in a bubbly mood. After their break, the whale and the shark were ready to dive back into the dance floor, showing off their smooth jellyfish jive." + ], "text/plain": [ - "'awesome 😎'" + "" ] }, - "execution_count": 11, "metadata": {}, - "output_type": "execute_result" + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Score 7.5\n" + ] + }, + { + "data": { + "text/markdown": [ + "But the party didn't end there. Once the squid break was over, the blue whale, the big shark, and all their sea friends got back on the dance floor, ready to dive into more oceanic beats. 
The night was young, and the sea creatures were determined to make a splash with their dance moves!" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Score 7.65\n" + ] + }, + { + "data": { + "text/markdown": [ + "So, the blue whale and the big shark took a break, sipping on their ocean-inspired cocktails, chatting about the coral reefs and the latest in aquatic fashion. Meanwhile, the octopus DJ was busy setting up his next set, promising to bring even more waves to the dance floor. Everyone at the bar was eagerly waiting, knowing that when the music started again, it would be a splash hit." + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Score 7.9\n" + ] + }, + { + "data": { + "text/markdown": [ + "So, the whale and the shark took a seat at the bar, sipping on their ocean-inspired cocktails, chatting about the current events in the deep blue. They were soon joined by a group of jellyfish, who were glowing with excitement to join the underwater party. The night was filled with laughter, bubbles, and the best sea-themed music, making it an unforgettable evening under the sea." + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Score 8.25\n" + ] + }, + { + "data": { + "text/markdown": [ + "So, the blue whale and the big shark grabbed a drink and watched the jellyfish do their jelly wobble on the dance floor. It was a sight to sea, making everyone at the bar laugh and cheer. Eventually, the octopus DJ returned, and the party continued with even more sea-riously good fin-tunes." 
+ ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" } ], "source": [ - "await direct.sentiment(sentence, choices=[\"awesome 😎\", \"hmmm 🤔\", \"mehhh 😭\"])" + "from IPython.display import Markdown\n", + "\n", + "for i in range(5):\n", + " print(\"Score\", out_[i].score)\n", + " display(Markdown(out_[i].answer))" ] } ], @@ -272,7 +383,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.6" + "version": "3.12.1" } }, "nbformat": 4,