From 89c7253da64e59e83d545bc5bb314838f6447008 Mon Sep 17 00:00:00 2001 From: Jeremy Howard Date: Sat, 26 Oct 2024 05:15:09 +1000 Subject: [PATCH] update outputs --- 00_core.ipynb | 176 +++++------- 01_toolloop.ipynb | 149 ++++------- 02_async.ipynb | 1 - README.md | 473 ++++++++++++++++++-------------- README.txt | 653 +++++++++++++++++++++++++++++---------------- claudette/asink.py | 1 - index.ipynb | 321 ++++++++++------------ 7 files changed, 947 insertions(+), 827 deletions(-) diff --git a/00_core.ipynb b/00_core.ipynb index fc11b58..7957781 100644 --- a/00_core.ipynb +++ b/00_core.ipynb @@ -229,12 +229,12 @@ { "data": { "text/markdown": [ - "Hi Jeremy! Nice to meet you. I'm Claude. How can I help you today?\n", + "Hello Jeremy! Nice to meet you. I'm Claude. How can I help you today?\n", "\n", "
\n", "\n", - "- id: `msg_01N7BpbVNmZHMv4aF4Vgperz`\n", - "- content: `[{'text': \"Hi Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", 'type': 'text'}]`\n", + "- id: `msg_017ZxWnLswjWcs7iDXYyEaod`\n", + "- content: `[{'text': \"Hello Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", @@ -245,7 +245,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01N7BpbVNmZHMv4aF4Vgperz', content=[TextBlock(text=\"Hi Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 22; Cache create: 0; Cache read: 0; Total: 32)" + "Message(id='msg_017ZxWnLswjWcs7iDXYyEaod', content=[TextBlock(text=\"Hello Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 22; Cache create: 0; Cache read: 0; Total: 32)" ] }, "execution_count": null, @@ -327,7 +327,7 @@ { "data": { "text/plain": [ - "TextBlock(text=\"Hi Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", type='text')" + "TextBlock(text=\"Hello Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", type='text')" ] }, "execution_count": null, @@ -371,7 +371,7 @@ { "data": { "text/plain": [ - "\"Hi Jeremy! Nice to meet you. I'm Claude. How can I help you today?\"" + "\"Hello Jeremy! Nice to meet you. I'm Claude. How can I help you today?\"" ] }, "execution_count": null, @@ -421,12 +421,12 @@ { "data": { "text/markdown": [ - "Hi Jeremy! Nice to meet you. I'm Claude. How can I help you today?\n", + "Hello Jeremy! Nice to meet you. I'm Claude. How can I help you today?\n", "\n", "
\n", "\n", - "- id: `msg_01N7BpbVNmZHMv4aF4Vgperz`\n", - "- content: `[{'text': \"Hi Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", 'type': 'text'}]`\n", + "- id: `msg_017ZxWnLswjWcs7iDXYyEaod`\n", + "- content: `[{'text': \"Hello Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", @@ -437,7 +437,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01N7BpbVNmZHMv4aF4Vgperz', content=[TextBlock(text=\"Hi Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 22; Cache create: 0; Cache read: 0; Total: 32)" + "Message(id='msg_017ZxWnLswjWcs7iDXYyEaod', content=[TextBlock(text=\"Hello Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 22; Cache create: 0; Cache read: 0; Total: 32)" ] }, "execution_count": null, @@ -730,23 +730,23 @@ { "data": { "text/markdown": [ - "Hi Jeremy! Nice to meet you. I'm Claude, an AI assistant created by Anthropic. How can I help you today?\n", + "Hi Jeremy! Nice to meet you. I'm Claude. How can I help you today?\n", "\n", "
\n", "\n", - "- id: `msg_013taKXp67CXLBKtDjRUURFU`\n", - "- content: `[{'text': \"Hi Jeremy! Nice to meet you. I'm Claude, an AI assistant created by Anthropic. How can I help you today?\", 'type': 'text'}]`\n", + "- id: `msg_01UCGP9v7US35BhtjxDwd48t`\n", + "- content: `[{'text': \"Hi Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 10, 'output_tokens': 31}`\n", + "- usage: `{'input_tokens': 10, 'output_tokens': 22}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_013taKXp67CXLBKtDjRUURFU', content=[TextBlock(text=\"Hi Jeremy! Nice to meet you. I'm Claude, an AI assistant created by Anthropic. How can I help you today?\", type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 31; Cache create: 0; Cache read: 0; Total: 41)" + "Message(id='msg_01UCGP9v7US35BhtjxDwd48t', content=[TextBlock(text=\"Hi Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 22; Cache create: 0; Cache read: 0; Total: 32)" ] }, "execution_count": null, @@ -821,7 +821,7 @@ "text/plain": [ "[{'role': 'user', 'content': \"I'm Jeremy\"},\n", " {'role': 'assistant',\n", - " 'content': [TextBlock(text=\"Hi Jeremy! Nice to meet you. I'm Claude, an AI assistant created by Anthropic. How can I help you today?\", type='text')]},\n", + " 'content': [TextBlock(text=\"Hi Jeremy! Nice to meet you. I'm Claude. How can I help you today?\", type='text')]},\n", " {'role': 'user', 'content': 'I forgot my name. Can you remind me please?'}]" ] }, @@ -856,19 +856,19 @@ "\n", "
\n", "\n", - "- id: `msg_01EPrgHY2bRNcRBj6RJzGLCr`\n", + "- id: `msg_01Wmtgmf9u1chtyKN2KQG9Ee`\n", "- content: `[{'text': 'You just told me your name is Jeremy.', 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 55, 'output_tokens': 12}`\n", + "- usage: `{'input_tokens': 46, 'output_tokens': 12}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01EPrgHY2bRNcRBj6RJzGLCr', content=[TextBlock(text='You just told me your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 55; Out: 12; Cache create: 0; Cache read: 0; Total: 67)" + "Message(id='msg_01Wmtgmf9u1chtyKN2KQG9Ee', content=[TextBlock(text='You just told me your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 46; Out: 12; Cache create: 0; Cache read: 0; Total: 58)" ] }, "execution_count": null, @@ -974,7 +974,7 @@ { "data": { "text/plain": [ - "In: 10; Out: 31; Cache create: 0; Cache read: 0; Total: 41" + "In: 10; Out: 22; Cache create: 0; Cache read: 0; Total: 32" ] }, "execution_count": null, @@ -1136,7 +1136,7 @@ "\n", "
\n", "\n", - "- id: `msg_01B7RhgVrvQTB9riLkwWg1ie`\n", + "- id: `msg_01GVnwKLBsg4TXgsrcsQGu2G`\n", "- content: `[{'text': 'Hello! How can I assist you today?', 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", @@ -1148,7 +1148,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01B7RhgVrvQTB9riLkwWg1ie', content=[TextBlock(text='Hello! How can I assist you today?', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 8; Out: 12; Cache create: 0; Cache read: 0; Total: 20)" + "Message(id='msg_01GVnwKLBsg4TXgsrcsQGu2G', content=[TextBlock(text='Hello! How can I assist you today?', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 8; Out: 12; Cache create: 0; Cache read: 0; Total: 20)" ] }, "execution_count": null, @@ -1213,7 +1213,7 @@ "\n", "
\n", "\n", - "- id: `msg_015xKvUT8NSzzSFb7KXLwsNi`\n", + "- id: `msg_01SRJeS8XuYSwJQ7uten3YzJ`\n", "- content: `[{'text': 'According to Douglas Adams, \"The answer to the ultimate question of life, the universe, and everything is 42.\"', 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", @@ -1225,7 +1225,7 @@ "
" ], "text/plain": [ - "Message(id='msg_015xKvUT8NSzzSFb7KXLwsNi', content=[TextBlock(text='According to Douglas Adams, \"The answer to the ultimate question of life, the universe, and everything is 42.\"', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 24; Out: 23; Cache create: 0; Cache read: 0; Total: 47)" + "Message(id='msg_01SRJeS8XuYSwJQ7uten3YzJ', content=[TextBlock(text='According to Douglas Adams, \"The answer to the ultimate question of life, the universe, and everything is 42.\"', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 24; Out: 23; Cache create: 0; Cache read: 0; Total: 47)" ] }, "execution_count": null, @@ -1344,7 +1344,7 @@ "\n", "
\n", "\n", - "- id: `msg_013HvMd3c1gNsLDRb4A9Lm6C`\n", + "- id: `msg_016YWxKEDhdQ15rq5nw8sYsq`\n", "- content: `[{'text': '1, 2, 3, 4, ', 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", @@ -1356,7 +1356,7 @@ "
" ], "text/plain": [ - "Message(id='msg_013HvMd3c1gNsLDRb4A9Lm6C', content=[TextBlock(text='1, 2, 3, 4, ', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='stop_sequence', stop_sequence='5', type='message', usage=In: 15; Out: 14; Cache create: 0; Cache read: 0; Total: 29)" + "Message(id='msg_016YWxKEDhdQ15rq5nw8sYsq', content=[TextBlock(text='1, 2, 3, 4, ', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='stop_sequence', stop_sequence='5', type='message', usage=In: 15; Out: 14; Cache create: 0; Cache read: 0; Total: 29)" ] }, "execution_count": null, @@ -1423,7 +1423,7 @@ " 'temp': None,\n", " 'stream': None,\n", " 'stop': None,\n", - " 'result': Message(id='msg_01KxXCh98wwUs9zQ9CSfY9Uf', content=[TextBlock(text='1, ', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='stop_sequence', stop_sequence='2', type='message', usage=In: 15; Out: 5; Cache create: 0; Cache read: 0; Total: 20),\n", + " 'result': Message(id='msg_015fRvRNQTreX3QzzJZmCi8j', content=[TextBlock(text='1, ', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='stop_sequence', stop_sequence='2', type='message', usage=In: 15; Out: 5; Cache create: 0; Cache read: 0; Total: 20),\n", " 'use': In: 94; Out: 89; Cache create: 0; Cache read: 0; Total: 183,\n", " 'stop_reason': 'stop_sequence',\n", " 'stop_sequence': '2'}" @@ -1563,12 +1563,12 @@ { "data": { "text/markdown": [ - "ToolUseBlock(id='toolu_01CWvR4gjtHZwp7maM2exdGr', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')\n", + "ToolUseBlock(id='toolu_019no78gSVgC5MPrWkfvabkJ', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')\n", "\n", "
\n", "\n", - "- id: `msg_01Pze5co7jijaGQDWykwx8WJ`\n", - "- content: `[{'id': 'toolu_01CWvR4gjtHZwp7maM2exdGr', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", + "- id: `msg_01BBX49iGZo35EuBhC9koAk5`\n", + "- content: `[{'id': 'toolu_019no78gSVgC5MPrWkfvabkJ', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", "- stop_reason: `tool_use`\n", @@ -1579,7 +1579,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01Pze5co7jijaGQDWykwx8WJ', content=[ToolUseBlock(id='toolu_01CWvR4gjtHZwp7maM2exdGr', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-haiku-20240307', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 483; Out: 53; Cache create: 0; Cache read: 0; Total: 536)" + "Message(id='msg_01BBX49iGZo35EuBhC9koAk5', content=[ToolUseBlock(id='toolu_019no78gSVgC5MPrWkfvabkJ', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-haiku-20240307', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 483; Out: 53; Cache create: 0; Cache read: 0; Total: 536)" ] }, "execution_count": null, @@ -1663,7 +1663,7 @@ "data": { "text/plain": [ "{'type': 'tool_result',\n", - " 'tool_use_id': 'toolu_01CWvR4gjtHZwp7maM2exdGr',\n", + " 'tool_use_id': 'toolu_019no78gSVgC5MPrWkfvabkJ',\n", " 'content': '7063474'}" ] }, @@ -1726,10 +1726,10 @@ "data": { "text/plain": [ "[{'role': 'assistant',\n", - " 'content': [ToolUseBlock(id='toolu_01CWvR4gjtHZwp7maM2exdGr', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')]},\n", + " 'content': [ToolUseBlock(id='toolu_019no78gSVgC5MPrWkfvabkJ', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')]},\n", " {'role': 'user',\n", " 'content': [{'type': 'tool_result',\n", - " 'tool_use_id': 'toolu_01CWvR4gjtHZwp7maM2exdGr',\n", + " 'tool_use_id': 'toolu_019no78gSVgC5MPrWkfvabkJ',\n", " 'content': '7063474'}]}]" ] }, @@ -1784,10 +1784,10 @@ "text/plain": [ "[{'role': 'user', 'content': 'What is 604542+6458932?'},\n", " {'role': 'assistant',\n", - " 'content': [ToolUseBlock(id='toolu_01CWvR4gjtHZwp7maM2exdGr', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')]},\n", + " 'content': [ToolUseBlock(id='toolu_019no78gSVgC5MPrWkfvabkJ', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')]},\n", " {'role': 'user',\n", " 'content': [{'type': 'tool_result',\n", - " 'tool_use_id': 'toolu_01CWvR4gjtHZwp7maM2exdGr',\n", + " 'tool_use_id': 'toolu_019no78gSVgC5MPrWkfvabkJ',\n", " 'content': '7063474'}]}]" ] }, @@ -1898,12 +1898,12 @@ { "data": { "text/markdown": [ - "ToolUseBlock(id='toolu_01NRuky7TuvTXwc4h5LE5YVz', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')\n", + "ToolUseBlock(id='toolu_01BhWTjadDrcswRX3egRzn2a', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')\n", "\n", "
\n", "\n", - "- id: `msg_01HGow4rQdU82LYFfzty4tJj`\n", - "- content: `[{'id': 'toolu_01NRuky7TuvTXwc4h5LE5YVz', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", + "- id: `msg_01YHLdfTN1oHtzLswwUiBDD2`\n", + "- content: `[{'id': 'toolu_01BhWTjadDrcswRX3egRzn2a', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", "- stop_reason: `tool_use`\n", @@ -1914,7 +1914,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01HGow4rQdU82LYFfzty4tJj', content=[ToolUseBlock(id='toolu_01NRuky7TuvTXwc4h5LE5YVz', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-haiku-20240307', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 479; Out: 57; Cache create: 0; Cache read: 0; Total: 536)" + "Message(id='msg_01YHLdfTN1oHtzLswwUiBDD2', content=[ToolUseBlock(id='toolu_01BhWTjadDrcswRX3egRzn2a', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-haiku-20240307', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 479; Out: 57; Cache create: 0; Cache read: 0; Total: 536)" ] }, "execution_count": null, @@ -2293,7 +2293,7 @@ "\n", "
\n", "\n", - "- id: `msg_013A37GK3ecgKVPFmH8sbzSe`\n", + "- id: `msg_01ThWqzVBhEohkdxKJwwCDGA`\n", "- content: `[{'text': 'Your name is Jeremy.', 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", @@ -2305,7 +2305,7 @@ "
" ], "text/plain": [ - "Message(id='msg_013A37GK3ecgKVPFmH8sbzSe', content=[TextBlock(text='Your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 41; Out: 8; Cache create: 0; Cache read: 0; Total: 49)" + "Message(id='msg_01ThWqzVBhEohkdxKJwwCDGA', content=[TextBlock(text='Your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 41; Out: 8; Cache create: 0; Cache read: 0; Total: 49)" ] }, "execution_count": null, @@ -2367,12 +2367,12 @@ { "data": { "text/markdown": [ - "According to Douglas Adams, 42. But in reality, it's to find personal meaning through experiences, relationships, and pursuing what brings you fulfillment.\n", + "According to Douglas Adams, 42. But more seriously: to find purpose, create meaning, and make connections with others while experiencing what life has to offer.\n", "\n", "
\n", "\n", - "- id: `msg_01NXx8tt1UeMViqa6vwybbVb`\n", - "- content: `[{'text': \"According to Douglas Adams, 42. But in reality, it's to find personal meaning through experiences, relationships, and pursuing what brings you fulfillment.\", 'type': 'text'}]`\n", + "- id: `msg_016U5jwM3QA3QENPtqN9RmN1`\n", + "- content: `[{'text': 'According to Douglas Adams, 42. But more seriously: to find purpose, create meaning, and make connections with others while experiencing what life has to offer.', 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", @@ -2383,7 +2383,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01NXx8tt1UeMViqa6vwybbVb', content=[TextBlock(text=\"According to Douglas Adams, 42. But in reality, it's to find personal meaning through experiences, relationships, and pursuing what brings you fulfillment.\", type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 69; Out: 30; Cache create: 0; Cache read: 0; Total: 99)" + "Message(id='msg_016U5jwM3QA3QENPtqN9RmN1', content=[TextBlock(text='According to Douglas Adams, 42. But more seriously: to find purpose, create meaning, and make connections with others while experiencing what life has to offer.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 69; Out: 30; Cache create: 0; Cache read: 0; Total: 99)" ] }, "execution_count": null, @@ -2439,32 +2439,23 @@ { "data": { "text/markdown": [ - "Beyond personal fulfillment, life's meaning involves:\n", - "- Making a positive impact on others\n", - "- Learning and growing continuously\n", - "- Creating something lasting\n", - "- Finding love and connection\n", - "- Discovering your unique purpose\n", - "- Contributing to something bigger than yourself\n", - "- Experiencing joy and wonder\n", - "- Overcoming challenges\n", - "- Leaving the world better than you found it\n", + "To grow, learn, love, help others, leave the world better than you found it, and find joy in both life's big moments and small everyday experiences. Ultimately, each person must discover their own unique meaning and purpose.\n", "\n", "
\n", "\n", - "- id: `msg_01QjG7pJyx8cW1gpthHinPo8`\n", - "- content: `[{'text': \"Beyond personal fulfillment, life's meaning involves:\\n- Making a positive impact on others\\n- Learning and growing continuously\\n- Creating something lasting\\n- Finding love and connection\\n- Discovering your unique purpose\\n- Contributing to something bigger than yourself\\n- Experiencing joy and wonder\\n- Overcoming challenges\\n- Leaving the world better than you found it\", 'type': 'text'}]`\n", + "- id: `msg_01Q5nVjaCVDUCWZzmRMsa5U7`\n", + "- content: `[{'text': \"To grow, learn, love, help others, leave the world better than you found it, and find joy in both life's big moments and small everyday experiences. Ultimately, each person must discover their own unique meaning and purpose.\", 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 105, 'output_tokens': 78, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 105, 'output_tokens': 50, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01QjG7pJyx8cW1gpthHinPo8', content=[TextBlock(text=\"Beyond personal fulfillment, life's meaning involves:\\n- Making a positive impact on others\\n- Learning and growing continuously\\n- Creating something lasting\\n- Finding love and connection\\n- Discovering your unique purpose\\n- Contributing to something bigger than yourself\\n- Experiencing joy and wonder\\n- Overcoming challenges\\n- Leaving the world better than you found it\", type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 105; Out: 78; Cache create: 0; Cache read: 0; Total: 183)" + "Message(id='msg_01Q5nVjaCVDUCWZzmRMsa5U7', content=[TextBlock(text=\"To grow, learn, love, help others, leave the world better than you found it, and find joy in both life's big moments and small everyday experiences. Ultimately, each person must discover their own unique meaning and purpose.\", type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 105; Out: 50; Cache create: 0; Cache read: 0; Total: 155)" ] }, "execution_count": null, @@ -2514,7 +2505,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "According to Douglas Adams, 42. More seriously: to find purpose, love, grow, and make a positive impact while experiencing life's journey." + "According to Douglas Adams, it's 42. More seriously: to find purpose, grow, love, and make a positive impact while experiencing what existence has to offer." ] } ], @@ -2580,8 +2571,8 @@ "\n", "
\n", "\n", - "- id: `msg_014s7mQQoAHsnUKTEzttxQT7`\n", - "- content: `[{'text': 'Let me calculate that sum for you.', 'type': 'text'}, {'id': 'toolu_013cYwwW3JfQoWe6qWntsbdw', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", + "- id: `msg_01DsZu8XZEvBuzYLEgQy8qYg`\n", + "- content: `[{'text': 'Let me calculate that sum for you.', 'type': 'text'}, {'id': 'toolu_01119t7doYEYhYn4d99eaF2o', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `tool_use`\n", @@ -2592,7 +2583,7 @@ "
" ], "text/plain": [ - "Message(id='msg_014s7mQQoAHsnUKTEzttxQT7', content=[TextBlock(text='Let me calculate that sum for you.', type='text'), ToolUseBlock(id='toolu_013cYwwW3JfQoWe6qWntsbdw', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 437; Out: 81; Cache create: 0; Cache read: 0; Total: 518)" + "Message(id='msg_01DsZu8XZEvBuzYLEgQy8qYg', content=[TextBlock(text='Let me calculate that sum for you.', type='text'), ToolUseBlock(id='toolu_01119t7doYEYhYn4d99eaF2o', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 437; Out: 81; Cache create: 0; Cache read: 0; Total: 518)" ] }, "execution_count": null, @@ -2627,7 +2618,7 @@ "\n", "
\n", "\n", - "- id: `msg_01QHCJ4Su1dqm22WzbNNhgQ9`\n", + "- id: `msg_01RHRdxAXGredDCMoogsKs78`\n", "- content: `[{'text': 'The sum of 604542 and 6458932 is 7063474.', 'type': 'text'}]`\n", "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", @@ -2639,7 +2630,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01QHCJ4Su1dqm22WzbNNhgQ9', content=[TextBlock(text='The sum of 604542 and 6458932 is 7063474.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 532; Out: 23; Cache create: 0; Cache read: 0; Total: 555)" + "Message(id='msg_01RHRdxAXGredDCMoogsKs78', content=[TextBlock(text='The sum of 604542 and 6458932 is 7063474.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 532; Out: 23; Cache create: 0; Cache read: 0; Total: 555)" ] }, "execution_count": null, @@ -2825,7 +2816,7 @@ "\n", "
\n", "\n", - "- id: `msg_01D3bn7VFvPe7YofhjecQ2q9`\n", + "- id: `msg_01R2U4KZjWgVhkMfbFarvGJF`\n", "- content: `[{'text': 'The image contains purple or lavender-colored flowers, which appear to be daisies or a similar type of flower.', 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", @@ -2837,7 +2828,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01D3bn7VFvPe7YofhjecQ2q9', content=[TextBlock(text='The image contains purple or lavender-colored flowers, which appear to be daisies or a similar type of flower.', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 110; Out: 28; Cache create: 0; Cache read: 0; Total: 138)" + "Message(id='msg_01R2U4KZjWgVhkMfbFarvGJF', content=[TextBlock(text='The image contains purple or lavender-colored flowers, which appear to be daisies or a similar type of flower.', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 110; Out: 28; Cache create: 0; Cache read: 0; Total: 138)" ] }, "execution_count": null, @@ -2977,7 +2968,7 @@ "\n", "
\n", "\n", - "- id: `msg_01St3vzBb7aR4DiHcmZgohkb`\n", + "- id: `msg_011UjBS45uE7C9XiSh8txh3L`\n", "- content: `[{'text': 'The image contains purple or lavender-colored flowers, which appear to be daisies or a similar type of flower.', 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", @@ -2989,7 +2980,7 @@ "
" ], "text/plain": [ - "Message(id='msg_01St3vzBb7aR4DiHcmZgohkb', content=[TextBlock(text='The image contains purple or lavender-colored flowers, which appear to be daisies or a similar type of flower.', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 110; Out: 28; Cache create: 0; Cache read: 0; Total: 138)" + "Message(id='msg_011UjBS45uE7C9XiSh8txh3L', content=[TextBlock(text='The image contains purple or lavender-colored flowers, which appear to be daisies or a similar type of flower.', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 110; Out: 28; Cache create: 0; Cache read: 0; Total: 138)" ] }, "execution_count": null, @@ -3047,7 +3038,7 @@ "data": { "text/plain": [ "['anthropic.claude-3-opus-20240229-v1:0',\n", - " 'anthropic.claude-3-5-sonnet-20240620-v1:0',\n", + " 'anthropic.claude-3-5-sonnet-20241022-v2:0',\n", " 'anthropic.claude-3-sonnet-20240229-v1:0',\n", " 'anthropic.claude-3-haiku-20240307-v1:0']" ] @@ -3114,23 +3105,23 @@ { "data": { "text/markdown": [ - "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\n", + "It's nice to meet you, Jeremy! I'm Claude, an AI assistant created by Anthropic. How can I help you today?\n", "\n", "
\n", "\n", - "- id: `msg_bdrk_01MwjVA5hwyfob3w4vdsqpnU`\n", - "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_bdrk_01JPBwsACbf1HZoNDUzbHNpJ`\n", + "- content: `[{'text': \"It's nice to meet you, Jeremy! I'm Claude, an AI assistant created by Anthropic. How can I help you today?\", 'type': 'text'}]`\n", + "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 10, 'output_tokens': 36}`\n", + "- usage: `{'input_tokens': 10, 'output_tokens': 32}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_bdrk_01MwjVA5hwyfob3w4vdsqpnU', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 36; Total: 46)" + "Message(id='msg_bdrk_01JPBwsACbf1HZoNDUzbHNpJ', content=[TextBlock(text=\"It's nice to meet you, Jeremy! I'm Claude, an AI assistant created by Anthropic. How can I help you today?\", type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 32; Cache create: 0; Cache read: 0; Total: 42)" ] }, "execution_count": null, @@ -3160,7 +3151,7 @@ "data": { "text/plain": [ "['claude-3-opus@20240229',\n", - " 'claude-3-5-sonnet@20240620',\n", + " 'claude-3-5-sonnet-v2@20241022',\n", " 'claude-3-sonnet@20240229',\n", " 'claude-3-haiku@20240307']" ] @@ -3213,34 +3204,7 @@ "execution_count": null, "id": "11006ae0", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\n", - "\n", - "
\n", - "\n", - "- id: `msg_vrtx_01PFtHewPDe35yShy7vecp5q`\n", - "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", - "- role: `assistant`\n", - "- stop_reason: `end_turn`\n", - "- stop_sequence: `None`\n", - "- type: `message`\n", - "- usage: `{'input_tokens': 10, 'output_tokens': 36}`\n", - "\n", - "
" - ], - "text/plain": [ - "Message(id='msg_vrtx_01PFtHewPDe35yShy7vecp5q', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 36; Total: 46)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "chat(\"I'm Jeremy\")" ] diff --git a/01_toolloop.ipynb b/01_toolloop.ipynb index e5de910..34e71ff 100644 --- a/01_toolloop.ipynb +++ b/01_toolloop.ipynb @@ -167,7 +167,7 @@ { "data": { "text/plain": [ - "[ToolUseBlock(id='toolu_019DSHxKkKDrNTcUe9AgKqjm', input={'customer_id': 'C1'}, name='get_customer_info', type='tool_use')]" + "[ToolUseBlock(id='toolu_0168sUZoEUpjzk5Y8WN3q9XL', input={'customer_id': 'C1'}, name='get_customer_info', type='tool_use')]" ] }, "execution_count": null, @@ -237,7 +237,7 @@ "data": { "text/plain": [ "[TextBlock(text=\"Okay, let's cancel all orders for customer C1:\", type='text'),\n", - " ToolUseBlock(id='toolu_01Gn7zKBeBgWzi2AKfN6bVNZ', input={'customer_id': 'C1'}, name='get_customer_info', type='tool_use')]" + " ToolUseBlock(id='toolu_01ADr1rEp7NLZ2iKWfLp7vz7', input={'customer_id': 'C1'}, name='get_customer_info', type='tool_use')]" ] }, "execution_count": null, @@ -316,19 +316,19 @@ "\n", "
\n", "\n", - "- id: `msg_019SscrFmtXCmyAknBfLNv5i`\n", + "- id: `msg_01Fm2CY76dNeWief4kUW6r71`\n", "- content: `[{'text': 'The email address for customer C1 is john@example.com.', 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 732, 'output_tokens': 19, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 720, 'output_tokens': 19, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_019SscrFmtXCmyAknBfLNv5i', content=[TextBlock(text='The email address for customer C1 is john@example.com.', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 732; Out: 19; Cache create: 0; Cache read: 0; Total: 751)" + "Message(id='msg_01Fm2CY76dNeWief4kUW6r71', content=[TextBlock(text='The email address for customer C1 is john@example.com.', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 720; Out: 19; Cache create: 0; Cache read: 0; Total: 739)" ] }, "execution_count": null, @@ -361,34 +361,34 @@ "output_type": "stream", "text": [ "- Retrieving customer C1\n", - "Message(id='msg_01VzFkDJ59R7NP6gQ7cNrWi8', content=[TextBlock(text=\"Okay, let's cancel all orders for customer C1:\", type='text'), ToolUseBlock(id='toolu_01YJ6Kh3LMVL5Ekzn44VTH6E', input={'customer_id': 'C1'}, name='get_customer_info', type='tool_use')], model='claude-3-haiku-20240307', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 537; Out: 72; Cache create: 0; Cache read: 0; Total: 609)\n", + "[{'role': 'user', 'content': [{'type': 'text', 'text': 'Please cancel all orders for customer C1 for me.'}]}, {'role': 'assistant', 'content': [TextBlock(text=\"Okay, let's cancel all orders for customer C1:\", type='text'), ToolUseBlock(id='toolu_01SvivKytaRHEdKixEY9dUDz', input={'customer_id': 'C1'}, name='get_customer_info', type='tool_use')]}, {'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_01SvivKytaRHEdKixEY9dUDz', 'content': \"{'name': 'John Doe', 'email': 'john@example.com', 'phone': '123-456-7890', 'orders': [{'id': 'O1', 'product': 'Widget A', 'quantity': 2, 'price': 19.99, 'status': 'Shipped'}, {'id': 'O2', 'product': 'Gadget B', 'quantity': 1, 'price': 49.99, 'status': 'Processing'}]}\"}]}]\n", "- Cancelling order O1\n", - "Message(id='msg_01SxniJb85ofdMm3UJ4o2XCy', content=[TextBlock(text=\"Based on the customer information, it looks like there are 2 orders for customer C1:\\n- Order O1 for Widget A\\n- Order O2 for Gadget B\\n\\nLet's cancel both of these orders:\", type='text'), ToolUseBlock(id='toolu_01M5i2uKyNWDk7s5pQKF3uh3', input={'order_id': 'O1'}, name='cancel_order', type='tool_use')], model='claude-3-haiku-20240307', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 745; Out: 107; Cache create: 0; Cache read: 0; Total: 852)\n", + "[{'role': 'assistant', 'content': [TextBlock(text=\"Based on the customer information, it looks like there are 2 orders for customer C1:\\n- Order O1 for Widget A\\n- Order O2 for Gadget B\\n\\nLet's cancel each of these orders:\", type='text'), ToolUseBlock(id='toolu_01DoGVUPVBeDYERMePHDzUoT', input={'order_id': 'O1'}, name='cancel_order', type='tool_use')]}, {'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_01DoGVUPVBeDYERMePHDzUoT', 'content': 'True'}]}]\n", "- Cancelling order O2\n", - "Message(id='msg_01CQg3uJfWmCFRYBAWyUdXAC', content=[ToolUseBlock(id='toolu_012fBAoEhpixe16w7dZFLsnD', input={'order_id': 'O2'}, name='cancel_order', type='tool_use')], model='claude-3-haiku-20240307', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 864; Out: 57; Cache create: 0; Cache read: 0; Total: 921)\n", - "Message(id='msg_01SghTUGjDJAKifcq7pyiLKb', content=[TextBlock(text='Both order cancellations were successful. 
I have now cancelled all orders for customer C1.', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 933; Out: 23; Cache create: 0; Cache read: 0; Total: 956)\n" + "[{'role': 'assistant', 'content': [ToolUseBlock(id='toolu_01XNwS35yY88Mvx4B3QqDeXX', input={'order_id': 'O2'}, name='cancel_order', type='tool_use')]}, {'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_01XNwS35yY88Mvx4B3QqDeXX', 'content': 'True'}]}]\n", + "[{'role': 'assistant', 'content': [TextBlock(text=\"I've successfully cancelled both orders O1 and O2 for customer C1. Please let me know if you need anything else!\", type='text')]}]\n" ] }, { "data": { "text/markdown": [ - "Both order cancellations were successful. I have now cancelled all orders for customer C1.\n", + "I've successfully cancelled both orders O1 and O2 for customer C1. Please let me know if you need anything else!\n", "\n", "
\n", "\n", - "- id: `msg_01SghTUGjDJAKifcq7pyiLKb`\n", - "- content: `[{'text': 'Both order cancellations were successful. I have now cancelled all orders for customer C1.', 'type': 'text'}]`\n", + "- id: `msg_01K1QpUZ8nrBVUHYTrH5QjSF`\n", + "- content: `[{'text': \"I've successfully cancelled both orders O1 and O2 for customer C1. Please let me know if you need anything else!\", 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 933, 'output_tokens': 23, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 921, 'output_tokens': 32, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01SghTUGjDJAKifcq7pyiLKb', content=[TextBlock(text='Both order cancellations were successful. I have now cancelled all orders for customer C1.', type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 933; Out: 23; Cache create: 0; Cache read: 0; Total: 956)" + "Message(id='msg_01K1QpUZ8nrBVUHYTrH5QjSF', content=[TextBlock(text=\"I've successfully cancelled both orders O1 and O2 for customer C1. Please let me know if you need anything else!\", type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 921; Out: 32; Cache create: 0; Cache read: 0; Total: 953)" ] }, "execution_count": null, @@ -430,19 +430,19 @@ "\n", "
\n", "\n", - "- id: `msg_01VdootDagiKh44zVBhHBMnK`\n", + "- id: `msg_01XcXpFDwoZ3u1bFDf5mY8x1`\n", "- content: `[{'text': \"The status of order O2 is now 'Cancelled' since I successfully cancelled that order earlier.\", 'type': 'text'}]`\n", "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 1095, 'output_tokens': 26, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 1092, 'output_tokens': 26, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01VdootDagiKh44zVBhHBMnK', content=[TextBlock(text=\"The status of order O2 is now 'Cancelled' since I successfully cancelled that order earlier.\", type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 1095; Out: 26; Cache create: 0; Cache read: 0; Total: 1121)" + "Message(id='msg_01XcXpFDwoZ3u1bFDf5mY8x1', content=[TextBlock(text=\"The status of order O2 is now 'Cancelled' since I successfully cancelled that order earlier.\", type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 1092; Out: 26; Cache create: 0; Cache read: 0; Total: 1118)" ] }, "execution_count": null, @@ -680,64 +680,50 @@ "text": [ "Press Enter to execute, or enter \"n\" to skip?\n", "```\n", - "from functools import reduce\n", - "checksum = lambda s: reduce(lambda x, y: x * ord(y), s, 1)\n", - "print(\"Function 'checksum' has been created.\")\n", - "print(\"Example usage: checksum('hello') =\", checksum('hello'))\n", + "checksum = lambda s: functools.reduce(lambda x, y: x * ord(y), s, 1)\n", "```\n", "\n", - "Certainly! I'll create a one-line function called `checksum` that multiplies together the ASCII values of each character in a given string `s` using the `reduce` function. To do this, we'll use the `run_cell` function to execute the Python code. Here's how we'll do it:\n", - "from functools import reduce\n", - "checksum = lambda s: reduce(lambda x, y: x * ord(y), s, 1)\n", - "print(\"Function 'checksum' has been created.\")\n", - "print(\"Example usage: checksum('hello') =\", checksum('hello'))\n", - "Great! The `checksum` function has been created successfully. Let me explain the function:\n", + "Create a 1-line function `checksum` for a string `s`,\n", + "that multiplies together the ascii values of each character in `s` using `reduce`.\n", + "Let me help you create that function using `reduce` and `functools`.\n", + "checksum = lambda s: functools.reduce(lambda x, y: x * ord(y), s, 1)\n", + "The function has been created. Let me explain how it works:\n", + "1. It takes a string `s` as input\n", + "2. Uses `functools.reduce` to multiply together all ASCII values\n", + "3. `ord(y)` gets the ASCII value of each character\n", + "4. The initial value is 1 (the third parameter to reduce)\n", + "5. The lambda function multiplies the accumulator (x) with each new ASCII value\n", "\n", - "1. We import `reduce` from the `functools` module (which is pre-imported in the environment).\n", - "2. The `checksum` function is defined as a lambda function that takes a string `s` as input.\n", - "3. Inside the lambda, we use `reduce` to multiply the ASCII values of each character in the string.\n", - "4. The `reduce` function uses another lambda that multiplies the accumulator `x` by the ASCII value of each character `y` (obtained using `ord(y)`).\n", - "5. The initial value for the reduction is 1, ensuring that the multiplication starts correctly.\n", - "\n", - "As we can see from the example output, calling `checksum('hello')` returns `13599570816`, which is the product of the ASCII values of 'h', 'e', 'l', 'l', and 'o'.\n", - "\n", - "You can now use this `checksum` function with any string. 
For example, if you want to calculate the checksum of another string, you can do so by calling `checksum('your_string_here')`.\n", - "\n", - "Is there anything else you'd like to do with this function or any other string operations you're interested in?\n" + "You can test it with any string. For example, you could try `checksum(\"hello\")` to see it in action.\n" ] }, { "data": { "text/markdown": [ - "Great! The `checksum` function has been created successfully. Let me explain the function:\n", - "\n", - "1. We import `reduce` from the `functools` module (which is pre-imported in the environment).\n", - "2. The `checksum` function is defined as a lambda function that takes a string `s` as input.\n", - "3. Inside the lambda, we use `reduce` to multiply the ASCII values of each character in the string.\n", - "4. The `reduce` function uses another lambda that multiplies the accumulator `x` by the ASCII value of each character `y` (obtained using `ord(y)`).\n", - "5. The initial value for the reduction is 1, ensuring that the multiplication starts correctly.\n", + "The function has been created. Let me explain how it works:\n", + "1. It takes a string `s` as input\n", + "2. Uses `functools.reduce` to multiply together all ASCII values\n", + "3. `ord(y)` gets the ASCII value of each character\n", + "4. The initial value is 1 (the third parameter to reduce)\n", + "5. The lambda function multiplies the accumulator (x) with each new ASCII value\n", "\n", - "As we can see from the example output, calling `checksum('hello')` returns `13599570816`, which is the product of the ASCII values of 'h', 'e', 'l', 'l', and 'o'.\n", - "\n", - "You can now use this `checksum` function with any string. For example, if you want to calculate the checksum of another string, you can do so by calling `checksum('your_string_here')`.\n", - "\n", - "Is there anything else you'd like to do with this function or any other string operations you're interested in?\n", + "You can test it with any string. For example, you could try `checksum(\"hello\")` to see it in action.\n", "\n", "
\n", "\n", - "- id: `msg_01LguR5AhsAdeBbYNRC3oNQM`\n", - "- content: `[{'text': \"Great! The `checksum` function has been created successfully. Let me explain the function:\\n\\n1. We import `reduce` from the `functools` module (which is pre-imported in the environment).\\n2. The `checksum` function is defined as a lambda function that takes a string `s` as input.\\n3. Inside the lambda, we use `reduce` to multiply the ASCII values of each character in the string.\\n4. The `reduce` function uses another lambda that multiplies the accumulator `x` by the ASCII value of each character `y` (obtained using `ord(y)`).\\n5. The initial value for the reduction is 1, ensuring that the multiplication starts correctly.\\n\\nAs we can see from the example output, calling `checksum('hello')` returns `13599570816`, which is the product of the ASCII values of 'h', 'e', 'l', 'l', and 'o'.\\n\\nYou can now use this `checksum` function with any string. For example, if you want to calculate the checksum of another string, you can do so by calling `checksum('your_string_here')`.\\n\\nIs there anything else you'd like to do with this function or any other string operations you're interested in?\", 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_011pcGY9LbYqvRSfDPgCqUkT`\n", + "- content: `[{'text': 'The function has been created. Let me explain how it works:\\n1. It takes a string `s` as input\\n2. Uses `functools.reduce` to multiply together all ASCII values\\n3. `ord(y)` gets the ASCII value of each character\\n4. The initial value is 1 (the third parameter to reduce)\\n5. The lambda function multiplies the accumulator (x) with each new ASCII value\\n\\nYou can test it with any string. For example, you could try `checksum(\"hello\")` to see it in action.', 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 908, 'output_tokens': 281, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 824, 'output_tokens': 125, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01LguR5AhsAdeBbYNRC3oNQM', content=[TextBlock(text=\"Great! The `checksum` function has been created successfully. Let me explain the function:\\n\\n1. We import `reduce` from the `functools` module (which is pre-imported in the environment).\\n2. The `checksum` function is defined as a lambda function that takes a string `s` as input.\\n3. Inside the lambda, we use `reduce` to multiply the ASCII values of each character in the string.\\n4. The `reduce` function uses another lambda that multiplies the accumulator `x` by the ASCII value of each character `y` (obtained using `ord(y)`).\\n5. The initial value for the reduction is 1, ensuring that the multiplication starts correctly.\\n\\nAs we can see from the example output, calling `checksum('hello')` returns `13599570816`, which is the product of the ASCII values of 'h', 'e', 'l', 'l', and 'o'.\\n\\nYou can now use this `checksum` function with any string. For example, if you want to calculate the checksum of another string, you can do so by calling `checksum('your_string_here')`.\\n\\nIs there anything else you'd like to do with this function or any other string operations you're interested in?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 908; Out: 281; Cache create: 0; Cache read: 0; Total: 1189)" + "Message(id='msg_011pcGY9LbYqvRSfDPgCqUkT', content=[TextBlock(text='The function has been created. Let me explain how it works:\\n1. It takes a string `s` as input\\n2. Uses `functools.reduce` to multiply together all ASCII values\\n3. `ord(y)` gets the ASCII value of each character\\n4. The initial value is 1 (the third parameter to reduce)\\n5. The lambda function multiplies the accumulator (x) with each new ASCII value\\n\\nYou can test it with any string. For example, you could try `checksum(\"hello\")` to see it in action.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 824; Out: 125; Cache create: 0; Cache read: 0; Total: 949)" ] }, "execution_count": null, @@ -770,68 +756,39 @@ "output_type": "stream", "text": [ "Looking up username\n", - "Certainly! I'll use the `checksum` function we just created to calculate the checksum of the username for this session. To do this, we'll first need to get the username using the `get_user` function, and then we'll apply the `checksum` function to that username. Here's how we'll do it:\n", + "Use it to get the checksum of the username of this session.\n", + "I'll first get the username using `get_user` and then apply our `checksum` function to it.\n", "get_user({'ignored': ''})\n", "Press Enter to execute, or enter \"n\" to skip?\n", "```\n", - "username = \"Jeremy\"\n", - "result = checksum(username)\n", - "print(f\"The checksum of the username '{username}' is: {result}\")\n", + "print(checksum(\"Jeremy\"))\n", "```\n", "\n", - "Now that we have the username \"Jeremy\", let's calculate its checksum:\n", - "username = \"Jeremy\"\n", - "result = checksum(username)\n", - "print(f\"The checksum of the username '{username}' is: {result}\")\n", - "There you have it! The checksum of the username \"Jeremy\" for this session is 1134987783204.\n", - "\n", - "To break it down:\n", - "1. We first retrieved the username \"Jeremy\" using the `get_user` function.\n", - "2. Then we used our previously defined `checksum` function to calculate the checksum of this username.\n", - "3. 
The result, 1134987783204, is the product of the ASCII values of each character in \"Jeremy\".\n", - "\n", - "For verification, we can manually calculate this:\n", - "- ASCII values: J (74), e (101), r (114), e (101), m (109), y (121)\n", - "- 74 * 101 * 114 * 101 * 109 * 121 = 1134987783204\n", - "\n", - "This confirms that our `checksum` function is working correctly for the username of this session.\n", - "\n", - "Is there anything else you'd like to do with the username or the checksum function?\n" + "Now I'll calculate the checksum of \"Jeremy\":\n", + "print(checksum(\"Jeremy\"))\n", + "The checksum of the username \"Jeremy\" is 1134987783204. This was calculated by multiplying together the ASCII values of each character in \"Jeremy\".\n" ] }, { "data": { "text/markdown": [ - "There you have it! The checksum of the username \"Jeremy\" for this session is 1134987783204.\n", - "\n", - "To break it down:\n", - "1. We first retrieved the username \"Jeremy\" using the `get_user` function.\n", - "2. Then we used our previously defined `checksum` function to calculate the checksum of this username.\n", - "3. The result, 1134987783204, is the product of the ASCII values of each character in \"Jeremy\".\n", - "\n", - "For verification, we can manually calculate this:\n", - "- ASCII values: J (74), e (101), r (114), e (101), m (109), y (121)\n", - "- 74 * 101 * 114 * 101 * 109 * 121 = 1134987783204\n", - "\n", - "This confirms that our `checksum` function is working correctly for the username of this session.\n", - "\n", - "Is there anything else you'd like to do with the username or the checksum function?\n", + "The checksum of the username \"Jeremy\" is 1134987783204. This was calculated by multiplying together the ASCII values of each character in \"Jeremy\".\n", "\n", "
\n", "\n", - "- id: `msg_01Htvo4rw9rBaozPapFy8XQE`\n", - "- content: `[{'text': 'There you have it! The checksum of the username \"Jeremy\" for this session is 1134987783204.\\n\\nTo break it down:\\n1. We first retrieved the username \"Jeremy\" using the `get_user` function.\\n2. Then we used our previously defined `checksum` function to calculate the checksum of this username.\\n3. The result, 1134987783204, is the product of the ASCII values of each character in \"Jeremy\".\\n\\nFor verification, we can manually calculate this:\\n- ASCII values: J (74), e (101), r (114), e (101), m (109), y (121)\\n- 74 * 101 * 114 * 101 * 109 * 121 = 1134987783204\\n\\nThis confirms that our `checksum` function is working correctly for the username of this session.\\n\\nIs there anything else you\\'d like to do with the username or the checksum function?', 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_01UXvtcLzzykZpnQUT35v4uD`\n", + "- content: `[{'text': 'The checksum of the username \"Jeremy\" is 1134987783204. This was calculated by multiplying together the ASCII values of each character in \"Jeremy\".', 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 1474, 'output_tokens': 215, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 1143, 'output_tokens': 38, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01Htvo4rw9rBaozPapFy8XQE', content=[TextBlock(text='There you have it! The checksum of the username \"Jeremy\" for this session is 1134987783204.\\n\\nTo break it down:\\n1. We first retrieved the username \"Jeremy\" using the `get_user` function.\\n2. Then we used our previously defined `checksum` function to calculate the checksum of this username.\\n3. The result, 1134987783204, is the product of the ASCII values of each character in \"Jeremy\".\\n\\nFor verification, we can manually calculate this:\\n- ASCII values: J (74), e (101), r (114), e (101), m (109), y (121)\\n- 74 * 101 * 114 * 101 * 109 * 121 = 1134987783204\\n\\nThis confirms that our `checksum` function is working correctly for the username of this session.\\n\\nIs there anything else you\\'d like to do with the username or the checksum function?', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 1474; Out: 215; Cache create: 0; Cache read: 0; Total: 1689)" + "Message(id='msg_01UXvtcLzzykZpnQUT35v4uD', content=[TextBlock(text='The checksum of the username \"Jeremy\" is 1134987783204. This was calculated by multiplying together the ASCII values of each character in \"Jeremy\".', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 1143; Out: 38; Cache create: 0; Cache read: 0; Total: 1181)" ] }, "execution_count": null, diff --git a/02_async.ipynb b/02_async.ipynb index 85b3eff..441bc52 100644 --- a/02_async.ipynb +++ b/02_async.ipynb @@ -572,7 +572,6 @@ " **kw):\n", " await self._append_pr(pr)\n", " if self.tools: kw['tools'] = [get_schema(o) for o in self.tools]\n", - " if self.tool_choice and pr: kw['tool_choice'] = mk_tool_choice(self.tool_choice)\n", " res = await self.c(self.h, stream=stream, prefill=prefill, sp=self.sp, temp=temp, maxtok=maxtok, **kw)\n", " if stream: return self._stream(res)\n", " self.h += mk_toolres(self.c.result, ns=self.tools, obj=self)\n", diff --git a/README.md b/README.md index 9180c72..49eef18 100644 --- a/README.md +++ b/README.md @@ -89,9 +89,9 @@ available from the SDK. models ``` - ('claude-3-opus-20240229', - 'claude-3-5-sonnet-20240620', - 'claude-3-haiku-20240307') + ['claude-3-opus-20240229', + 'claude-3-5-sonnet-20241022', + 'claude-3-haiku-20240307'] For these examples, we’ll use Sonnet 3.5, since it’s awesome! @@ -110,20 +110,20 @@ chat = Chat(model, sp="""You are a helpful and concise assistant.""") chat("I'm Jeremy") ``` -Hello Jeremy, it’s nice to meet you. How can I assist you today? +Hello Jeremy, nice to meet you.
-- id: `msg_014J96J6f9Bxrmyr7uA5Z4E3` +- id: `msg_015oK9jEcra3TEKHUGYULjWB` - content: - `[{'text': "Hello Jeremy, it's nice to meet you. How can I assist you today?", 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` + `[{'text': 'Hello Jeremy, nice to meet you.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` - role: `assistant` - stop_reason: `end_turn` - stop_sequence: `None` - type: `message` - usage: - `{'input_tokens': 19, 'output_tokens': 20, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + `{'input_tokens': 19, 'output_tokens': 11, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
@@ -132,20 +132,40 @@ r = chat("What's my name?") r ``` -Your name is Jeremy, as you just told me. +Your name is Jeremy.
-- id: `msg_01RpP5rBhFK34UkZwAiMnL85` -- content: - `[{'text': 'Your name is Jeremy, as you just told me.', 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` +- id: `msg_01Si8sTFJe8d8vq7enanbAwj` +- content: `[{'text': 'Your name is Jeremy.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` - role: `assistant` - stop_reason: `end_turn` - stop_sequence: `None` - type: `message` - usage: - `{'input_tokens': 47, 'output_tokens': 14, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + `{'input_tokens': 38, 'output_tokens': 8, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + +
+ +``` python +r = chat("What's my name?") +r +``` + +Your name is Jeremy. + +
+ +- id: `msg_01BHWRoAX8eBsoLn2bzpBkvx` +- content: `[{'text': 'Your name is Jeremy.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 54, 'output_tokens': 8, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
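
Each call to [`Chat`](https://claudette.answer.ai/core.html#chat) appends the new user message and Claude's reply to the conversation state, which is what makes the follow-up question above work. A minimal sketch of inspecting that state (assuming the history list is exposed as the `h` attribute, as in the async client source):

``` python
# The conversation is just an accumulated list of messages (assumption: `chat.h`).
print(len(chat.h))  # grows by two (user + assistant) with every call
```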
@@ -157,7 +177,7 @@ collapsible section. Alternatively you can `print` the details: print(r) ``` - Message(id='msg_01RpP5rBhFK34UkZwAiMnL85', content=[TextBlock(text='Your name is Jeremy, as you just told me.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 47; Out: 14; Total: 61) + Message(id='msg_01BHWRoAX8eBsoLn2bzpBkvx', content=[TextBlock(text='Your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 54; Out: 8; Cache create: 0; Cache read: 0; Total: 62) Claude supports adding an extra `assistant` message at the end, which contains the *prefill* – i.e. the text we want Claude to assume the @@ -168,21 +188,21 @@ chat("Concisely, what is the meaning of life?", prefill='According to Douglas Adams,') ``` -According to Douglas Adams, “42.” More seriously, it’s often considered -to be finding personal fulfillment, happiness, and purpose. +According to Douglas Adams,42. Philosophically, it’s to find personal +meaning through relationships, purpose, and experiences.
-- id: `msg_01Qgsa4a7cdPJkCrm989emJ5` +- id: `msg_01R9RvMdFwea9iRX5uYSSHG7` - content: - `[{'text': 'According to Douglas Adams, "42." More seriously, it\'s often considered to be finding personal fulfillment, happiness, and purpose.', 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` + `[{'text': "According to Douglas Adams,42. Philosophically, it's to find personal meaning through relationships, purpose, and experiences.", 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` - role: `assistant` - stop_reason: `end_turn` - stop_sequence: `None` - type: `message` - usage: - `{'input_tokens': 81, 'output_tokens': 27, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + `{'input_tokens': 82, 'output_tokens': 23, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
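
Under the hood, prefill is simply the Messages API's trailing `assistant` turn. A rough sketch of the equivalent call using the `anthropic` SDK directly (not needed when using Claudette, which builds this for you):

``` python
import anthropic

client = anthropic.Anthropic()
resp = client.messages.create(
    model=model, max_tokens=100,
    messages=[
        {"role": "user", "content": "Concisely, what is the meaning of life?"},
        # Trailing assistant message = prefill; Claude continues from here
        {"role": "assistant", "content": "According to Douglas Adams,"},
    ],
)
print(resp.content[0].text)  # the continuation only; the prefill itself is not repeated
```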
@@ -197,6 +217,46 @@ for o in chat("Concisely, what book was that in?", prefill='It was in', stream=T It was in "The Hitchhiker's Guide to the Galaxy" by Douglas Adams. +### Async + +Alternatively, you can use +[`AsyncChat`](https://claudette.answer.ai/async.html#asyncchat) (or +[`AsyncClient`](https://claudette.answer.ai/async.html#asyncclient)) for +the async versions, e.g: + +``` python +chat = AsyncChat(model) +await chat("I'm Jeremy") +``` + +Hi Jeremy! Nice to meet you. I’m Claude, an AI assistant created by +Anthropic. How can I help you today? + +
+ +- id: `msg_016Q8cdc3sPWBS8eXcNj841L` +- content: + `[{'text': "Hi Jeremy! Nice to meet you. I'm Claude, an AI assistant created by Anthropic. How can I help you today?", 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 10, 'output_tokens': 31, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + +
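
Since each [`AsyncChat`](https://claudette.answer.ai/async.html#asyncchat) call is a coroutine, independent conversations can run concurrently. A minimal sketch using `asyncio.gather` (each chat still processes its own turns in order):

``` python
import asyncio

async def ask(prompt):
    c = AsyncChat(model)        # a separate conversation per task
    r = await c(prompt)
    return r.content[0].text    # text of the first content block

answers = await asyncio.gather(ask("What is 2+2?"), ask("Name one prime number."))
```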
+ +Remember to use `async for` when streaming in this case: + +``` python +async for o in await chat("Concisely, what is the meaning of life?", + prefill='According to Douglas Adams,', stream=True): + print(o, end='') +``` + + According to Douglas Adams, it's 42. But in my view, there's no single universal meaning - each person must find their own purpose through relationships, personal growth, contribution to others, and pursuit of what they find meaningful. + ## Prompt caching If you use `mk_msg(msg, cache=True)`, then the message is cached using @@ -219,45 +279,38 @@ r = chat(mk_msg(msg, cache=True)) r ``` -Based on the readme, the main purpose of the Claudette project is to -provide a high-level wrapper around Anthropic’s Python SDK for -interacting with Claude AI models. Key features and goals include: - -1. Automating and simplifying interactions with Claude, reducing - boilerplate code. +Claudette is a high-level wrapper for Anthropic’s Python SDK that +automates common tasks and provides additional functionality. Its main +features include: -2. Providing a stateful dialog interface through the - [`Chat`](https://claudette.answer.ai/core.html#chat) class. +1. A Chat class for stateful dialogs +2. Support for prefill (controlling Claude’s initial response words) +3. Convenient image handling +4. Simple tool use API integration +5. Support for multiple model providers (Anthropic, AWS Bedrock, Google + Vertex) -3. Supporting features like prefill (specifying the start of Claude’s - response) and image handling. +The project is notable for being the first “literate nbdev” project, +meaning its source code is written as a detailed, readable Jupyter +Notebook that includes explanations, examples, and teaching material +alongside the functional code. -4. Offering convenient support for Claude’s Tool Use API. - -5. Serving as an example of “literate programming”, with the source - code designed to be readable and educational, including explanations - of how and why the code is written. - -6. Supporting multiple model providers, including direct Anthropic API - access as well as Claude models available through Amazon Bedrock and - Google Vertex AI. - -The project aims to make working with Claude models more convenient and -accessible for developers while also serving as an educational resource -on how to effectively use and interact with these AI models. +The goal is to simplify working with Claude’s API while maintaining full +control, reducing boilerplate code and manual work that would otherwise +be needed with the base SDK.
-- id: `msg_01HkjoKjfY5zrmBrkjHvtDpG` +- id: `msg_014rVQnYoZXZuyWUCMELG1QW` - content: - `[{'text': 'Based on the readme, the main purpose of the Claudette project is to provide a high-level wrapper around Anthropic\'s Python SDK for interacting with Claude AI models. Key features and goals include:\n\n1. Automating and simplifying interactions with Claude, reducing boilerplate code.\n\n2. Providing a stateful dialog interface through the [`Chat`](https://claudette.answer.ai/core.html#chat) class.\n\n3. Supporting features like prefill (specifying the start of Claude\'s response) and image handling.\n\n4. Offering convenient support for Claude\'s Tool Use API.\n\n5. Serving as an example of "literate programming", with the source code designed to be readable and educational, including explanations of how and why the code is written.\n\n6. Supporting multiple model providers, including direct Anthropic API access as well as Claude models available through Amazon Bedrock and Google Vertex AI.\n\nThe project aims to make working with Claude models more convenient and accessible for developers while also serving as an educational resource on how to effectively use and interact with these AI models.', 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` + `[{'text': 'Claudette is a high-level wrapper for Anthropic\'s Python SDK that automates common tasks and provides additional functionality. Its main features include:\n\n1. A Chat class for stateful dialogs\n2. Support for prefill (controlling Claude\'s initial response words)\n3. Convenient image handling\n4. Simple tool use API integration\n5. Support for multiple model providers (Anthropic, AWS Bedrock, Google Vertex)\n\nThe project is notable for being the first "literate nbdev" project, meaning its source code is written as a detailed, readable Jupyter Notebook that includes explanations, examples, and teaching material alongside the functional code.\n\nThe goal is to simplify working with Claude\'s API while maintaining full control, reducing boilerplate code and manual work that would otherwise be needed with the base SDK.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` - role: `assistant` - stop_reason: `end_turn` - stop_sequence: `None` - type: `message` - usage: - `{'input_tokens': 4, 'output_tokens': 220, 'cache_creation_input_tokens': 7171, 'cache_read_input_tokens': 0}` + `{'input_tokens': 4, 'output_tokens': 179, 'cache_creation_input_tokens': 7205, 'cache_read_input_tokens': 0}`
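
For reference, `mk_msg(msg, cache=True)` corresponds to the Messages API's `cache_control` marker on a content block. Roughly, the user message sent above looks like this (a sketch of the wire format, not Claudette's internals):

``` python
cached_msg = {
    "role": "user",
    "content": [{
        "type": "text",
        "text": msg,  # the long readme-based prompt from above
        # marks a cache breakpoint for Anthropic prompt caching
        "cache_control": {"type": "ephemeral"},
    }],
}
```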
@@ -268,7 +321,7 @@ tokens: print(r.usage) ``` - Usage(input_tokens=4, output_tokens=220, cache_creation_input_tokens=7171, cache_read_input_tokens=0) + Usage(input_tokens=4, output_tokens=179, cache_creation_input_tokens=7205, cache_read_input_tokens=0) We can now ask a followup question in this chat: @@ -277,51 +330,53 @@ r = chat('How does it make tool use more ergonomic?') r ``` -Claudette makes tool use more ergonomic in several ways: +According to the README, Claudette makes tool use more ergonomic in +several ways: -1. Simplified function definition: It uses docments to make defining - Python functions for tools as simple as possible. Each parameter and - the return value should have a type and a description. +1. It uses docments to make Python function definitions more + user-friendly - each parameter and return value should have a type + and description -2. Automatic handling: The - [`Chat`](https://claudette.answer.ai/core.html#chat) class can be - initialized with a list of tools, and Claudette handles the - back-and-forth between Claude and the tools automatically. +2. It handles the tool calling process automatically - when Claude + returns a tool_use message, Claudette manages calling the tool with + the provided parameters behind the scenes -3. Single-step execution: The - [`Chat.toolloop`](https://claudette.answer.ai/toolloop.html#chat.toolloop) - method allows for executing a series of tool calls in a single step, - even if multiple tools are needed to solve a problem. +3. It provides a `toolloop` method that can handle multiple tool calls + in a single step to solve more complex problems -4. Forced tool use: You can set `tool_choice` to force Claude to always - answer using a specific tool. +4. It allows you to pass a list of tools to the Chat constructor and + optionally force Claude to always use a specific tool via + `tool_choice` -5. Tracing: The `toolloop` method supports a `trace_func` parameter, - allowing you to see each response from Claude during the process. +Here’s a simple example from the README: -6. Automatic parameter passing: When Claude decides to use a tool, - Claudette automatically calls the tool with the provided parameters. +``` python +def sums( + a:int, # First thing to sum + b:int=1 # Second thing to sum +) -> int: # The sum of the inputs + "Adds a + b." + print(f"Finding the sum of {a} and {b}") + return a + b -7. System prompt integration: It allows setting a system prompt to - guide Claude’s behavior when using tools, such as instructing it not - to mention the tools it’s using. +chat = Chat(model, sp=sp, tools=[sums], tool_choice='sums') +``` -These features significantly reduce the amount of code and manual -handling required to use Claude’s tool use capabilities, making the -process more streamlined and developer-friendly. +This makes it much simpler compared to manually handling all the tool +use logic that would be required with the base SDK.
-- id: `msg_01EfFbEBeYETAvfjGncn8Vcb` +- id: `msg_01EdUvvFBnpPxMtdLRCaSZAU` - content: - `[{'text': "Claudette makes tool use more ergonomic in several ways:\n\n1. Simplified function definition: It uses docments to make defining Python functions for tools as simple as possible. Each parameter and the return value should have a type and a description.\n\n2. Automatic handling: The [`Chat`](https://claudette.answer.ai/core.html#chat) class can be initialized with a list of tools, and Claudette handles the back-and-forth between Claude and the tools automatically.\n\n3. Single-step execution: The [`Chat.toolloop`](https://claudette.answer.ai/toolloop.html#chat.toolloop) method allows for executing a series of tool calls in a single step, even if multiple tools are needed to solve a problem.\n\n4. Forced tool use: You can set`tool_choice`to force Claude to always answer using a specific tool.\n\n5. Tracing: The`toolloop`method supports a`trace_func`parameter, allowing you to see each response from Claude during the process.\n\n6. Automatic parameter passing: When Claude decides to use a tool, Claudette automatically calls the tool with the provided parameters.\n\n7. System prompt integration: It allows setting a system prompt to guide Claude's behavior when using tools, such as instructing it not to mention the tools it's using.\n\nThese features significantly reduce the amount of code and manual handling required to use Claude's tool use capabilities, making the process more streamlined and developer-friendly.", 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` + `[{'text': 'According to the README, Claudette makes tool use more ergonomic in several ways:\n\n1. It uses docments to make Python function definitions more user-friendly - each parameter and return value should have a type and description\n\n2. It handles the tool calling process automatically - when Claude returns a tool_use message, Claudette manages calling the tool with the provided parameters behind the scenes\n\n3. It provides a`toolloop`method that can handle multiple tool calls in a single step to solve more complex problems\n\n4. It allows you to pass a list of tools to the Chat constructor and optionally force Claude to always use a specific tool via`tool_choice```` \n\nHere\'s a simple example from the README:\n\n```python\ndef sums(\n a:int, # First thing to sum \n b:int=1 # Second thing to sum\n) -> int: # The sum of the inputs\n "Adds a + b."\n print(f"Finding the sum of {a} and {b}")\n return a + b\n\nchat = Chat(model, sp=sp, tools=[sums], tool_choice=\'sums\')\n```\n\nThis makes it much simpler compared to manually handling all the tool use logic that would be required with the base SDK.', 'type': 'text'}] ```` +- model: `claude-3-5-sonnet-20241022` - role: `assistant` - stop_reason: `end_turn` - stop_sequence: `None` - type: `message` - usage: - `{'input_tokens': 240, 'output_tokens': 289, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 7171}` + `{'input_tokens': 197, 'output_tokens': 280, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 7205}`
@@ -332,7 +387,13 @@ context tokens have been read from cache. print(r.usage) ``` - Usage(input_tokens=240, output_tokens=289, cache_creation_input_tokens=0, cache_read_input_tokens=7171) + Usage(input_tokens=197, output_tokens=280, cache_creation_input_tokens=0, cache_read_input_tokens=7205) + +``` python +chat.use +``` + + In: 201; Out: 459; Cache create: 7205; Cache read: 7205; Total: 15070 ## Tool use @@ -374,39 +435,39 @@ pr 'What is 604542+6458932?' To use tools, pass a list of them to -[`Chat`](https://claudette.answer.ai/core.html#chat), and to force it to -always answer using a tool, set `tool_choice` to that function name: +[`Chat`](https://claudette.answer.ai/core.html#chat): ``` python -chat = Chat(model, sp=sp, tools=[sums], tool_choice='sums') +chat = Chat(model, sp=sp, tools=[sums]) ``` -Now when we call that with our prompt, Claude doesn’t return the answer, -but instead returns a `tool_use` message, which means we have to call -the named tool with the provided parameters: +To force Claude to always answer using a tool, set `tool_choice` to that +function name. When Claude needs to use a tool, it doesn’t return the +answer, but instead returns a `tool_use` message, which means we have to +call the named tool with the provided parameters. ``` python -r = chat(pr) +r = chat(pr, tool_choice='sums') r ``` Finding the sum of 604542 and 6458932 -ToolUseBlock(id=‘toolu_01C6G2iuLtBBftESiujKzXfx’, input={‘a’: 604542, +ToolUseBlock(id=‘toolu_014ip2xWyEq8RnAccVT4SySt’, input={‘a’: 604542, ‘b’: 6458932}, name=‘sums’, type=‘tool_use’)
-- id: `msg_01HPZwX3mQ7sMbjWUHEwgUsT` +- id: `msg_014xrPyotyiBmFSctkp1LZHk` - content: - `[{'id': 'toolu_01C6G2iuLtBBftESiujKzXfx', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]` -- model: `claude-3-5-sonnet-20240620` + `[{'id': 'toolu_014ip2xWyEq8RnAccVT4SySt', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]` +- model: `claude-3-5-sonnet-20241022` - role: `assistant` - stop_reason: `tool_use` - stop_sequence: `None` - type: `message` - usage: - `{'input_tokens': 414, 'output_tokens': 53, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + `{'input_tokens': 442, 'output_tokens': 53, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
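
At the API level, the next step is to run `sums(604542, 6458932)` and send the result back as a `tool_result` block; Claudette builds roughly the following message for us (a sketch of the wire format):

``` python
tool_result_msg = {
    "role": "user",
    "content": [{
        "type": "tool_result",
        "tool_use_id": r.content[0].id,  # id of the ToolUseBlock above
        "content": "7063474",            # the value returned by sums()
    }],
}
```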
@@ -421,16 +482,16 @@ The sum of 604542 and 6458932 is 7063474.
-- id: `msg_01RNHxf1jXfS76h2UpF8RnZ2` +- id: `msg_01151puJxG8Fa6k6QSmzwKQA` - content: `[{'text': 'The sum of 604542 and 6458932 is 7063474.', 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` +- model: `claude-3-5-sonnet-20241022` - role: `assistant` - stop_reason: `end_turn` - stop_sequence: `None` - type: `message` - usage: - `{'input_tokens': 515, 'output_tokens': 23, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + `{'input_tokens': 524, 'output_tokens': 23, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
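
The two calls above (one to get the `tool_use` request, one to send back the result) can be wrapped in a small loop by hand; [`Chat.toolloop`](https://claudette.answer.ai/toolloop.html#chat.toolloop), shown below, packages this up for you. A rough sketch of the manual version:

``` python
def run_with_tools(chat, pr, max_steps=5):
    "Keep calling `chat` until Claude stops requesting tools (sketch only)."
    r = chat(pr)
    for _ in range(max_steps):
        if r.stop_reason != 'tool_use': break
        r = chat()  # Claudette sends the pending tool result back automatically
    return r
```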
@@ -442,7 +503,7 @@ You can see how many tokens have been used at any time by checking the chat.use ``` - In: 929; Out: 76; Total: 1005 + In: 966; Out: 76; Cache create: 0; Cache read: 0; Total: 1042 We can do everything needed to use tools in a single step, by using [`Chat.toolloop`](https://claudette.answer.ai/toolloop.html#chat.toolloop). @@ -475,35 +536,81 @@ chat.toolloop(pr, trace_func=print) ``` Finding the sum of 604542 and 6458932 - Message(id='msg_01DgZ9Fcs6h8HB7qEaFkg3Ah', content=[TextBlock(text='Certainly! To calculate (604542+6458932)*2, we\'ll need to use the available tools to perform the addition and multiplication operations. Let\'s break it down step by step:\n\n1. First, we\'ll add 604542 and 6458932 using the "sums" function.\n2. Then, we\'ll multiply the result by 2 using the "mults" function.\n\nLet\'s start with the addition:', type='text'), ToolUseBlock(id='toolu_01XTMLyKo9Q6TX4SpCVmmUsP', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 538; Out: 168; Total: 706) + [{'role': 'user', 'content': [{'type': 'text', 'text': 'Calculate (604542+6458932)*2'}]}, {'role': 'assistant', 'content': [TextBlock(text="I'll help you break this down into steps:\n\nFirst, let's add those numbers:", type='text'), ToolUseBlock(id='toolu_01St5UKxYUU4DKC96p2PjgcD', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')]}, {'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_01St5UKxYUU4DKC96p2PjgcD', 'content': '7063474'}]}] Finding the product of 7063474 and 2 - Message(id='msg_011P8jdxwLoKNf8nTDua7VM8', content=[TextBlock(text="Great! The sum of 604542 and 6458932 is 7063474.\n\nNow, let's multiply this result by 2:", type='text'), ToolUseBlock(id='toolu_01FETWkj4a9HyX25c8ETULYh', input={'a': 7063474, 'b': 2}, name='mults', type='tool_use')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 721; Out: 106; Total: 827) - Message(id='msg_01UXwy69vUWSy9bK24skQ3yE', content=[TextBlock(text='Now we have our final result. \n\nThe calculation (604542+6458932)*2 equals 14126948.\n\nTo break it down:\n1. 604542 + 6458932 = 7063474\n2. 7063474 * 2 = 14126948\n\nSo, the final answer to (604542+6458932)*2 is 14126948.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 841; Out: 95; Total: 936) - -Now we have our final result. - -The calculation (604542+6458932)\*2 equals 14126948. + [{'role': 'assistant', 'content': [TextBlock(text="Now, let's multiply this result by 2:", type='text'), ToolUseBlock(id='toolu_01FpmRG4ZskKEWN1gFZzx49s', input={'a': 7063474, 'b': 2}, name='mults', type='tool_use')]}, {'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_01FpmRG4ZskKEWN1gFZzx49s', 'content': '14126948'}]}] + [{'role': 'assistant', 'content': [TextBlock(text='The final result is 14,126,948.', type='text')]}] -To break it down: 1. 604542 + 6458932 = 7063474 2. 7063474 \* 2 = -14126948 - -So, the final answer to (604542+6458932)\*2 is 14126948. +The final result is 14,126,948.
-- id: `msg_01UXwy69vUWSy9bK24skQ3yE` +- id: `msg_0162teyBcJHriUzZXMPz4r5d` - content: - `[{'text': 'Now we have our final result. \n\nThe calculation (604542+6458932)*2 equals 14126948.\n\nTo break it down:\n1. 604542 + 6458932 = 7063474\n2. 7063474 * 2 = 14126948\n\nSo, the final answer to (604542+6458932)*2 is 14126948.', 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` + `[{'text': 'The final result is 14,126,948.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` - role: `assistant` - stop_reason: `end_turn` - stop_sequence: `None` - type: `message` - usage: - `{'input_tokens': 841, 'output_tokens': 95, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + `{'input_tokens': 741, 'output_tokens': 15, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
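
`trace_func` doesn't have to be `print`; any callable works, for instance collecting the intermediate exchanges for later inspection (a small sketch):

``` python
steps = []
chat = Chat(model, sp=sp, tools=[sums, mults])
chat.toolloop('Calculate (604542+6458932)*2', trace_func=steps.append)
print(len(steps))  # one entry per round trip in the loop
```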
+## Structured data + +If you just want the immediate result from a single tool, use +[`Client.structured`](https://claudette.answer.ai/core.html#client.structured). + +``` python +cli = Client(model) +``` + +``` python +def sums( + a:int, # First thing to sum + b:int=1 # Second thing to sum +) -> int: # The sum of the inputs + "Adds a + b." + print(f"Finding the sum of {a} and {b}") + return a + b +``` + +``` python +cli.structured("What is 604542+6458932", sums) +``` + + Finding the sum of 604542 and 6458932 + + [7063474] + +This is particularly useful for getting back structured information, +e.g: + +``` python +class President: + "Information about a president of the United States" + def __init__(self, + first:str, # first name + last:str, # last name + spouse:str, # name of spouse + years_in_office:str, # format: "{start_year}-{end_year}" + birthplace:str, # name of city + birth_year:int # year of birth, `0` if unknown + ): + assert re.match(r'\d{4}-\d{4}', years_in_office), "Invalid format: `years_in_office`" + store_attr() + + __repr__ = basic_repr('first, last, spouse, years_in_office, birthplace, birth_year') +``` + +``` python +cli.structured("Provide key information about the 3rd President of the United States", President) +``` + + [President(first='Thomas', last='Jefferson', spouse='Martha Wayles', years_in_office='1801-1809', birthplace='Shadwell', birth_year=1743)] + ## Images Claude can handle image data as well. As everyone knows, when testing @@ -514,7 +621,7 @@ fn = Path('samples/puppy.jpg') display.Image(filename=fn, width=200) ``` - We create a [`Chat`](https://claudette.answer.ai/core.html#chat) object @@ -536,22 +643,21 @@ Prompts to Claudette can be lists, containing text, images, or both, eg: chat([img, "In brief, what color flowers are in this image?"]) ``` -The flowers in this image are purple. They appear to be small, -daisy-like flowers, possibly asters or some type of purple daisy, -blooming in the background behind the adorable puppy in the foreground. +In this adorable puppy photo, there are purple/lavender colored flowers +(appears to be asters or similar daisy-like flowers) in the background.
-- id: `msg_01XtkdWMWHVppHqtiv7gdmtA` +- id: `msg_01LHjGv1WwFvDsWUbyLmTEKT` - content: - `[{'text': 'The flowers in this image are purple. They appear to be small, daisy-like flowers, possibly asters or some type of purple daisy, blooming in the background behind the adorable puppy in the foreground.', 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` + `[{'text': 'In this adorable puppy photo, there are purple/lavender colored flowers (appears to be asters or similar daisy-like flowers) in the background.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` - role: `assistant` - stop_reason: `end_turn` - stop_sequence: `None` - type: `message` - usage: - `{'input_tokens': 110, 'output_tokens': 50, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + `{'input_tokens': 110, 'output_tokens': 37, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
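
Behind the scenes, an image prompt is sent as a base64 `image` block in the Messages API. Roughly, the bytes above become (a sketch of the wire format):

``` python
import base64

img_block = {
    "type": "image",
    "source": {
        "type": "base64",
        "media_type": "image/jpeg",              # samples/puppy.jpg
        "data": base64.b64encode(img).decode(),  # `img` is the bytes read above
    },
}
# a mixed prompt is then just [img_block, {"type": "text", "text": "..."}]
```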
@@ -561,7 +667,7 @@ The image is included as input tokens. chat.use ``` - In: 110; Out: 50; Total: 160 + In: 110; Out: 37; Cache create: 0; Cache read: 0; Total: 147 Alternatively, Claudette supports creating a multi-stage chat with separate image and text prompts. For instance, you can pass just the @@ -574,36 +680,28 @@ chat = Chat(model) chat(img) ``` -This image shows an adorable puppy lying in the grass. The puppy appears -to be a Cavalier King Charles Spaniel or a similar breed, with -distinctive white and reddish-brown fur coloring. Its face is -predominantly white with large, expressive eyes and a small black nose. - -The puppy is resting on a grassy surface, and behind it, you can see -some purple flowers, likely asters or michaelmas daisies. These flowers -provide a lovely contrast to the puppy’s fur colors. In the background, -there seems to be a wooden structure, possibly a fence or the side of a -building, giving the scene a rustic, outdoor feel. - -The composition of the image is quite charming, with the puppy as the -main focus in the foreground and the flowers adding a soft, colorful -backdrop. The lighting appears natural, highlighting the puppy’s soft -fur and the delicate petals of the flowers. Overall, it’s a heartwarming -scene that captures the innocence and cuteness of a young dog in a -picturesque outdoor setting. +What an adorable Cavalier King Charles Spaniel puppy! The photo captures +the classic brown and white coloring of the breed, with those soulful +dark eyes that are so characteristic. The puppy is lying in the grass, +and there are lovely purple asters blooming in the background, creating +a beautiful natural setting. The combination of the puppy’s sweet +expression and the delicate flowers makes for a charming composition. +Cavalier King Charles Spaniels are known for their gentle, affectionate +nature, and this little one certainly seems to embody those traits with +its endearing look.
-- id: `msg_01Simo36wFes3M21SXZFGBT2` +- id: `msg_01Ciyymq44uwp2iYwRZdKWNN` - content: - `[{'text': "This image shows an adorable puppy lying in the grass. The puppy appears to be a Cavalier King Charles Spaniel or a similar breed, with distinctive white and reddish-brown fur coloring. Its face is predominantly white with large, expressive eyes and a small black nose.\n\nThe puppy is resting on a grassy surface, and behind it, you can see some purple flowers, likely asters or michaelmas daisies. These flowers provide a lovely contrast to the puppy's fur colors. In the background, there seems to be a wooden structure, possibly a fence or the side of a building, giving the scene a rustic, outdoor feel.\n\nThe composition of the image is quite charming, with the puppy as the main focus in the foreground and the flowers adding a soft, colorful backdrop. The lighting appears natural, highlighting the puppy's soft fur and the delicate petals of the flowers. Overall, it's a heartwarming scene that captures the innocence and cuteness of a young dog in a picturesque outdoor setting.", 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` + `[{'text': "What an adorable Cavalier King Charles Spaniel puppy! The photo captures the classic brown and white coloring of the breed, with those soulful dark eyes that are so characteristic. The puppy is lying in the grass, and there are lovely purple asters blooming in the background, creating a beautiful natural setting. The combination of the puppy's sweet expression and the delicate flowers makes for a charming composition. Cavalier King Charles Spaniels are known for their gentle, affectionate nature, and this little one certainly seems to embody those traits with its endearing look.", 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` - role: `assistant` - stop_reason: `end_turn` - stop_sequence: `None` - type: `message` - usage: - `{'input_tokens': 98, 'output_tokens': 232, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + `{'input_tokens': 98, 'output_tokens': 130, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
@@ -611,26 +709,25 @@ picturesque outdoor setting. chat('What direction is the puppy facing?') ``` -The puppy in the image is facing towards the left side of the frame. Its -head is turned slightly, allowing us to see most of its face, including -both eyes, its nose, and part of its mouth. The puppy’s body is angled -diagonally, with its front paws visible and resting on the grass. This -positioning gives the viewer a good look at the puppy’s adorable facial -features and the distinctive coloring of its fur, while also creating an -engaging composition within the photograph. +The puppy is facing towards the left side of the image. Its head is +positioned so we can see its right side profile, though it appears to be +looking slightly towards the camera, giving us a good view of its +distinctive brown and white facial markings and one of its dark eyes. +The puppy is lying down with its white chest/front visible against the +green grass.
-- id: `msg_019YhPzDxXXjrcpjaS6G8fmF` +- id: `msg_01AeR9eWjbxa788YF97iErtN` - content: - `[{'text': "The puppy in the image is facing towards the left side of the frame. Its head is turned slightly, allowing us to see most of its face, including both eyes, its nose, and part of its mouth. The puppy's body is angled diagonally, with its front paws visible and resting on the grass. This positioning gives the viewer a good look at the puppy's adorable facial features and the distinctive coloring of its fur, while also creating an engaging composition within the photograph.", 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` + `[{'text': 'The puppy is facing towards the left side of the image. Its head is positioned so we can see its right side profile, though it appears to be looking slightly towards the camera, giving us a good view of its distinctive brown and white facial markings and one of its dark eyes. The puppy is lying down with its white chest/front visible against the green grass.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` - role: `assistant` - stop_reason: `end_turn` - stop_sequence: `None` - type: `message` - usage: - `{'input_tokens': 341, 'output_tokens': 108, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + `{'input_tokens': 239, 'output_tokens': 79, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
@@ -638,33 +735,25 @@ engaging composition within the photograph. chat('What color is it?') ``` -The puppy in the image has a combination of two main colors: - -1. White: The majority of its face, including the area around its eyes, - muzzle, and part of its chest, is white. - -2. Reddish-brown: This color, often referred to as “chestnut” or “ruby” - in Cavalier King Charles Spaniels, covers the puppy’s ears and - extends to patches on its body. - -The contrast between these two colors is quite striking and typical of -the Cavalier King Charles Spaniel breed. The white fur appears bright -and clean, while the reddish-brown areas have a rich, warm tone. This -color combination, along with the puppy’s expressive eyes and small -black nose, contributes to its incredibly cute and appealing appearance. +The puppy has a classic Cavalier King Charles Spaniel coat with a rich +chestnut brown (sometimes called Blenheim) coloring on its ears and +patches on its face, combined with a bright white base color. The white +is particularly prominent on its face (creating a distinctive blaze down +the center) and chest area. This brown and white combination is one of +the most recognizable color patterns for the breed.
-- id: `msg_01WVxHA2sAff5q1En3q9km8F` +- id: `msg_01R91AqXG7pLc8hK24F5mc7x` - content: - `[{'text': 'The puppy in the image has a combination of two main colors:\n\n1. White: The majority of its face, including the area around its eyes, muzzle, and part of its chest, is white.\n\n2. Reddish-brown: This color, often referred to as "chestnut" or "ruby" in Cavalier King Charles Spaniels, covers the puppy\'s ears and extends to patches on its body.\n\nThe contrast between these two colors is quite striking and typical of the Cavalier King Charles Spaniel breed. The white fur appears bright and clean, while the reddish-brown areas have a rich, warm tone. This color combination, along with the puppy\'s expressive eyes and small black nose, contributes to its incredibly cute and appealing appearance.', 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` + `[{'text': 'The puppy has a classic Cavalier King Charles Spaniel coat with a rich chestnut brown (sometimes called Blenheim) coloring on its ears and patches on its face, combined with a bright white base color. The white is particularly prominent on its face (creating a distinctive blaze down the center) and chest area. This brown and white combination is one of the most recognizable color patterns for the breed.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` - role: `assistant` - stop_reason: `end_turn` - stop_sequence: `None` - type: `message` - usage: - `{'input_tokens': 457, 'output_tokens': 175, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + `{'input_tokens': 326, 'output_tokens': 92, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
@@ -676,7 +765,7 @@ that number of input tokens increases quickly with this kind of chat. chat.use ``` - In: 896; Out: 515; Total: 1411 + In: 663; Out: 301; Cache create: 0; Cache read: 0; Total: 964 ## Other model providers @@ -690,10 +779,10 @@ These are the models available through Bedrock: models_aws ``` - ('anthropic.claude-3-haiku-20240307-v1:0', + ['anthropic.claude-3-opus-20240229-v1:0', + 'anthropic.claude-3-5-sonnet-20241022-v2:0', 'anthropic.claude-3-sonnet-20240229-v1:0', - 'anthropic.claude-3-opus-20240229-v1:0', - 'anthropic.claude-3-5-sonnet-20240620-v1:0') + 'anthropic.claude-3-haiku-20240307-v1:0'] To use them, call `AnthropicBedrock` with your access details, and pass that to [`Client`](https://claudette.answer.ai/core.html#client): @@ -719,21 +808,20 @@ chat = Chat(cli=client) chat("I'm Jeremy") ``` -Hello Jeremy! It’s nice to meet you. How can I assist you today? Is -there anything specific you’d like to talk about or any questions you -have? +It’s nice to meet you, Jeremy! I’m Claude, an AI assistant created by +Anthropic. How can I help you today?
-- id: `msg_bdrk_01VFVE1Pe5LNubaWYKC1sz8f` +- id: `msg_bdrk_01V3B5RF2Pyzmh3NeR8xMMpq` - content: - `[{'text': "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?", 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` + `[{'text': "It's nice to meet you, Jeremy! I'm Claude, an AI assistant created by Anthropic. How can I help you today?", 'type': 'text'}]` +- model: `claude-3-haiku-20240307` - role: `assistant` - stop_reason: `end_turn` - stop_sequence: `None` - type: `message` -- usage: `{'input_tokens': 10, 'output_tokens': 36}` +- usage: `{'input_tokens': 10, 'output_tokens': 32}`
@@ -745,10 +833,10 @@ These are the models available through Vertex: models_goog ``` - ('claude-3-haiku@20240307', + ['claude-3-opus@20240229', + 'claude-3-5-sonnet-v2@20241022', 'claude-3-sonnet@20240229', - 'claude-3-opus@20240229', - 'claude-3-5-sonnet@20240620') + 'claude-3-haiku@20240307'] To use them, call `AnthropicVertex` with your access details, and pass that to [`Client`](https://claudette.answer.ai/core.html#client): @@ -769,24 +857,7 @@ chat = Chat(cli=client) chat("I'm Jeremy") ``` -Hello Jeremy! It’s nice to meet you. How can I assist you today? Is -there anything specific you’d like to talk about or any questions you -have? - -
- -- id: `msg_vrtx_01P251BUJXBBvihsvb3VVgZ3` -- content: - `[{'text': "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?", 'type': 'text'}]` -- model: `claude-3-5-sonnet-20240620` -- role: `assistant` -- stop_reason: `end_turn` -- stop_sequence: `None` -- type: `message` -- usage: `{'input_tokens': 10, 'output_tokens': 36}` - -
- ## Extensions -- [Pydantic Structured Ouput](https://github.com/tom-pollak/claudette-pydantic) +- [Pydantic Structured + Ouput](https://github.com/tom-pollak/claudette-pydantic) diff --git a/README.txt b/README.txt index 572bd46..49eef18 100644 --- a/README.txt +++ b/README.txt @@ -89,9 +89,9 @@ available from the SDK. models ``` - ('claude-3-opus-20240229', - 'claude-3-5-sonnet-20240620', - 'claude-3-haiku-20240307') + ['claude-3-opus-20240229', + 'claude-3-5-sonnet-20241022', + 'claude-3-haiku-20240307'] For these examples, we’ll use Sonnet 3.5, since it’s awesome! @@ -110,20 +110,20 @@ chat = Chat(model, sp="""You are a helpful and concise assistant.""") chat("I'm Jeremy") ``` -Hello Jeremy, it’s nice to meet you. How can I assist you today? +Hello Jeremy, nice to meet you.
-- id: msg_01Ht1YKw3qr4oeiE8uZkuuGW -- content: \[{‘text’: “Hello Jeremy, it’s nice to meet you. How can I - assist you today?”, ‘type’: ‘text’}\] -- model: claude-3-5-sonnet-20240620 -- role: assistant -- stop_reason: end_turn -- stop_sequence: None -- type: message -- usage: {‘input_tokens’: 19, ‘output_tokens’: 20, - ‘cache_creation_input_tokens’: 0, ‘cache_read_input_tokens’: 0} +- id: `msg_015oK9jEcra3TEKHUGYULjWB` +- content: + `[{'text': 'Hello Jeremy, nice to meet you.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 19, 'output_tokens': 11, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
@@ -132,20 +132,40 @@ r = chat("What's my name?") r ``` -Your name is Jeremy, as you just told me. +Your name is Jeremy.
-- id: msg_01WKwTVxHNz96wEqprwtDR75 -- content: \[{‘text’: ‘Your name is Jeremy, as you just told me.’, - ‘type’: ‘text’}\] -- model: claude-3-5-sonnet-20240620 -- role: assistant -- stop_reason: end_turn -- stop_sequence: None -- type: message -- usage: {‘input_tokens’: 47, ‘output_tokens’: 14, - ‘cache_creation_input_tokens’: 0, ‘cache_read_input_tokens’: 0} +- id: `msg_01Si8sTFJe8d8vq7enanbAwj` +- content: `[{'text': 'Your name is Jeremy.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 38, 'output_tokens': 8, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + +
+ +``` python +r = chat("What's my name?") +r +``` + +Your name is Jeremy. + +
+ +- id: `msg_01BHWRoAX8eBsoLn2bzpBkvx` +- content: `[{'text': 'Your name is Jeremy.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 54, 'output_tokens': 8, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
@@ -157,7 +177,7 @@ collapsible section. Alternatively you can `print` the details: print(r) ``` - Message(id='msg_01WKwTVxHNz96wEqprwtDR75', content=[TextBlock(text='Your name is Jeremy, as you just told me.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 47; Out: 14; Total: 61) + Message(id='msg_01BHWRoAX8eBsoLn2bzpBkvx', content=[TextBlock(text='Your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 54; Out: 8; Cache create: 0; Cache read: 0; Total: 62) Claude supports adding an extra `assistant` message at the end, which contains the *prefill* – i.e. the text we want Claude to assume the @@ -168,22 +188,21 @@ chat("Concisely, what is the meaning of life?", prefill='According to Douglas Adams,') ``` -According to Douglas Adams, “42.” More seriously, it’s often considered -to be finding personal fulfillment, happiness, and purpose. +According to Douglas Adams,42. Philosophically, it’s to find personal +meaning through relationships, purpose, and experiences.
-- id: msg_01P4vNp6tti1nxLa5UBmtXGQ -- content: \[{‘text’: ‘According to Douglas Adams, “42.” More seriously, - it's often considered to be finding personal fulfillment, happiness, - and purpose.’, ‘type’: ‘text’}\] -- model: claude-3-5-sonnet-20240620 -- role: assistant -- stop_reason: end_turn -- stop_sequence: None -- type: message -- usage: {‘input_tokens’: 81, ‘output_tokens’: 27, - ‘cache_creation_input_tokens’: 0, ‘cache_read_input_tokens’: 0} +- id: `msg_01R9RvMdFwea9iRX5uYSSHG7` +- content: + `[{'text': "According to Douglas Adams,42. Philosophically, it's to find personal meaning through relationships, purpose, and experiences.", 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 82, 'output_tokens': 23, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
@@ -198,6 +217,184 @@ for o in chat("Concisely, what book was that in?", prefill='It was in', stream=T It was in "The Hitchhiker's Guide to the Galaxy" by Douglas Adams. +### Async + +Alternatively, you can use +[`AsyncChat`](https://claudette.answer.ai/async.html#asyncchat) (or +[`AsyncClient`](https://claudette.answer.ai/async.html#asyncclient)) for +the async versions, e.g: + +``` python +chat = AsyncChat(model) +await chat("I'm Jeremy") +``` + +Hi Jeremy! Nice to meet you. I’m Claude, an AI assistant created by +Anthropic. How can I help you today? + +
+ +- id: `msg_016Q8cdc3sPWBS8eXcNj841L` +- content: + `[{'text': "Hi Jeremy! Nice to meet you. I'm Claude, an AI assistant created by Anthropic. How can I help you today?", 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 10, 'output_tokens': 31, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` + +
+ +Remember to use `async for` when streaming in this case: + +``` python +async for o in await chat("Concisely, what is the meaning of life?", + prefill='According to Douglas Adams,', stream=True): + print(o, end='') +``` + + According to Douglas Adams, it's 42. But in my view, there's no single universal meaning - each person must find their own purpose through relationships, personal growth, contribution to others, and pursuit of what they find meaningful. + +## Prompt caching + +If you use `mk_msg(msg, cache=True)`, then the message is cached using +Claude’s [prompt +caching](https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching) +feature. For instance, here we use caching when asking about Claudette’s +readme file: + +``` python +chat = Chat(model, sp="""You are a helpful and concise assistant.""") +``` + +``` python +nbtxt = Path('README.txt').read_text() +msg = f''' +{nbtxt} + +In brief, what is the purpose of this project based on the readme?''' +r = chat(mk_msg(msg, cache=True)) +r +``` + +Claudette is a high-level wrapper for Anthropic’s Python SDK that +automates common tasks and provides additional functionality. Its main +features include: + +1. A Chat class for stateful dialogs +2. Support for prefill (controlling Claude’s initial response words) +3. Convenient image handling +4. Simple tool use API integration +5. Support for multiple model providers (Anthropic, AWS Bedrock, Google + Vertex) + +The project is notable for being the first “literate nbdev” project, +meaning its source code is written as a detailed, readable Jupyter +Notebook that includes explanations, examples, and teaching material +alongside the functional code. + +The goal is to simplify working with Claude’s API while maintaining full +control, reducing boilerplate code and manual work that would otherwise +be needed with the base SDK. + +
+ +- id: `msg_014rVQnYoZXZuyWUCMELG1QW` +- content: + `[{'text': 'Claudette is a high-level wrapper for Anthropic\'s Python SDK that automates common tasks and provides additional functionality. Its main features include:\n\n1. A Chat class for stateful dialogs\n2. Support for prefill (controlling Claude\'s initial response words)\n3. Convenient image handling\n4. Simple tool use API integration\n5. Support for multiple model providers (Anthropic, AWS Bedrock, Google Vertex)\n\nThe project is notable for being the first "literate nbdev" project, meaning its source code is written as a detailed, readable Jupyter Notebook that includes explanations, examples, and teaching material alongside the functional code.\n\nThe goal is to simplify working with Claude\'s API while maintaining full control, reducing boilerplate code and manual work that would otherwise be needed with the base SDK.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 4, 'output_tokens': 179, 'cache_creation_input_tokens': 7205, 'cache_read_input_tokens': 0}` + +
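
A quick way to see how much of each turn is served from the cache is to compare the usage fields shown above (a small helper sketch; the attribute names match the `Usage` output):

``` python
def cache_frac(usage):
    "Fraction of prompt tokens read from the cache (sketch)."
    read = usage.cache_read_input_tokens
    total = usage.input_tokens + usage.cache_creation_input_tokens + read
    return read / total if total else 0.0

print(f"{cache_frac(r.usage):.1%}")
```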
+ +The response records the a cache has been created using these input +tokens: + +``` python +print(r.usage) +``` + + Usage(input_tokens=4, output_tokens=179, cache_creation_input_tokens=7205, cache_read_input_tokens=0) + +We can now ask a followup question in this chat: + +``` python +r = chat('How does it make tool use more ergonomic?') +r +``` + +According to the README, Claudette makes tool use more ergonomic in +several ways: + +1. It uses docments to make Python function definitions more + user-friendly - each parameter and return value should have a type + and description + +2. It handles the tool calling process automatically - when Claude + returns a tool_use message, Claudette manages calling the tool with + the provided parameters behind the scenes + +3. It provides a `toolloop` method that can handle multiple tool calls + in a single step to solve more complex problems + +4. It allows you to pass a list of tools to the Chat constructor and + optionally force Claude to always use a specific tool via + `tool_choice` + +Here’s a simple example from the README: + +``` python +def sums( + a:int, # First thing to sum + b:int=1 # Second thing to sum +) -> int: # The sum of the inputs + "Adds a + b." + print(f"Finding the sum of {a} and {b}") + return a + b + +chat = Chat(model, sp=sp, tools=[sums], tool_choice='sums') +``` + +This makes it much simpler compared to manually handling all the tool +use logic that would be required with the base SDK. + +
+ +- id: `msg_01EdUvvFBnpPxMtdLRCaSZAU` +- content: + `[{'text': 'According to the README, Claudette makes tool use more ergonomic in several ways:\n\n1. It uses docments to make Python function definitions more user-friendly - each parameter and return value should have a type and description\n\n2. It handles the tool calling process automatically - when Claude returns a tool_use message, Claudette manages calling the tool with the provided parameters behind the scenes\n\n3. It provides a`toolloop`method that can handle multiple tool calls in a single step to solve more complex problems\n\n4. It allows you to pass a list of tools to the Chat constructor and optionally force Claude to always use a specific tool via`tool_choice```` \n\nHere\'s a simple example from the README:\n\n```python\ndef sums(\n a:int, # First thing to sum \n b:int=1 # Second thing to sum\n) -> int: # The sum of the inputs\n "Adds a + b."\n print(f"Finding the sum of {a} and {b}")\n return a + b\n\nchat = Chat(model, sp=sp, tools=[sums], tool_choice=\'sums\')\n```\n\nThis makes it much simpler compared to manually handling all the tool use logic that would be required with the base SDK.', 'type': 'text'}] ```` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 197, 'output_tokens': 280, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 7205}` + +
+ +We can see that this only used ~200 regular input tokens – the 7000+ +context tokens have been read from cache. + +``` python +print(r.usage) +``` + + Usage(input_tokens=197, output_tokens=280, cache_creation_input_tokens=0, cache_read_input_tokens=7205) + +``` python +chat.use +``` + + In: 201; Out: 459; Cache create: 7205; Cache read: 7205; Total: 15070 + ## Tool use [Tool use](https://docs.anthropic.com/claude/docs/tool-use) lets Claude @@ -238,38 +435,39 @@ pr 'What is 604542+6458932?' To use tools, pass a list of them to -[`Chat`](https://claudette.answer.ai/core.html#chat), and to force it to -always answer using a tool, set `tool_choice` to that function name: +[`Chat`](https://claudette.answer.ai/core.html#chat): ``` python -chat = Chat(model, sp=sp, tools=[sums], tool_choice='sums') +chat = Chat(model, sp=sp, tools=[sums]) ``` -Now when we call that with our prompt, Claude doesn’t return the answer, -but instead returns a `tool_use` message, which means we have to call -the named tool with the provided parameters: +To force Claude to always answer using a tool, set `tool_choice` to that +function name. When Claude needs to use a tool, it doesn’t return the +answer, but instead returns a `tool_use` message, which means we have to +call the named tool with the provided parameters. ``` python -r = chat(pr) +r = chat(pr, tool_choice='sums') r ``` Finding the sum of 604542 and 6458932 -ToolUseBlock(id=‘toolu_018a7fT3Mnoa2XqtAMXqawXU’, input={‘a’: 604542, +ToolUseBlock(id=‘toolu_014ip2xWyEq8RnAccVT4SySt’, input={‘a’: 604542, ‘b’: 6458932}, name=‘sums’, type=‘tool_use’)
-- id: msg_011r1ZTjJSLFCJKceMoFSPky -- content: \[{‘id’: ‘toolu_018a7fT3Mnoa2XqtAMXqawXU’, ‘input’: {‘a’: - 604542, ‘b’: 6458932}, ‘name’: ‘sums’, ‘type’: ‘tool_use’}\] -- model: claude-3-5-sonnet-20240620 -- role: assistant -- stop_reason: tool_use -- stop_sequence: None -- type: message -- usage: {‘input_tokens’: 414, ‘output_tokens’: 53} +- id: `msg_014xrPyotyiBmFSctkp1LZHk` +- content: + `[{'id': 'toolu_014ip2xWyEq8RnAccVT4SySt', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `tool_use` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 442, 'output_tokens': 53, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
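
For reference, passing `tool_choice='sums'` corresponds to the following Messages API parameter, which Claudette builds for you (a sketch of the wire format):

``` python
tool_choice = {"type": "tool", "name": "sums"}  # always call the `sums` tool
# the API also accepts {"type": "auto"} and {"type": "any"}
```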
@@ -280,22 +478,20 @@ happens automatically: chat() ``` - Finding the sum of 7063474 and 1 - -ToolUseBlock(id=‘toolu_01Pc9N6fQSUNnuqrUKR2DkmL’, input={‘a’: 7063474}, -name=‘sums’, type=‘tool_use’) +The sum of 604542 and 6458932 is 7063474.
-- id: msg_01Ct8URrPt5hf5mjUSndwigS -- content: \[{‘id’: ‘toolu_01Pc9N6fQSUNnuqrUKR2DkmL’, ‘input’: {‘a’: - 7063474}, ‘name’: ‘sums’, ‘type’: ‘tool_use’}\] -- model: claude-3-5-sonnet-20240620 -- role: assistant -- stop_reason: tool_use -- stop_sequence: None -- type: message -- usage: {‘input_tokens’: 502, ‘output_tokens’: 35} +- id: `msg_01151puJxG8Fa6k6QSmzwKQA` +- content: + `[{'text': 'The sum of 604542 and 6458932 is 7063474.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 524, 'output_tokens': 23, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
@@ -307,7 +503,7 @@ You can see how many tokens have been used at any time by checking the chat.use ``` - In: 916; Out: 88; Total: 1004 + In: 966; Out: 76; Cache create: 0; Cache read: 0; Total: 1042 We can do everything needed to use tools in a single step, by using [`Chat.toolloop`](https://claudette.answer.ai/toolloop.html#chat.toolloop). @@ -340,35 +536,80 @@ chat.toolloop(pr, trace_func=print) ``` Finding the sum of 604542 and 6458932 - Message(id='msg_016tRhA75RqXTkCMR9WoZvyE', content=[TextBlock(text='Certainly! To calculate (604542+6458932)*2, we\'ll need to use the available tools to perform the addition and multiplication operations. Let\'s break it down step by step:\n\n1. First, we\'ll add 604542 and 6458932 using the "sums" function.\n2. Then, we\'ll multiply the result by 2 using the "mults" function.\n\nLet\'s start with the addition:', type='text'), ToolUseBlock(id='toolu_017aomxeCUA6DxM9U6UQjMu2', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 538; Out: 168; Total: 706) + [{'role': 'user', 'content': [{'type': 'text', 'text': 'Calculate (604542+6458932)*2'}]}, {'role': 'assistant', 'content': [TextBlock(text="I'll help you break this down into steps:\n\nFirst, let's add those numbers:", type='text'), ToolUseBlock(id='toolu_01St5UKxYUU4DKC96p2PjgcD', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')]}, {'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_01St5UKxYUU4DKC96p2PjgcD', 'content': '7063474'}]}] Finding the product of 7063474 and 2 - Message(id='msg_016w97cNedVktfhHUoL2Ey7a', content=[TextBlock(text="Great! The sum of 604542 and 6458932 is 7063474.\n\nNow, let's multiply this result by 2:", type='text'), ToolUseBlock(id='toolu_0115UMmS2GMeej1CafKLKvBB', input={'a': 7063474, 'b': 2}, name='mults', type='tool_use')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 721; Out: 106; Total: 827) - Message(id='msg_0139BkRu9b9yKPp8N8NHBMpj', content=[TextBlock(text='Now we have our final result. \n\nThe calculation (604542+6458932)*2 equals 14126948.\n\nTo break it down:\n1. 604542 + 6458932 = 7063474\n2. 7063474 * 2 = 14126948\n\nSo, the final answer to (604542+6458932)*2 is 14126948.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 841; Out: 95; Total: 936) + [{'role': 'assistant', 'content': [TextBlock(text="Now, let's multiply this result by 2:", type='text'), ToolUseBlock(id='toolu_01FpmRG4ZskKEWN1gFZzx49s', input={'a': 7063474, 'b': 2}, name='mults', type='tool_use')]}, {'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_01FpmRG4ZskKEWN1gFZzx49s', 'content': '14126948'}]}] + [{'role': 'assistant', 'content': [TextBlock(text='The final result is 14,126,948.', type='text')]}] -Now we have our final result. +The final result is 14,126,948. -The calculation (604542+6458932)\*2 equals 14126948. +
-To break it down: 1. 604542 + 6458932 = 7063474 2. 7063474 \* 2 = -14126948 +- id: `msg_0162teyBcJHriUzZXMPz4r5d` +- content: + `[{'text': 'The final result is 14,126,948.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 741, 'output_tokens': 15, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}` -So, the final answer to (604542+6458932)\*2 is 14126948. +
-
+## Structured data -- id: msg_0139BkRu9b9yKPp8N8NHBMpj -- content: \[{‘text’: ’Now we have our final result. calculation - (604542+6458932)*2 equals 14126948.break it down:. 604542 + 6458932 = - 7063474. 7063474 * 2 = 14126948, the final answer to - (604542+6458932)\*2 is 14126948.’, ‘type’: ‘text’}\] -- model: claude-3-5-sonnet-20240620 -- role: assistant -- stop_reason: end_turn -- stop_sequence: None -- type: message -- usage: {‘input_tokens’: 841, ‘output_tokens’: 95} +If you just want the immediate result from a single tool, use +[`Client.structured`](https://claudette.answer.ai/core.html#client.structured). -
+``` python +cli = Client(model) +``` + +``` python +def sums( + a:int, # First thing to sum + b:int=1 # Second thing to sum +) -> int: # The sum of the inputs + "Adds a + b." + print(f"Finding the sum of {a} and {b}") + return a + b +``` + +``` python +cli.structured("What is 604542+6458932", sums) +``` + + Finding the sum of 604542 and 6458932 + + [7063474] + +This is particularly useful for getting back structured information, +e.g: + +``` python +class President: + "Information about a president of the United States" + def __init__(self, + first:str, # first name + last:str, # last name + spouse:str, # name of spouse + years_in_office:str, # format: "{start_year}-{end_year}" + birthplace:str, # name of city + birth_year:int # year of birth, `0` if unknown + ): + assert re.match(r'\d{4}-\d{4}', years_in_office), "Invalid format: `years_in_office`" + store_attr() + + __repr__ = basic_repr('first, last, spouse, years_in_office, birthplace, birth_year') +``` + +``` python +cli.structured("Provide key information about the 3rd President of the United States", President) +``` + + [President(first='Thomas', last='Jefferson', spouse='Martha Wayles', years_in_office='1801-1809', birthplace='Shadwell', birth_year=1743)] ## Images @@ -380,7 +621,7 @@ fn = Path('samples/puppy.jpg') display.Image(filename=fn, width=200) ``` - We create a [`Chat`](https://claudette.answer.ai/core.html#chat) object @@ -402,23 +643,21 @@ Prompts to Claudette can be lists, containing text, images, or both, eg: chat([img, "In brief, what color flowers are in this image?"]) ``` -The flowers in this image are purple. They appear to be small, -daisy-like flowers, possibly asters or some type of purple wildflower, -blooming in the background behind the adorable puppy in the foreground. +In this adorable puppy photo, there are purple/lavender colored flowers +(appears to be asters or similar daisy-like flowers) in the background.
-- id: msg_01EsC5ZMpo5chJRzcjkceLu9 -- content: \[{‘text’: ‘The flowers in this image are purple. They appear - to be small, daisy-like flowers, possibly asters or some type of - purple wildflower, blooming in the background behind the adorable - puppy in the foreground.’, ‘type’: ‘text’}\] -- model: claude-3-5-sonnet-20240620 -- role: assistant -- stop_reason: end_turn -- stop_sequence: None -- type: message -- usage: {‘input_tokens’: 110, ‘output_tokens’: 51} +- id: `msg_01LHjGv1WwFvDsWUbyLmTEKT` +- content: + `[{'text': 'In this adorable puppy photo, there are purple/lavender colored flowers (appears to be asters or similar daisy-like flowers) in the background.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 110, 'output_tokens': 37, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
@@ -428,7 +667,7 @@ The image is included as input tokens. chat.use ``` - In: 110; Out: 51; Total: 161 + In: 110; Out: 37; Cache create: 0; Cache read: 0; Total: 147 Alternatively, Claudette supports creating a multi-stage chat with separate image and text prompts. For instance, you can pass just the @@ -441,46 +680,28 @@ chat = Chat(model) chat(img) ``` -This image shows an adorable puppy lying in the grass. The puppy appears -to be a Cavalier King Charles Spaniel or a similar breed, with -distinctive white and reddish-brown fur coloring. Its face is -predominantly white with large, expressive dark eyes and a small black -nose. - -The puppy is resting on a grassy surface, giving the impression of being -outdoors. In the background, you can see purple flowers, which look like -asters or michaelmas daisies, adding a lovely splash of color to the -scene. There’s also what appears to be a wooden structure or fence -behind the flowers, suggesting a garden setting. - -The composition of the image creates a charming, pastoral scene that -highlights the puppy’s cuteness against the natural backdrop. The -contrast between the soft fur of the puppy and the delicate purple -flowers makes for a visually appealing and heartwarming picture. +What an adorable Cavalier King Charles Spaniel puppy! The photo captures +the classic brown and white coloring of the breed, with those soulful +dark eyes that are so characteristic. The puppy is lying in the grass, +and there are lovely purple asters blooming in the background, creating +a beautiful natural setting. The combination of the puppy’s sweet +expression and the delicate flowers makes for a charming composition. +Cavalier King Charles Spaniels are known for their gentle, affectionate +nature, and this little one certainly seems to embody those traits with +its endearing look.
-- id: msg_013TMWFVwfmYh2qzUtUqen68 -- content: \[{‘text’: “This image shows an adorable puppy lying in the - grass. The puppy appears to be a Cavalier King Charles Spaniel or a - similar breed, with distinctive white and reddish-brown fur coloring. - Its face is predominantly white with large, expressive dark eyes and a - small black nose.puppy is resting on a grassy surface, giving the - impression of being outdoors. In the background, you can see purple - flowers, which look like asters or michaelmas daisies, adding a lovely - splash of color to the scene. There’s also what appears to be a wooden - structure or fence behind the flowers, suggesting a garden - setting.composition of the image creates a charming, pastoral scene - that highlights the puppy’s cuteness against the natural backdrop. The - contrast between the soft fur of the puppy and the delicate purple - flowers makes for a visually appealing and heartwarming picture.”, - ‘type’: ‘text’}\] -- model: claude-3-5-sonnet-20240620 -- role: assistant -- stop_reason: end_turn -- stop_sequence: None -- type: message -- usage: {‘input_tokens’: 98, ‘output_tokens’: 197} +- id: `msg_01Ciyymq44uwp2iYwRZdKWNN` +- content: + `[{'text': "What an adorable Cavalier King Charles Spaniel puppy! The photo captures the classic brown and white coloring of the breed, with those soulful dark eyes that are so characteristic. The puppy is lying in the grass, and there are lovely purple asters blooming in the background, creating a beautiful natural setting. The combination of the puppy's sweet expression and the delicate flowers makes for a charming composition. Cavalier King Charles Spaniels are known for their gentle, affectionate nature, and this little one certainly seems to embody those traits with its endearing look.", 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 98, 'output_tokens': 130, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
@@ -488,29 +709,25 @@ flowers makes for a visually appealing and heartwarming picture. chat('What direction is the puppy facing?') ``` -The puppy in the image is facing towards the camera, looking directly at -the viewer. Its head is slightly tilted, giving it an endearing and -curious expression. The puppy’s body is angled slightly to its left (our -right), with its front paws visible and resting on the grass. This -positioning allows us to see the full face of the puppy, showcasing its -adorable features and the distinctive color pattern of its fur. +The puppy is facing towards the left side of the image. Its head is +positioned so we can see its right side profile, though it appears to be +looking slightly towards the camera, giving us a good view of its +distinctive brown and white facial markings and one of its dark eyes. +The puppy is lying down with its white chest/front visible against the +green grass.
-- id: msg_015W9aQZt9JsjExJKcN72cDo -- content: \[{‘text’: “The puppy in the image is facing towards the - camera, looking directly at the viewer. Its head is slightly tilted, - giving it an endearing and curious expression. The puppy’s body is - angled slightly to its left (our right), with its front paws visible - and resting on the grass. This positioning allows us to see the full - face of the puppy, showcasing its adorable features and the - distinctive color pattern of its fur.”, ‘type’: ‘text’}\] -- model: claude-3-5-sonnet-20240620 -- role: assistant -- stop_reason: end_turn -- stop_sequence: None -- type: message -- usage: {‘input_tokens’: 306, ‘output_tokens’: 95} +- id: `msg_01AeR9eWjbxa788YF97iErtN` +- content: + `[{'text': 'The puppy is facing towards the left side of the image. Its head is positioned so we can see its right side profile, though it appears to be looking slightly towards the camera, giving us a good view of its distinctive brown and white facial markings and one of its dark eyes. The puppy is lying down with its white chest/front visible against the green grass.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 239, 'output_tokens': 79, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
@@ -518,51 +735,37 @@ adorable features and the distinctive color pattern of its fur. chat('What color is it?') ``` -The puppy in the image has a combination of two main colors: - -1. White: The majority of its face, including the muzzle area and a - portion of its body that’s visible, is white. - -2. Reddish-brown (often called “chestnut” or “ruby” in this breed): - This color appears on the ears and extends to patches on the body, - though the full extent isn’t visible in this image. - -This color combination is typical for Cavalier King Charles Spaniels, -particularly the Blenheim variety. The contrast between the white and -the reddish-brown fur creates a striking and adorable appearance, -highlighting the puppy’s facial features and giving it a distinctive -look. +The puppy has a classic Cavalier King Charles Spaniel coat with a rich +chestnut brown (sometimes called Blenheim) coloring on its ears and +patches on its face, combined with a bright white base color. The white +is particularly prominent on its face (creating a distinctive blaze down +the center) and chest area. This brown and white combination is one of +the most recognizable color patterns for the breed.
-- id: msg_0146KQcPjSoXFGLjqjpzwU3Q -- content: \[{‘text’: ‘The puppy in the image has a combination of two - main colors:. White: The majority of its face, including the muzzle - area and a portion of its body that's visible, is white.. - Reddish-brown (often called “chestnut” or “ruby” in this breed): This - color appears on the ears and extends to patches on the body, though - the full extent isn't visible in this image.color combination is - typical for Cavalier King Charles Spaniels, particularly the Blenheim - variety. The contrast between the white and the reddish-brown fur - creates a striking and adorable appearance, highlighting the puppy's - facial features and giving it a distinctive look.’, ‘type’: ‘text’}\] -- model: claude-3-5-sonnet-20240620 -- role: assistant -- stop_reason: end_turn -- stop_sequence: None -- type: message -- usage: {‘input_tokens’: 409, ‘output_tokens’: 157} +- id: `msg_01R91AqXG7pLc8hK24F5mc7x` +- content: + `[{'text': 'The puppy has a classic Cavalier King Charles Spaniel coat with a rich chestnut brown (sometimes called Blenheim) coloring on its ears and patches on its face, combined with a bright white base color. The white is particularly prominent on its face (creating a distinctive blaze down the center) and chest area. This brown and white combination is one of the most recognizable color patterns for the breed.', 'type': 'text'}]` +- model: `claude-3-5-sonnet-20241022` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: + `{'input_tokens': 326, 'output_tokens': 92, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`
Note that the image is passed in again for every input in the dialog, so that number of input tokens increases quickly with this kind of chat. +(For large images, using prompt caching might be a good idea.) ``` python chat.use ``` - In: 813; Out: 449; Total: 1262 + In: 663; Out: 301; Cache create: 0; Cache read: 0; Total: 964 ## Other model providers @@ -576,10 +779,10 @@ These are the models available through Bedrock: models_aws ``` - ('anthropic.claude-3-haiku-20240307-v1:0', + ['anthropic.claude-3-opus-20240229-v1:0', + 'anthropic.claude-3-5-sonnet-20241022-v2:0', 'anthropic.claude-3-sonnet-20240229-v1:0', - 'anthropic.claude-3-opus-20240229-v1:0', - 'anthropic.claude-3-5-sonnet-20240620-v1:0') + 'anthropic.claude-3-haiku-20240307-v1:0'] To use them, call `AnthropicBedrock` with your access details, and pass that to [`Client`](https://claudette.answer.ai/core.html#client): @@ -605,22 +808,20 @@ chat = Chat(cli=client) chat("I'm Jeremy") ``` -Hello Jeremy! It’s nice to meet you. How can I assist you today? Is -there anything specific you’d like to talk about or any questions you -have? +It’s nice to meet you, Jeremy! I’m Claude, an AI assistant created by +Anthropic. How can I help you today?
-- id: msg_bdrk_011MuMcLUba22CUttpxWuHJC -- content: \[{‘text’: “Hello Jeremy! It’s nice to meet you. How can I - assist you today? Is there anything specific you’d like to talk about - or any questions you have?”, ‘type’: ‘text’}\] -- model: claude-3-5-sonnet-20240620 -- role: assistant -- stop_reason: end_turn -- stop_sequence: None -- type: message -- usage: {‘input_tokens’: 10, ‘output_tokens’: 36} +- id: `msg_bdrk_01V3B5RF2Pyzmh3NeR8xMMpq` +- content: + `[{'text': "It's nice to meet you, Jeremy! I'm Claude, an AI assistant created by Anthropic. How can I help you today?", 'type': 'text'}]` +- model: `claude-3-haiku-20240307` +- role: `assistant` +- stop_reason: `end_turn` +- stop_sequence: `None` +- type: `message` +- usage: `{'input_tokens': 10, 'output_tokens': 32}`
@@ -632,10 +833,10 @@ These are the models available through Vertex: models_goog ``` - ('claude-3-haiku@20240307', + ['claude-3-opus@20240229', + 'claude-3-5-sonnet-v2@20241022', 'claude-3-sonnet@20240229', - 'claude-3-opus@20240229', - 'claude-3-5-sonnet@20240620') + 'claude-3-haiku@20240307'] To use them, call `AnthropicVertex` with your access details, and pass that to [`Client`](https://claudette.answer.ai/core.html#client): @@ -656,25 +857,7 @@ chat = Chat(cli=client) chat("I'm Jeremy") ``` -Hello Jeremy! It’s nice to meet you. How can I assist you today? Is -there anything specific you’d like to talk about or any questions you -have? - -
- -- id: msg_vrtx_015zVvSZxfmNQ8FRZjb9zPdJ -- content: \[{‘text’: “Hello Jeremy! It’s nice to meet you. How can I - assist you today? Is there anything specific you’d like to talk about - or any questions you have?”, ‘type’: ‘text’}\] -- model: claude-3-5-sonnet-20240620 -- role: assistant -- stop_reason: end_turn -- stop_sequence: None -- type: message -- usage: {‘input_tokens’: 10, ‘output_tokens’: 36} - -
- ## Extensions -- [Pydantic Structured Ouput](https://github.com/tom-pollak/claudette-pydantic) +- [Pydantic Structured + Ouput](https://github.com/tom-pollak/claudette-pydantic) diff --git a/claudette/asink.py b/claudette/asink.py index 121c72b..08181dd 100644 --- a/claudette/asink.py +++ b/claudette/asink.py @@ -84,7 +84,6 @@ async def __call__(self:AsyncChat, **kw): await self._append_pr(pr) if self.tools: kw['tools'] = [get_schema(o) for o in self.tools] - if self.tool_choice and pr: kw['tool_choice'] = mk_tool_choice(self.tool_choice) res = await self.c(self.h, stream=stream, prefill=prefill, sp=self.sp, temp=temp, maxtok=maxtok, **kw) if stream: return self._stream(res) self.h += mk_toolres(self.c.result, ns=self.tools, obj=self) diff --git a/index.ipynb b/index.ipynb index 0b31b53..7e06b4d 100644 --- a/index.ipynb +++ b/index.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "60650229", + "id": "cd824a27", "metadata": {}, "source": [ "# claudette\n", @@ -136,7 +136,7 @@ "data": { "text/plain": [ "['claude-3-opus-20240229',\n", - " 'claude-3-5-sonnet-20240620',\n", + " 'claude-3-5-sonnet-20241022',\n", " 'claude-3-haiku-20240307']" ] }, @@ -192,23 +192,23 @@ { "data": { "text/markdown": [ - "Hello Jeremy, it's nice to meet you. How can I assist you today?\n", + "Hello Jeremy, nice to meet you.\n", "\n", "
\n", "\n", - "- id: `msg_01XCDYdAigXPV3tr4Gk1jgHa`\n", - "- content: `[{'text': \"Hello Jeremy, it's nice to meet you. How can I assist you today?\", 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_015oK9jEcra3TEKHUGYULjWB`\n", + "- content: `[{'text': 'Hello Jeremy, nice to meet you.', 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 19, 'output_tokens': 20, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 19, 'output_tokens': 11, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01XCDYdAigXPV3tr4Gk1jgHa', content=[TextBlock(text=\"Hello Jeremy, it's nice to meet you. How can I assist you today?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 19; Out: 20; Cache create: 0; Cache read: 0; Total: 39)" + "Message(id='msg_015oK9jEcra3TEKHUGYULjWB', content=[TextBlock(text='Hello Jeremy, nice to meet you.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 19; Out: 11; Cache create: 0; Cache read: 0; Total: 30)" ] }, "execution_count": null, @@ -230,23 +230,23 @@ { "data": { "text/markdown": [ - "Your name is Jeremy, as you just told me.\n", + "Your name is Jeremy.\n", "\n", "
\n", "\n", - "- id: `msg_011MfA5LVyBsZLLChaTTJDBk`\n", - "- content: `[{'text': 'Your name is Jeremy, as you just told me.', 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_01Si8sTFJe8d8vq7enanbAwj`\n", + "- content: `[{'text': 'Your name is Jeremy.', 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 47, 'output_tokens': 14, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 38, 'output_tokens': 8, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_011MfA5LVyBsZLLChaTTJDBk', content=[TextBlock(text='Your name is Jeremy, as you just told me.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 47; Out: 14; Cache create: 0; Cache read: 0; Total: 61)" + "Message(id='msg_01Si8sTFJe8d8vq7enanbAwj', content=[TextBlock(text='Your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 38; Out: 8; Cache create: 0; Cache read: 0; Total: 46)" ] }, "execution_count": null, @@ -272,19 +272,19 @@ "\n", "
\n", "\n", - "- id: `msg_01Qosij9Tbc9u82bTFmdoXpU`\n", + "- id: `msg_01BHWRoAX8eBsoLn2bzpBkvx`\n", "- content: `[{'text': 'Your name is Jeremy.', 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 69, 'output_tokens': 8, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 54, 'output_tokens': 8, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01Qosij9Tbc9u82bTFmdoXpU', content=[TextBlock(text='Your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 69; Out: 8; Cache create: 0; Cache read: 0; Total: 77)" + "Message(id='msg_01BHWRoAX8eBsoLn2bzpBkvx', content=[TextBlock(text='Your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 54; Out: 8; Cache create: 0; Cache read: 0; Total: 62)" ] }, "execution_count": null, @@ -315,7 +315,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Message(id='msg_01Qosij9Tbc9u82bTFmdoXpU', content=[TextBlock(text='Your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 69; Out: 8; Cache create: 0; Cache read: 0; Total: 77)\n" + "Message(id='msg_01BHWRoAX8eBsoLn2bzpBkvx', content=[TextBlock(text='Your name is Jeremy.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 54; Out: 8; Cache create: 0; Cache read: 0; Total: 62)\n" ] } ], @@ -340,23 +340,23 @@ { "data": { "text/markdown": [ - "According to Douglas Adams, the meaning of life is 42. Philosophically, it's often considered to be finding purpose, happiness, and fulfillment in one's existence.\n", + "According to Douglas Adams,42. Philosophically, it's to find personal meaning through relationships, purpose, and experiences.\n", "\n", "
\n", "\n", - "- id: `msg_01MvA934wD5Ssyr3jhWTmV1G`\n", - "- content: `[{'text': \"According to Douglas Adams, the meaning of life is 42. Philosophically, it's often considered to be finding purpose, happiness, and fulfillment in one's existence.\", 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_01R9RvMdFwea9iRX5uYSSHG7`\n", + "- content: `[{'text': \"According to Douglas Adams,42. Philosophically, it's to find personal meaning through relationships, purpose, and experiences.\", 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 97, 'output_tokens': 36, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 82, 'output_tokens': 23, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01MvA934wD5Ssyr3jhWTmV1G', content=[TextBlock(text=\"According to Douglas Adams, the meaning of life is 42. Philosophically, it's often considered to be finding purpose, happiness, and fulfillment in one's existence.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 97; Out: 36; Cache create: 0; Cache read: 0; Total: 133)" + "Message(id='msg_01R9RvMdFwea9iRX5uYSSHG7', content=[TextBlock(text=\"According to Douglas Adams,42. Philosophically, it's to find personal meaning through relationships, purpose, and experiences.\", type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 82; Out: 23; Cache create: 0; Cache read: 0; Total: 105)" ] }, "execution_count": null, @@ -421,23 +421,23 @@ { "data": { "text/markdown": [ - "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\n", + "Hi Jeremy! Nice to meet you. I'm Claude, an AI assistant created by Anthropic. How can I help you today?\n", "\n", "
\n", "\n", - "- id: `msg_018i1EFCqB2vHmNBvspg9eUZ`\n", - "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_016Q8cdc3sPWBS8eXcNj841L`\n", + "- content: `[{'text': \"Hi Jeremy! Nice to meet you. I'm Claude, an AI assistant created by Anthropic. How can I help you today?\", 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 10, 'output_tokens': 36, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 10, 'output_tokens': 31, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_018i1EFCqB2vHmNBvspg9eUZ', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 36; Cache create: 0; Cache read: 0; Total: 46)" + "Message(id='msg_016Q8cdc3sPWBS8eXcNj841L', content=[TextBlock(text=\"Hi Jeremy! Nice to meet you. I'm Claude, an AI assistant created by Anthropic. How can I help you today?\", type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 31; Cache create: 0; Cache read: 0; Total: 41)" ] }, "execution_count": null, @@ -468,17 +468,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "According to Douglas Adams, the meaning of life is 42. More seriously, philosophers have debated this for millennia. Common answers include:\n", - "\n", - "1. Finding personal happiness\n", - "2. Serving others\n", - "3. Pursuing knowledge\n", - "4. Creating meaning through our choices\n", - "5. Fulfilling our potential\n", - "6. Connecting with others\n", - "7. Experiencing love and beauty\n", - "\n", - "Ultimately, many believe we must each find our own meaning." + "According to Douglas Adams, it's 42. But in my view, there's no single universal meaning - each person must find their own purpose through relationships, personal growth, contribution to others, and pursuit of what they find meaningful." ] } ], @@ -523,37 +513,33 @@ { "data": { "text/markdown": [ - "Based on the readme, the main purpose of the Claudette project is to provide a high-level wrapper around Anthropic's Python SDK for interacting with Claude AI models. Key features and goals include:\n", - "\n", - "1. Automating and simplifying interactions with Claude, reducing boilerplate code.\n", - "\n", - "2. Providing a stateful dialog interface through the `Chat` class.\n", - "\n", - "3. Supporting features like prefill (specifying the start of Claude's response) and image handling.\n", + "Claudette is a high-level wrapper for Anthropic's Python SDK that automates common tasks and provides additional functionality. Its main features include:\n", "\n", - "4. Offering convenient support for Claude's Tool Use API.\n", + "1. A Chat class for stateful dialogs\n", + "2. Support for prefill (controlling Claude's initial response words)\n", + "3. Convenient image handling\n", + "4. Simple tool use API integration\n", + "5. Support for multiple model providers (Anthropic, AWS Bedrock, Google Vertex)\n", "\n", - "5. Serving as an example of \"literate programming\", with the source code designed to be readable and educational, including explanations of how and why the code is written.\n", + "The project is notable for being the first \"literate nbdev\" project, meaning its source code is written as a detailed, readable Jupyter Notebook that includes explanations, examples, and teaching material alongside the functional code.\n", "\n", - "6. 
Supporting multiple model providers, including direct Anthropic API access as well as Claude models available through Amazon Bedrock and Google Vertex AI.\n", - "\n", - "The project aims to make working with Claude models more convenient and accessible for developers while also serving as an educational resource on how to effectively use and interact with these AI models.\n", + "The goal is to simplify working with Claude's API while maintaining full control, reducing boilerplate code and manual work that would otherwise be needed with the base SDK.\n", "\n", "
\n", "\n", - "- id: `msg_015khP4yqW57tH4qK6tGTkQr`\n", - "- content: `[{'text': 'Based on the readme, the main purpose of the Claudette project is to provide a high-level wrapper around Anthropic\\'s Python SDK for interacting with Claude AI models. Key features and goals include:\\n\\n1. Automating and simplifying interactions with Claude, reducing boilerplate code.\\n\\n2. Providing a stateful dialog interface through the `Chat` class.\\n\\n3. Supporting features like prefill (specifying the start of Claude\\'s response) and image handling.\\n\\n4. Offering convenient support for Claude\\'s Tool Use API.\\n\\n5. Serving as an example of \"literate programming\", with the source code designed to be readable and educational, including explanations of how and why the code is written.\\n\\n6. Supporting multiple model providers, including direct Anthropic API access as well as Claude models available through Amazon Bedrock and Google Vertex AI.\\n\\nThe project aims to make working with Claude models more convenient and accessible for developers while also serving as an educational resource on how to effectively use and interact with these AI models.', 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_014rVQnYoZXZuyWUCMELG1QW`\n", + "- content: `[{'text': 'Claudette is a high-level wrapper for Anthropic\\'s Python SDK that automates common tasks and provides additional functionality. Its main features include:\\n\\n1. A Chat class for stateful dialogs\\n2. Support for prefill (controlling Claude\\'s initial response words)\\n3. Convenient image handling\\n4. Simple tool use API integration\\n5. Support for multiple model providers (Anthropic, AWS Bedrock, Google Vertex)\\n\\nThe project is notable for being the first \"literate nbdev\" project, meaning its source code is written as a detailed, readable Jupyter Notebook that includes explanations, examples, and teaching material alongside the functional code.\\n\\nThe goal is to simplify working with Claude\\'s API while maintaining full control, reducing boilerplate code and manual work that would otherwise be needed with the base SDK.', 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 4, 'output_tokens': 220, 'cache_creation_input_tokens': 7171, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 4, 'output_tokens': 179, 'cache_creation_input_tokens': 7205, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_015khP4yqW57tH4qK6tGTkQr', content=[TextBlock(text='Based on the readme, the main purpose of the Claudette project is to provide a high-level wrapper around Anthropic\\'s Python SDK for interacting with Claude AI models. Key features and goals include:\\n\\n1. Automating and simplifying interactions with Claude, reducing boilerplate code.\\n\\n2. Providing a stateful dialog interface through the `Chat` class.\\n\\n3. Supporting features like prefill (specifying the start of Claude\\'s response) and image handling.\\n\\n4. Offering convenient support for Claude\\'s Tool Use API.\\n\\n5. Serving as an example of \"literate programming\", with the source code designed to be readable and educational, including explanations of how and why the code is written.\\n\\n6. Supporting multiple model providers, including direct Anthropic API access as well as Claude models available through Amazon Bedrock and Google Vertex AI.\\n\\nThe project aims to make working with Claude models more convenient and accessible for developers while also serving as an educational resource on how to effectively use and interact with these AI models.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 4; Out: 220; Cache create: 7171; Cache read: 0; Total: 7395)" + "Message(id='msg_014rVQnYoZXZuyWUCMELG1QW', content=[TextBlock(text='Claudette is a high-level wrapper for Anthropic\\'s Python SDK that automates common tasks and provides additional functionality. Its main features include:\\n\\n1. A Chat class for stateful dialogs\\n2. Support for prefill (controlling Claude\\'s initial response words)\\n3. Convenient image handling\\n4. Simple tool use API integration\\n5. Support for multiple model providers (Anthropic, AWS Bedrock, Google Vertex)\\n\\nThe project is notable for being the first \"literate nbdev\" project, meaning its source code is written as a detailed, readable Jupyter Notebook that includes explanations, examples, and teaching material alongside the functional code.\\n\\nThe goal is to simplify working with Claude\\'s API while maintaining full control, reducing boilerplate code and manual work that would otherwise be needed with the base SDK.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 4; Out: 179; Cache create: 7205; Cache read: 0; Total: 7388)" ] }, "execution_count": null, @@ -589,7 +575,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Usage(input_tokens=4, output_tokens=220, cache_creation_input_tokens=7171, cache_read_input_tokens=0)\n" + "Usage(input_tokens=4, output_tokens=179, cache_creation_input_tokens=7205, cache_read_input_tokens=0)\n" ] } ], @@ -614,39 +600,47 @@ { "data": { "text/markdown": [ - "Claudette makes tool use more ergonomic in several ways:\n", + "According to the README, Claudette makes tool use more ergonomic in several ways:\n", "\n", - "1. Simplified function definition: It uses docments to make defining Python functions for tools as simple as possible. Each parameter and the return value should have a type and a description.\n", + "1. It uses docments to make Python function definitions more user-friendly - each parameter and return value should have a type and description\n", "\n", - "2. 
Automatic handling: The `Chat` class can be initialized with a list of tools, and Claudette handles the back-and-forth between Claude and the tools automatically.\n", + "2. It handles the tool calling process automatically - when Claude returns a tool_use message, Claudette manages calling the tool with the provided parameters behind the scenes\n", "\n", - "3. Single-step execution: The `Chat.toolloop` method allows for executing a series of tool calls in a single step, even if multiple tools are needed to solve a problem.\n", + "3. It provides a `toolloop` method that can handle multiple tool calls in a single step to solve more complex problems\n", "\n", - "4. Forced tool use: You can set `tool_choice` to force Claude to always answer using a specific tool.\n", + "4. It allows you to pass a list of tools to the Chat constructor and optionally force Claude to always use a specific tool via `tool_choice`\n", "\n", - "5. Tracing: The `toolloop` method supports a `trace_func` parameter, allowing you to see each response from Claude during the process.\n", + "Here's a simple example from the README:\n", "\n", - "6. Automatic parameter passing: When Claude decides to use a tool, Claudette automatically calls the tool with the provided parameters.\n", + "```python\n", + "def sums(\n", + " a:int, # First thing to sum \n", + " b:int=1 # Second thing to sum\n", + ") -> int: # The sum of the inputs\n", + " \"Adds a + b.\"\n", + " print(f\"Finding the sum of {a} and {b}\")\n", + " return a + b\n", "\n", - "7. System prompt integration: It allows setting a system prompt to guide Claude's behavior when using tools, such as instructing it not to mention the tools it's using.\n", + "chat = Chat(model, sp=sp, tools=[sums], tool_choice='sums')\n", + "```\n", "\n", - "These features significantly reduce the amount of code and manual handling required to use Claude's tool use capabilities, making the process more streamlined and developer-friendly.\n", + "This makes it much simpler compared to manually handling all the tool use logic that would be required with the base SDK.\n", "\n", "
\n", "\n", - "- id: `msg_01B4KHLHzM6MUnRgiB3tZ1m5`\n", - "- content: `[{'text': \"Claudette makes tool use more ergonomic in several ways:\\n\\n1. Simplified function definition: It uses docments to make defining Python functions for tools as simple as possible. Each parameter and the return value should have a type and a description.\\n\\n2. Automatic handling: The `Chat` class can be initialized with a list of tools, and Claudette handles the back-and-forth between Claude and the tools automatically.\\n\\n3. Single-step execution: The `Chat.toolloop` method allows for executing a series of tool calls in a single step, even if multiple tools are needed to solve a problem.\\n\\n4. Forced tool use: You can set `tool_choice` to force Claude to always answer using a specific tool.\\n\\n5. Tracing: The `toolloop` method supports a `trace_func` parameter, allowing you to see each response from Claude during the process.\\n\\n6. Automatic parameter passing: When Claude decides to use a tool, Claudette automatically calls the tool with the provided parameters.\\n\\n7. System prompt integration: It allows setting a system prompt to guide Claude's behavior when using tools, such as instructing it not to mention the tools it's using.\\n\\nThese features significantly reduce the amount of code and manual handling required to use Claude's tool use capabilities, making the process more streamlined and developer-friendly.\", 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_01EdUvvFBnpPxMtdLRCaSZAU`\n", + "- content: `[{'text': 'According to the README, Claudette makes tool use more ergonomic in several ways:\\n\\n1. It uses docments to make Python function definitions more user-friendly - each parameter and return value should have a type and description\\n\\n2. It handles the tool calling process automatically - when Claude returns a tool_use message, Claudette manages calling the tool with the provided parameters behind the scenes\\n\\n3. It provides a `toolloop` method that can handle multiple tool calls in a single step to solve more complex problems\\n\\n4. It allows you to pass a list of tools to the Chat constructor and optionally force Claude to always use a specific tool via `tool_choice`\\n\\nHere\\'s a simple example from the README:\\n\\n```python\\ndef sums(\\n a:int, # First thing to sum \\n b:int=1 # Second thing to sum\\n) -> int: # The sum of the inputs\\n \"Adds a + b.\"\\n print(f\"Finding the sum of {a} and {b}\")\\n return a + b\\n\\nchat = Chat(model, sp=sp, tools=[sums], tool_choice=\\'sums\\')\\n```\\n\\nThis makes it much simpler compared to manually handling all the tool use logic that would be required with the base SDK.', 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 240, 'output_tokens': 289, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 7171}`\n", + "- usage: `{'input_tokens': 197, 'output_tokens': 280, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 7205}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01B4KHLHzM6MUnRgiB3tZ1m5', content=[TextBlock(text=\"Claudette makes tool use more ergonomic in several ways:\\n\\n1. Simplified function definition: It uses docments to make defining Python functions for tools as simple as possible. Each parameter and the return value should have a type and a description.\\n\\n2. Automatic handling: The `Chat` class can be initialized with a list of tools, and Claudette handles the back-and-forth between Claude and the tools automatically.\\n\\n3. Single-step execution: The `Chat.toolloop` method allows for executing a series of tool calls in a single step, even if multiple tools are needed to solve a problem.\\n\\n4. Forced tool use: You can set `tool_choice` to force Claude to always answer using a specific tool.\\n\\n5. Tracing: The `toolloop` method supports a `trace_func` parameter, allowing you to see each response from Claude during the process.\\n\\n6. Automatic parameter passing: When Claude decides to use a tool, Claudette automatically calls the tool with the provided parameters.\\n\\n7. System prompt integration: It allows setting a system prompt to guide Claude's behavior when using tools, such as instructing it not to mention the tools it's using.\\n\\nThese features significantly reduce the amount of code and manual handling required to use Claude's tool use capabilities, making the process more streamlined and developer-friendly.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 240; Out: 289; Cache create: 0; Cache read: 7171; Total: 7700)" + "Message(id='msg_01EdUvvFBnpPxMtdLRCaSZAU', content=[TextBlock(text='According to the README, Claudette makes tool use more ergonomic in several ways:\\n\\n1. It uses docments to make Python function definitions more user-friendly - each parameter and return value should have a type and description\\n\\n2. It handles the tool calling process automatically - when Claude returns a tool_use message, Claudette manages calling the tool with the provided parameters behind the scenes\\n\\n3. It provides a `toolloop` method that can handle multiple tool calls in a single step to solve more complex problems\\n\\n4. 
It allows you to pass a list of tools to the Chat constructor and optionally force Claude to always use a specific tool via `tool_choice`\\n\\nHere\\'s a simple example from the README:\\n\\n```python\\ndef sums(\\n a:int, # First thing to sum \\n b:int=1 # Second thing to sum\\n) -> int: # The sum of the inputs\\n \"Adds a + b.\"\\n print(f\"Finding the sum of {a} and {b}\")\\n return a + b\\n\\nchat = Chat(model, sp=sp, tools=[sums], tool_choice=\\'sums\\')\\n```\\n\\nThis makes it much simpler compared to manually handling all the tool use logic that would be required with the base SDK.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 197; Out: 280; Cache create: 0; Cache read: 7205; Total: 7682)" ] }, "execution_count": null, @@ -677,7 +671,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Usage(input_tokens=240, output_tokens=289, cache_creation_input_tokens=0, cache_read_input_tokens=7171)\n" + "Usage(input_tokens=197, output_tokens=280, cache_creation_input_tokens=0, cache_read_input_tokens=7205)\n" ] } ], @@ -694,7 +688,7 @@ { "data": { "text/plain": [ - "In: 244; Out: 509; Cache create: 7171; Cache read: 7171; Total: 15095" + "In: 201; Out: 459; Cache create: 7205; Cache read: 7205; Total: 15070" ] }, "execution_count": null, @@ -794,7 +788,7 @@ "id": "a0a0b65e", "metadata": {}, "source": [ - "To use tools, pass a list of them to `Chat`, and to force it to always answer using a tool, set `tool_choice` to that function name:" + "To use tools, pass a list of them to `Chat`:" ] }, { @@ -804,7 +798,7 @@ "metadata": {}, "outputs": [], "source": [ - "chat = Chat(model, sp=sp, tools=[sums], tool_choice='sums')" + "chat = Chat(model, sp=sp, tools=[sums])" ] }, { @@ -812,7 +806,7 @@ "id": "4a9d219f", "metadata": {}, "source": [ - "Now when we call that with our prompt, Claude doesn't return the answer, but instead returns a `tool_use` message, which means we have to call the named tool with the provided parameters:" + "To force Claude to always answer using a tool, set `tool_choice` to that function name. When Claude needs to use a tool, it doesn't return the answer, but instead returns a `tool_use` message, which means we have to call the named tool with the provided parameters." ] }, { @@ -831,23 +825,23 @@ { "data": { "text/markdown": [ - "ToolUseBlock(id='toolu_01SCB8X3NpcucTBQcHCtRxuZ', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')\n", + "ToolUseBlock(id='toolu_014ip2xWyEq8RnAccVT4SySt', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')\n", "\n", "
\n", "\n", - "- id: `msg_01J1fmKt3wwMVrbBxoZANA9X`\n", - "- content: `[{'id': 'toolu_01SCB8X3NpcucTBQcHCtRxuZ', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_014xrPyotyiBmFSctkp1LZHk`\n", + "- content: `[{'id': 'toolu_014ip2xWyEq8RnAccVT4SySt', 'input': {'a': 604542, 'b': 6458932}, 'name': 'sums', 'type': 'tool_use'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `tool_use`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 414, 'output_tokens': 53, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 442, 'output_tokens': 53, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01J1fmKt3wwMVrbBxoZANA9X', content=[ToolUseBlock(id='toolu_01SCB8X3NpcucTBQcHCtRxuZ', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 414; Out: 53; Cache create: 0; Cache read: 0; Total: 467)" + "Message(id='msg_014xrPyotyiBmFSctkp1LZHk', content=[ToolUseBlock(id='toolu_014ip2xWyEq8RnAccVT4SySt', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 442; Out: 53; Cache create: 0; Cache read: 0; Total: 495)" ] }, "execution_count": null, @@ -856,7 +850,7 @@ } ], "source": [ - "r = chat(pr)\n", + "r = chat(pr, tool_choice='sums')\n", "r" ] }, @@ -881,19 +875,19 @@ "\n", "
\n", "\n", - "- id: `msg_01R4SGHWELZeSYkGyrK2A2Cn`\n", + "- id: `msg_01151puJxG8Fa6k6QSmzwKQA`\n", "- content: `[{'text': 'The sum of 604542 and 6458932 is 7063474.', 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 515, 'output_tokens': 23, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 524, 'output_tokens': 23, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01R4SGHWELZeSYkGyrK2A2Cn', content=[TextBlock(text='The sum of 604542 and 6458932 is 7063474.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 515; Out: 23; Cache create: 0; Cache read: 0; Total: 538)" + "Message(id='msg_01151puJxG8Fa6k6QSmzwKQA', content=[TextBlock(text='The sum of 604542 and 6458932 is 7063474.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 524; Out: 23; Cache create: 0; Cache read: 0; Total: 547)" ] }, "execution_count": null, @@ -922,7 +916,7 @@ { "data": { "text/plain": [ - "In: 929; Out: 76; Cache create: 0; Cache read: 0; Total: 1005" + "In: 966; Out: 76; Cache create: 0; Cache read: 0; Total: 1042" ] }, "execution_count": null, @@ -1000,40 +994,32 @@ "output_type": "stream", "text": [ "Finding the sum of 604542 and 6458932\n", - "Message(id='msg_01BidPp2g3FuMLzFJd7jHDeb', content=[TextBlock(text='Certainly! To calculate (604542+6458932)*2, we\\'ll need to use the available tools to perform the addition and multiplication operations. Let\\'s break it down step by step:\\n\\n1. First, we\\'ll add 604542 and 6458932 using the \"sums\" function.\\n2. Then, we\\'ll multiply the result by 2 using the \"mults\" function.\\n\\nLet\\'s start with the addition:', type='text'), ToolUseBlock(id='toolu_017v8XraNE8sEaErP3SqwWw2', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 538; Out: 168; Cache create: 0; Cache read: 0; Total: 706)\n", + "[{'role': 'user', 'content': [{'type': 'text', 'text': 'Calculate (604542+6458932)*2'}]}, {'role': 'assistant', 'content': [TextBlock(text=\"I'll help you break this down into steps:\\n\\nFirst, let's add those numbers:\", type='text'), ToolUseBlock(id='toolu_01St5UKxYUU4DKC96p2PjgcD', input={'a': 604542, 'b': 6458932}, name='sums', type='tool_use')]}, {'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_01St5UKxYUU4DKC96p2PjgcD', 'content': '7063474'}]}]\n", "Finding the product of 7063474 and 2\n", - "Message(id='msg_01XpkGk396hzw5zS8qfC6zb5', content=[TextBlock(text=\"Great! The sum of 604542 and 6458932 is 7063474.\\n\\nNow, let's multiply this result by 2:\", type='text'), ToolUseBlock(id='toolu_012R3kQMdwT75GtbzWjfXL3k', input={'a': 7063474, 'b': 2}, name='mults', type='tool_use')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='tool_use', stop_sequence=None, type='message', usage=In: 721; Out: 106; Cache create: 0; Cache read: 0; Total: 827)\n", - "Message(id='msg_018bqP3PhfdcP5N7KKyTSLzF', content=[TextBlock(text='Now we have our final result. \\n\\nThe calculation (604542+6458932)*2 equals 14126948.\\n\\nTo break it down:\\n1. 604542 + 6458932 = 7063474\\n2. 
7063474 * 2 = 14126948\\n\\nSo, the final answer to (604542+6458932)*2 is 14126948.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 841; Out: 95; Cache create: 0; Cache read: 0; Total: 936)\n" + "[{'role': 'assistant', 'content': [TextBlock(text=\"Now, let's multiply this result by 2:\", type='text'), ToolUseBlock(id='toolu_01FpmRG4ZskKEWN1gFZzx49s', input={'a': 7063474, 'b': 2}, name='mults', type='tool_use')]}, {'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_01FpmRG4ZskKEWN1gFZzx49s', 'content': '14126948'}]}]\n", + "[{'role': 'assistant', 'content': [TextBlock(text='The final result is 14,126,948.', type='text')]}]\n" ] }, { "data": { "text/markdown": [ - "Now we have our final result. \n", - "\n", - "The calculation (604542+6458932)*2 equals 14126948.\n", - "\n", - "To break it down:\n", - "1. 604542 + 6458932 = 7063474\n", - "2. 7063474 * 2 = 14126948\n", - "\n", - "So, the final answer to (604542+6458932)*2 is 14126948.\n", + "The final result is 14,126,948.\n", "\n", "
\n", "\n", - "- id: `msg_018bqP3PhfdcP5N7KKyTSLzF`\n", - "- content: `[{'text': 'Now we have our final result. \\n\\nThe calculation (604542+6458932)*2 equals 14126948.\\n\\nTo break it down:\\n1. 604542 + 6458932 = 7063474\\n2. 7063474 * 2 = 14126948\\n\\nSo, the final answer to (604542+6458932)*2 is 14126948.', 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_0162teyBcJHriUzZXMPz4r5d`\n", + "- content: `[{'text': 'The final result is 14,126,948.', 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 841, 'output_tokens': 95, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 741, 'output_tokens': 15, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_018bqP3PhfdcP5N7KKyTSLzF', content=[TextBlock(text='Now we have our final result. \\n\\nThe calculation (604542+6458932)*2 equals 14126948.\\n\\nTo break it down:\\n1. 604542 + 6458932 = 7063474\\n2. 7063474 * 2 = 14126948\\n\\nSo, the final answer to (604542+6458932)*2 is 14126948.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 841; Out: 95; Cache create: 0; Cache read: 0; Total: 936)" + "Message(id='msg_0162teyBcJHriUzZXMPz4r5d', content=[TextBlock(text='The final result is 14,126,948.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 741; Out: 15; Cache create: 0; Cache read: 0; Total: 756)" ] }, "execution_count": null, @@ -1155,7 +1141,7 @@ { "data": { "text/plain": [ - "[__main__.President(first='Thomas', last='Jefferson', spouse='Martha Wayles Skelton', years_in_office='1801-1809', birthplace='Shadwell, Virginia', birth_year=1743)]" + "[President(first='Thomas', last='Jefferson', spouse='Martha Wayles', years_in_office='1801-1809', birthplace='Shadwell', birth_year=1743)]" ] }, "execution_count": null, @@ -1263,23 +1249,23 @@ { "data": { "text/markdown": [ - "The flowers in this image are purple. They appear to be small, daisy-like flowers, possibly asters or some type of purple daisy, blooming in the background behind the adorable puppy in the foreground.\n", + "In this adorable puppy photo, there are purple/lavender colored flowers (appears to be asters or similar daisy-like flowers) in the background.\n", "\n", "
\n", "\n", - "- id: `msg_01Wq2UqWLrQhWmmcuS7Dd8aL`\n", - "- content: `[{'text': 'The flowers in this image are purple. They appear to be small, daisy-like flowers, possibly asters or some type of purple daisy, blooming in the background behind the adorable puppy in the foreground.', 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_01LHjGv1WwFvDsWUbyLmTEKT`\n", + "- content: `[{'text': 'In this adorable puppy photo, there are purple/lavender colored flowers (appears to be asters or similar daisy-like flowers) in the background.', 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 110, 'output_tokens': 50, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 110, 'output_tokens': 37, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01Wq2UqWLrQhWmmcuS7Dd8aL', content=[TextBlock(text='The flowers in this image are purple. They appear to be small, daisy-like flowers, possibly asters or some type of purple daisy, blooming in the background behind the adorable puppy in the foreground.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 110; Out: 50; Cache create: 0; Cache read: 0; Total: 160)" + "Message(id='msg_01LHjGv1WwFvDsWUbyLmTEKT', content=[TextBlock(text='In this adorable puppy photo, there are purple/lavender colored flowers (appears to be asters or similar daisy-like flowers) in the background.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 110; Out: 37; Cache create: 0; Cache read: 0; Total: 147)" ] }, "execution_count": null, @@ -1308,7 +1294,7 @@ { "data": { "text/plain": [ - "In: 110; Out: 50; Cache create: 0; Cache read: 0; Total: 160" + "In: 110; Out: 37; Cache create: 0; Cache read: 0; Total: 147" ] }, "execution_count": null, @@ -1337,29 +1323,23 @@ { "data": { "text/markdown": [ - "This image shows an adorable puppy lying in the grass. The puppy appears to be a Cavalier King Charles Spaniel or a similar breed, with distinctive white and reddish-brown fur coloring. Its face is predominantly white with large, expressive dark eyes and a small black nose.\n", - "\n", - "The puppy is resting on a grassy surface, and behind it, you can see some purple flowers, which look like asters or michaelmas daisies. These flowers add a lovely splash of color to the background. There's also what seems to be a wooden structure or fence visible behind the puppy, giving the scene a rustic, garden-like feel.\n", - "\n", - "The composition of the image is quite charming, with the puppy as the main focus in the foreground and the natural elements providing a beautiful, colorful backdrop. The lighting appears soft, highlighting the puppy's fur and giving the whole image a warm, inviting atmosphere.\n", - "\n", - "This kind of image would be perfect for a greeting card, calendar, or as a heartwarming pet portrait. It captures the innocence and cuteness of a young dog in a picturesque outdoor setting.\n", + "What an adorable Cavalier King Charles Spaniel puppy! The photo captures the classic brown and white coloring of the breed, with those soulful dark eyes that are so characteristic. The puppy is lying in the grass, and there are lovely purple asters blooming in the background, creating a beautiful natural setting. The combination of the puppy's sweet expression and the delicate flowers makes for a charming composition. Cavalier King Charles Spaniels are known for their gentle, affectionate nature, and this little one certainly seems to embody those traits with its endearing look.\n", "\n", "
\n", "\n", - "- id: `msg_015NoQzCLM5ofbZTCxDPmWAT`\n", - "- content: `[{'text': \"This image shows an adorable puppy lying in the grass. The puppy appears to be a Cavalier King Charles Spaniel or a similar breed, with distinctive white and reddish-brown fur coloring. Its face is predominantly white with large, expressive dark eyes and a small black nose.\\n\\nThe puppy is resting on a grassy surface, and behind it, you can see some purple flowers, which look like asters or michaelmas daisies. These flowers add a lovely splash of color to the background. There's also what seems to be a wooden structure or fence visible behind the puppy, giving the scene a rustic, garden-like feel.\\n\\nThe composition of the image is quite charming, with the puppy as the main focus in the foreground and the natural elements providing a beautiful, colorful backdrop. The lighting appears soft, highlighting the puppy's fur and giving the whole image a warm, inviting atmosphere.\\n\\nThis kind of image would be perfect for a greeting card, calendar, or as a heartwarming pet portrait. It captures the innocence and cuteness of a young dog in a picturesque outdoor setting.\", 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_01Ciyymq44uwp2iYwRZdKWNN`\n", + "- content: `[{'text': \"What an adorable Cavalier King Charles Spaniel puppy! The photo captures the classic brown and white coloring of the breed, with those soulful dark eyes that are so characteristic. The puppy is lying in the grass, and there are lovely purple asters blooming in the background, creating a beautiful natural setting. The combination of the puppy's sweet expression and the delicate flowers makes for a charming composition. Cavalier King Charles Spaniels are known for their gentle, affectionate nature, and this little one certainly seems to embody those traits with its endearing look.\", 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 98, 'output_tokens': 248, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 98, 'output_tokens': 130, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_015NoQzCLM5ofbZTCxDPmWAT', content=[TextBlock(text=\"This image shows an adorable puppy lying in the grass. The puppy appears to be a Cavalier King Charles Spaniel or a similar breed, with distinctive white and reddish-brown fur coloring. Its face is predominantly white with large, expressive dark eyes and a small black nose.\\n\\nThe puppy is resting on a grassy surface, and behind it, you can see some purple flowers, which look like asters or michaelmas daisies. These flowers add a lovely splash of color to the background. There's also what seems to be a wooden structure or fence visible behind the puppy, giving the scene a rustic, garden-like feel.\\n\\nThe composition of the image is quite charming, with the puppy as the main focus in the foreground and the natural elements providing a beautiful, colorful backdrop. The lighting appears soft, highlighting the puppy's fur and giving the whole image a warm, inviting atmosphere.\\n\\nThis kind of image would be perfect for a greeting card, calendar, or as a heartwarming pet portrait. It captures the innocence and cuteness of a young dog in a picturesque outdoor setting.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 98; Out: 248; Cache create: 0; Cache read: 0; Total: 346)" + "Message(id='msg_01Ciyymq44uwp2iYwRZdKWNN', content=[TextBlock(text=\"What an adorable Cavalier King Charles Spaniel puppy! The photo captures the classic brown and white coloring of the breed, with those soulful dark eyes that are so characteristic. The puppy is lying in the grass, and there are lovely purple asters blooming in the background, creating a beautiful natural setting. The combination of the puppy's sweet expression and the delicate flowers makes for a charming composition. Cavalier King Charles Spaniels are known for their gentle, affectionate nature, and this little one certainly seems to embody those traits with its endearing look.\", type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 98; Out: 130; Cache create: 0; Cache read: 0; Total: 228)" ] }, "execution_count": null, @@ -1381,23 +1361,23 @@ { "data": { "text/markdown": [ - "The puppy in the image is facing towards the left side of the frame. Its head is turned slightly, allowing us to see most of its face, including both eyes, which are looking directly at the camera. The puppy's body is angled diagonally, with its front paws and chest visible as it rests on the grass. This positioning gives a good view of the puppy's facial features and part of its body, creating an engaging and endearing portrait of the young dog.\n", + "The puppy is facing towards the left side of the image. Its head is positioned so we can see its right side profile, though it appears to be looking slightly towards the camera, giving us a good view of its distinctive brown and white facial markings and one of its dark eyes. The puppy is lying down with its white chest/front visible against the green grass.\n", "\n", "
\n", "\n", - "- id: `msg_018bjcun7oQyBLtn3eMi1nHU`\n", - "- content: `[{'text': \"The puppy in the image is facing towards the left side of the frame. Its head is turned slightly, allowing us to see most of its face, including both eyes, which are looking directly at the camera. The puppy's body is angled diagonally, with its front paws and chest visible as it rests on the grass. This positioning gives a good view of the puppy's facial features and part of its body, creating an engaging and endearing portrait of the young dog.\", 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_01AeR9eWjbxa788YF97iErtN`\n", + "- content: `[{'text': 'The puppy is facing towards the left side of the image. Its head is positioned so we can see its right side profile, though it appears to be looking slightly towards the camera, giving us a good view of its distinctive brown and white facial markings and one of its dark eyes. The puppy is lying down with its white chest/front visible against the green grass.', 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 357, 'output_tokens': 105, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 239, 'output_tokens': 79, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_018bjcun7oQyBLtn3eMi1nHU', content=[TextBlock(text=\"The puppy in the image is facing towards the left side of the frame. Its head is turned slightly, allowing us to see most of its face, including both eyes, which are looking directly at the camera. The puppy's body is angled diagonally, with its front paws and chest visible as it rests on the grass. This positioning gives a good view of the puppy's facial features and part of its body, creating an engaging and endearing portrait of the young dog.\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 357; Out: 105; Cache create: 0; Cache read: 0; Total: 462)" + "Message(id='msg_01AeR9eWjbxa788YF97iErtN', content=[TextBlock(text='The puppy is facing towards the left side of the image. Its head is positioned so we can see its right side profile, though it appears to be looking slightly towards the camera, giving us a good view of its distinctive brown and white facial markings and one of its dark eyes. The puppy is lying down with its white chest/front visible against the green grass.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 239; Out: 79; Cache create: 0; Cache read: 0; Total: 318)" ] }, "execution_count": null, @@ -1418,29 +1398,23 @@ { "data": { "text/markdown": [ - "The puppy in the image has a combination of two main colors:\n", - "\n", - "1. White: The majority of its face, including the area around its eyes, muzzle, and part of its chest, is white.\n", - "\n", - "2. Reddish-brown (often called \"ruby\" or \"chestnut\" in dog breed descriptions): This color appears on its ears and patches on its body.\n", - "\n", - "This color combination is typical of Cavalier King Charles Spaniels, particularly the Blenheim variety, though without being able to see the full body, it's hard to confirm the exact breed. The contrast between the white and reddish-brown fur creates a striking and adorable appearance for the puppy.\n", + "The puppy has a classic Cavalier King Charles Spaniel coat with a rich chestnut brown (sometimes called Blenheim) coloring on its ears and patches on its face, combined with a bright white base color. The white is particularly prominent on its face (creating a distinctive blaze down the center) and chest area. This brown and white combination is one of the most recognizable color patterns for the breed.\n", "\n", "
\n", "\n", - "- id: `msg_01T4JvKPNT9a9iWXachmszAU`\n", - "- content: `[{'text': 'The puppy in the image has a combination of two main colors:\\n\\n1. White: The majority of its face, including the area around its eyes, muzzle, and part of its chest, is white.\\n\\n2. Reddish-brown (often called \"ruby\" or \"chestnut\" in dog breed descriptions): This color appears on its ears and patches on its body.\\n\\nThis color combination is typical of Cavalier King Charles Spaniels, particularly the Blenheim variety, though without being able to see the full body, it\\'s hard to confirm the exact breed. The contrast between the white and reddish-brown fur creates a striking and adorable appearance for the puppy.', 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_01R91AqXG7pLc8hK24F5mc7x`\n", + "- content: `[{'text': 'The puppy has a classic Cavalier King Charles Spaniel coat with a rich chestnut brown (sometimes called Blenheim) coloring on its ears and patches on its face, combined with a bright white base color. The white is particularly prominent on its face (creating a distinctive blaze down the center) and chest area. This brown and white combination is one of the most recognizable color patterns for the breed.', 'type': 'text'}]`\n", + "- model: `claude-3-5-sonnet-20241022`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 470, 'output_tokens': 154, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", + "- usage: `{'input_tokens': 326, 'output_tokens': 92, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_01T4JvKPNT9a9iWXachmszAU', content=[TextBlock(text='The puppy in the image has a combination of two main colors:\\n\\n1. White: The majority of its face, including the area around its eyes, muzzle, and part of its chest, is white.\\n\\n2. Reddish-brown (often called \"ruby\" or \"chestnut\" in dog breed descriptions): This color appears on its ears and patches on its body.\\n\\nThis color combination is typical of Cavalier King Charles Spaniels, particularly the Blenheim variety, though without being able to see the full body, it\\'s hard to confirm the exact breed. The contrast between the white and reddish-brown fur creates a striking and adorable appearance for the puppy.', type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 470; Out: 154; Cache create: 0; Cache read: 0; Total: 624)" + "Message(id='msg_01R91AqXG7pLc8hK24F5mc7x', content=[TextBlock(text='The puppy has a classic Cavalier King Charles Spaniel coat with a rich chestnut brown (sometimes called Blenheim) coloring on its ears and patches on its face, combined with a bright white base color. The white is particularly prominent on its face (creating a distinctive blaze down the center) and chest area. This brown and white combination is one of the most recognizable color patterns for the breed.', type='text')], model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 326; Out: 92; Cache create: 0; Cache read: 0; Total: 418)" ] }, "execution_count": null, @@ -1469,7 +1443,7 @@ { "data": { "text/plain": [ - "In: 925; Out: 507; Cache create: 0; Cache read: 0; Total: 1432" + "In: 663; Out: 301; Cache create: 0; Cache read: 0; Total: 964" ] }, "execution_count": null, @@ -1522,10 +1496,10 @@ { "data": { "text/plain": [ - "('anthropic.claude-3-haiku-20240307-v1:0',\n", + "['anthropic.claude-3-opus-20240229-v1:0',\n", + " 'anthropic.claude-3-5-sonnet-20241022-v2:0',\n", " 'anthropic.claude-3-sonnet-20240229-v1:0',\n", - " 'anthropic.claude-3-opus-20240229-v1:0',\n", - " 'anthropic.claude-3-5-sonnet-20240620-v1:0')" + " 'anthropic.claude-3-haiku-20240307-v1:0']" ] }, "execution_count": null, @@ -1586,23 +1560,23 @@ { "data": { "text/markdown": [ - "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\n", + "It's nice to meet you, Jeremy! I'm Claude, an AI assistant created by Anthropic. How can I help you today?\n", "\n", "
\n", "\n", - "- id: `msg_bdrk_01VFVE1Pe5LNubaWYKC1sz8f`\n", - "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", + "- id: `msg_bdrk_01V3B5RF2Pyzmh3NeR8xMMpq`\n", + "- content: `[{'text': \"It's nice to meet you, Jeremy! I'm Claude, an AI assistant created by Anthropic. How can I help you today?\", 'type': 'text'}]`\n", + "- model: `claude-3-haiku-20240307`\n", "- role: `assistant`\n", "- stop_reason: `end_turn`\n", "- stop_sequence: `None`\n", "- type: `message`\n", - "- usage: `{'input_tokens': 10, 'output_tokens': 36}`\n", + "- usage: `{'input_tokens': 10, 'output_tokens': 32}`\n", "\n", "
" ], "text/plain": [ - "Message(id='msg_bdrk_01VFVE1Pe5LNubaWYKC1sz8f', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 36; Total: 46)" + "Message(id='msg_bdrk_01V3B5RF2Pyzmh3NeR8xMMpq', content=[TextBlock(text=\"It's nice to meet you, Jeremy! I'm Claude, an AI assistant created by Anthropic. How can I help you today?\", type='text')], model='claude-3-haiku-20240307', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 32; Cache create: 0; Cache read: 0; Total: 42)" ] }, "execution_count": null, @@ -1640,10 +1614,10 @@ { "data": { "text/plain": [ - "('claude-3-haiku@20240307',\n", + "['claude-3-opus@20240229',\n", + " 'claude-3-5-sonnet-v2@20241022',\n", " 'claude-3-sonnet@20240229',\n", - " 'claude-3-opus@20240229',\n", - " 'claude-3-5-sonnet@20240620')" + " 'claude-3-haiku@20240307']" ] }, "execution_count": null, @@ -1691,34 +1665,7 @@ "execution_count": null, "id": "7d0481b5", "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\n", - "\n", - "
\n", - "\n", - "- id: `msg_vrtx_01P251BUJXBBvihsvb3VVgZ3`\n", - "- content: `[{'text': \"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", 'type': 'text'}]`\n", - "- model: `claude-3-5-sonnet-20240620`\n", - "- role: `assistant`\n", - "- stop_reason: `end_turn`\n", - "- stop_sequence: `None`\n", - "- type: `message`\n", - "- usage: `{'input_tokens': 10, 'output_tokens': 36}`\n", - "\n", - "
" - ], - "text/plain": [ - "Message(id='msg_vrtx_01P251BUJXBBvihsvb3VVgZ3', content=[TextBlock(text=\"Hello Jeremy! It's nice to meet you. How can I assist you today? Is there anything specific you'd like to talk about or any questions you have?\", type='text')], model='claude-3-5-sonnet-20240620', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=In: 10; Out: 36; Total: 46)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "chat = Chat(cli=client)\n", "chat(\"I'm Jeremy\")"