diff --git a/10_MultipleModels/00_llm_multiple_models_w_bedrock_litellm.ipynb b/10_MultipleModels/00_llm_multiple_models_w_bedrock_litellm.ipynb
new file mode 100644
index 00000000..88f3e5b8
--- /dev/null
+++ b/10_MultipleModels/00_llm_multiple_models_w_bedrock_litellm.ipynb
@@ -0,0 +1,179 @@
+{
+  "nbformat": 4,
+  "nbformat_minor": 0,
+  "metadata": {
+    "colab": {
+      "provenance": []
+    },
+    "kernelspec": {
+      "name": "python3",
+      "display_name": "Python 3"
+    },
+    "language_info": {
+      "name": "python"
+    }
+  },
+  "cells": [
+    {
+      "cell_type": "markdown",
+      "source": [
+        "# Switch Between Multiple Bedrock Deployments\n",
+        "\n",
+        "## Introduction\n",
+        "\n",
+        "In this notebook, we will go over how to call multiple Bedrock models and instances using [LiteLLM](https://github.com/BerriAI/litellm).\n",
+        "\n",
+        "Bedrock provides support for:\n",
+        "\n",
+        "\n",
+        "* Anthropic\n",
+        "* Cohere\n",
+        "* AI21\n",
+        "* Amazon Titan\n",
+        "\n",
+        "\n",
+        "Each of these providers has different parameter names, prompt formats, etc. We will use LiteLLM to handle the translation for us."
+      ],
+      "metadata": {
+        "id": "yFs6uSIXXUYc"
+      }
+    },
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "id": "VFFJDVnYTtuD"
+      },
+      "outputs": [],
+      "source": [
+        "!pip install litellm boto3"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "source": [
+        "# Switch Models\n",
+        "\n",
+        "LiteLLM handles the prompt formatting, parameter translation, etc., letting you switch between Claude, Cohere, AI21, and Titan on Bedrock. [Docs](https://docs.litellm.ai/docs/providers/bedrock#passing-credentials-as-parameters---completion)"
+      ],
+      "metadata": {
+        "id": "TtGZTM5SUsYg"
+      }
+    },
+    {
+      "cell_type": "code",
+      "source": [
+        "import litellm\n",
+        "\n",
+        "print(litellm.bedrock_models)"
+      ],
+      "metadata": {
+        "colab": {
+          "base_uri": "https://localhost:8080/"
+        },
+        "id": "1_4s32mBVG9A",
+        "outputId": "1eec15cb-4514-4e6c-daab-16a0a7b74fbc"
+      },
+      "execution_count": 6,
+      "outputs": [
+        {
+          "output_type": "stream",
+          "name": "stdout",
+          "text": [
+            "['ai21.j2-mid-v1', 'ai21.j2-ultra-v1', 'amazon.titan-text-lite-v1', 'amazon.titan-text-express-v1', 'anthropic.claude-v1', 'anthropic.claude-v2', 'anthropic.claude-instant-v1', 'cohere.command-text-v14']\n"
+          ]
+        }
+      ]
+    },
+    {
+      "cell_type": "code",
+      "source": [
+        "import os\n",
+        "from litellm import completion\n",
+        "\n",
+        "os.environ[\"AWS_ACCESS_KEY_ID\"] = \"\"\n",
+        "os.environ[\"AWS_SECRET_ACCESS_KEY\"] = \"\"\n",
+        "os.environ[\"AWS_REGION_NAME\"] = \"\"\n",
+        "\n",
+        "for model in litellm.bedrock_models:\n",
+        "    response = completion(\n",
+        "        model=model,\n",
+        "        messages=[{\"role\": \"user\", \"content\": \"Hello, how are you?\"}])\n",
+        "    print(response)"
+      ],
+      "metadata": {
+        "id": "_65TH74JT-O2"
+      },
+      "execution_count": null,
+      "outputs": []
+    },
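+    {
+      "cell_type": "markdown",
+      "source": [
+        "# Handle Models That Aren't Enabled\n",
+        "\n",
+        "Not every Bedrock model is enabled for every AWS account or region. The cell below is a minimal sketch (not from the LiteLLM docs) that wraps the same `completion()` call in a `try`/`except`, so one unavailable model doesn't stop the loop."
+      ],
+      "metadata": {}
+    },
+    {
+      "cell_type": "code",
+      "source": [
+        "# Minimal sketch: skip Bedrock models this account/region hasn't enabled.\n",
+        "import litellm\n",
+        "from litellm import completion\n",
+        "\n",
+        "for model in litellm.bedrock_models:\n",
+        "    try:\n",
+        "        response = completion(\n",
+        "            model=model,\n",
+        "            messages=[{\"role\": \"user\", \"content\": \"Hello, how are you?\"}])\n",
+        "        print(f\"----- {model} -----\")\n",
+        "        print(response)\n",
+        "    except Exception as err:\n",
+        "        print(f\"Skipping {model}: {err}\")"
+      ],
+      "metadata": {},
+      "execution_count": null,
+      "outputs": []
+    },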
+    {
+      "cell_type": "markdown",
+      "source": [
+        "# Switch Accounts / Regions\n",
+        "\n",
+        "You can also pass in your credentials as part of the `completion()` call, letting you switch between accounts / regions. [Docs](https://docs.litellm.ai/docs/providers/bedrock#passing-credentials-as-parameters---completion)"
+      ],
+      "metadata": {
+        "id": "8f9EAJMjVTEH"
+      }
+    },
+    {
+      "cell_type": "code",
+      "source": [
+        "accounts = [{\n",
+        "    \"aws_access_key_id\": \"\",\n",
+        "    \"aws_secret_access_key\": \"\",\n",
+        "    \"aws_region_name\": \"\"\n",
+        "}, {\n",
+        "    \"aws_access_key_id\": \"\",\n",
+        "    \"aws_secret_access_key\": \"\",\n",
+        "    \"aws_region_name\": \"\"\n",
+        "}]\n",
+        "\n",
+        "for item in accounts:\n",
+        "    response = completion(\n",
+        "        model=\"anthropic.claude-instant-v1\",\n",
+        "        messages=[{\"role\": \"user\", \"content\": \"Hello, how are you?\"}],\n",
+        "        **item)\n",
+        "\n",
+        "    print(response)"
+      ],
+      "metadata": {
+        "id": "yIYhzfccVWDM"
+      },
+      "execution_count": null,
+      "outputs": []
+    }
+  ]
+}
\ No newline at end of file