From 0e79bdc09c394dea5560fc951d81347376c7bacf Mon Sep 17 00:00:00 2001 From: Robert Haase Date: Fri, 11 Oct 2024 13:44:17 +0200 Subject: [PATCH] added kisski endpoint --- .../15_endpoint_apis/06_kisski_endpoint.ipynb | 168 ++++++++++++++++++ docs/_toc.yml | 1 + 2 files changed, 169 insertions(+) create mode 100644 docs/15_endpoint_apis/06_kisski_endpoint.ipynb diff --git a/docs/15_endpoint_apis/06_kisski_endpoint.ipynb b/docs/15_endpoint_apis/06_kisski_endpoint.ipynb new file mode 100644 index 0000000..74bda4f --- /dev/null +++ b/docs/15_endpoint_apis/06_kisski_endpoint.ipynb @@ -0,0 +1,168 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "87404224-b84b-409f-8683-c4a243d29722", + "metadata": {}, + "source": [ + "# KISSKI / GWDG endpoint\n", + "In this notebook we will use the [KISSKI LLM service](https://kisski.gwdg.de/leistungen/2-02-llm-service/) infrastructure. KISSKI is the German AI Service Center for Sensitive and Critical Infrastructure. Before you can access it, you need to create an API key by filling out [this form](https://services.kisski.de/services/en/service/?service=2-02-llm-service.json); make sure to check the box \"API access to our chat service\". You will see that this method also uses the OpenAI API; we only change the `base_url`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "752e974d-9aaf-44aa-80fb-01a042cf5774", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'1.43.0'" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import os\n", + "import openai\n", + "openai.__version__" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "915d61b5-efe2-4efa-88ac-2ae1866d32e7", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "ab55e229-93b9-4e9b-974d-037002690bf0", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "def prompt_kisski(message:str, model=\"meta-llama-3.1-70b-instruct\"):\n", + " \"\"\"A prompt helper function that sends a message to KISSKI Chat AI API\n", + " and returns only the text response.\n", + " \"\"\"\n", + " import os\n", + " \n", + " # convert message in the right format if necessary\n", + " if isinstance(message, str):\n", + " message = [{\"role\": \"user\", \"content\": message}]\n", + " \n", + " # setup connection to the LLM\n", + " client = openai.OpenAI()\n", + " client.base_url = \"https://chat-ai.academiccloud.de/v1\"\n", + " client.api_key = os.environ.get('KISSKI_API_KEY')\n", + " \n", + " response = client.chat.completions.create(\n", + " model=model,\n", + " messages=message\n", + " )\n", + " \n", + " # extract answer\n", + " return response.choices[0].message.content" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "a7654a20-a307-4b26-8d25-bef20b70224e", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "\"How's it going? 
Is there something I can help you with or would you like to chat?\"" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "prompt_kisski(\"Hi!\")" + ] + }, + { + "cell_type": "markdown", + "id": "578e9edd-b58f-4fd0-a56d-1966105221dc", + "metadata": {}, + "source": [ + "## Exercise\n", + "List the models available in the KISSKI endpoint and try them out by specifying them when calling `prompt_kisski()`." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "05171ba7-a775-41c5-954d-7d4fc2b5b625", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "meta-llama-3.1-8b-instruct\n", + "meta-llama-3.1-70b-instruct\n", + "mistral-large-instruct\n", + "llama-3.1-sauerkrautlm-70b-instruct\n", + "qwen2.5-72b-instruct\n", + "codestral-22b\n", + "occiglot-7b-eu5-instruct\n" + ] + } + ], + "source": [ + "client = openai.OpenAI()\n", + "client.base_url = \"https://chat-ai.academiccloud.de/v1\"\n", + "client.api_key = os.environ.get('KISSKI_API_KEY')\n", + "\n", + "print(\"\\n\".join([model.id for model in client.models.list().data]))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7e810ee2-4d22-42f6-add5-532cf95b4b9c", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/_toc.yml b/docs/_toc.yml index ff282c5..867f07a 100644 --- a/docs/_toc.yml +++ b/docs/_toc.yml @@ -24,6 +24,7 @@ parts: - file: 15_endpoint_apis/01_openai_api.ipynb - file: 15_endpoint_apis/02_ollama_endpoint.ipynb - file: 
15_endpoint_apis/04_scadsai_llm_endpoint.ipynb + - file: 15_endpoint_apis/06_kisski_endpoint.ipynb - file: 15_endpoint_apis/03_blablador_endpoint.ipynb - file: 15_endpoint_apis/05_azure_endpoints.ipynb - file: 15_endpoint_apis/10_anthropic_api.ipynb