feat: update async client for open router
dsdanielpark committed Apr 21, 2024
1 parent 7a18c2e commit 89984a2
Showing 5 changed files with 178 additions and 14 deletions.
71 changes: 69 additions & 2 deletions documents/README_OPENROUTER.md
@@ -1,7 +1,6 @@
# OpenRouter: Free Open-Source LLMs API


OpenRouter offers free access to various open-source Large Language Models (LLMs), allowing you to explore and experiment with these powerful models without incurring any charges.
OpenRouter offers free access to various open-source Large Language Models (LLMs), allowing you to explore and experiment with these powerful models without incurring any charges. After we contacted OpenRouter via Discord, they confirmed that there are currently no plans to switch the free models to paid ones. (2024-04)

> [!IMPORTANT]
> The free models may be temporary and subject to change based on policies. Please refer to the following page to check the available free models: [Open Router Models](https://openrouter.ai/docs#models) (Free limit: 10 requests/minute)
@@ -132,6 +131,74 @@ response = toppy_client.create_chat_completion(prompt)
print(response)
```


<br><br><br>

# OpenRouter Async API Client

For the Gemini API, synchronous objects are preferred over asynchronous ones, because rate limiting and blocking can easily neutralize concurrent async requests. However, since OpenRouter reliably provides open-source LLMs, you can use the asynchronous implementation as follows.

The `AsyncOpenRouter` class manages API interactions with OpenRouter for creating chat completions with AI models asynchronously. It uses `aiohttp` for non-blocking network calls.

## Features

- **Asynchronous API Calls**: Makes use of Python's `asyncio` and `aiohttp` to perform asynchronous API calls.
- **Concurrent Completions**: Ability to handle multiple chat completions concurrently.
- **Error Handling**: Basic error handling for API keys and message formatting.


## Class Usage

### Initialization

Initialize an instance of `AsyncOpenRouter` with your model identifier and API key:

```python
from gemini.src.modules.openrouter import AsyncOpenRouter

api_key = 'your_api_key_here'
model = 'google/gemma-7b-it:free'
router = AsyncOpenRouter(model, api_key)
```
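
The free model identifiers are also exposed through the `FreeModel` enum added in this commit, so you do not have to hard-code the string. A minimal sketch, assuming the package layout from `gemini/src/modules/openrouter/__init__.py` is importable as shown:

```python
from gemini.src.modules.openrouter import AsyncOpenRouter, FreeModel

api_key = 'your_api_key_here'

# Each FreeModel member wraps a ":free" model identifier; .value yields the raw string.
router = AsyncOpenRouter(FreeModel.GEMMA_7B.value, api_key)
```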

### Single Chat Completion

To generate a single chat completion asynchronously:

```python
import asyncio

async def main():
    completion = await router.create_chat_completion("Hello, how can I help you today?")
    print(completion)

if __name__ == "__main__":
    asyncio.run(main())
```
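
Because `generate_content` calls `response.raise_for_status()`, a rejected request (for example, when the free limit of roughly 10 requests per minute is exceeded) surfaces as `aiohttp.ClientResponseError`. A minimal sketch of catching it, not a prescribed pattern:

```python
import asyncio
import aiohttp

async def main():
    try:
        completion = await router.create_chat_completion("Hello, how can I help you today?")
        print(completion)
    except aiohttp.ClientResponseError as e:
        # A 429 status typically indicates the free-tier rate limit was hit.
        print(f"Request failed with status {e.status}: {e.message}")

if __name__ == "__main__":
    asyncio.run(main())
```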

### Multiple Chat Completions

To handle multiple chat completions concurrently:

```python
import asyncio

async def main():
    messages = [
        "Hello, how can I help you today?",
        "What is the weather like today?",
        "Can you recommend some books?"
    ]
    completions = await router.create_multi_chat_completions(messages)
    for completion in completions:
        print(completion)

if __name__ == "__main__":
    asyncio.run(main())
```
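
Because the free tier allows about 10 requests per minute, you may want to cap how many completions run at once. The helper below is not part of the client; it is a sketch that wraps `create_chat_completion` in an `asyncio.Semaphore`:

```python
import asyncio

async def main():
    semaphore = asyncio.Semaphore(3)  # allow at most 3 requests in flight

    async def limited_completion(message: str) -> str:
        # Hypothetical helper: acquire the semaphore before each request.
        async with semaphore:
            return await router.create_chat_completion(message)

    messages = [
        "Hello, how can I help you today?",
        "What is the weather like today?",
        "Can you recommend some books?"
    ]
    completions = await asyncio.gather(*(limited_completion(m) for m in messages))
    for completion in completions:
        print(completion)

if __name__ == "__main__":
    asyncio.run(main())
```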

This section provides a basic guide to setting up and using the `AsyncOpenRouter` class, including how to send asynchronous requests for single and multiple chat completions.

### More Examples

Check out the [OpenRouter documentation](https://openrouter.ai/docs) for more examples and usage details for the other available free models.
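
For instance, the same prompt can be sent to several free models concurrently by creating one `AsyncOpenRouter` client per model. This is a sketch built on the `FreeModel` enum from this commit, not an official recipe:

```python
import asyncio
from gemini.src.modules.openrouter import AsyncOpenRouter, FreeModel

async def main():
    api_key = 'your_api_key_here'
    prompt = "Summarize the benefits of open-source LLMs in one sentence."
    models = [FreeModel.GEMMA_7B, FreeModel.MISTRAL_7B]

    # One client per model; the requests run concurrently.
    routers = [AsyncOpenRouter(m.value, api_key) for m in models]
    completions = await asyncio.gather(*(r.create_chat_completion(prompt) for r in routers))

    for m, completion in zip(models, completions):
        print(f"[{m.value}]\n{completion}\n")

if __name__ == "__main__":
    asyncio.run(main())
```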
2 changes: 2 additions & 0 deletions gemini/src/modules/openrouter/__init__.py
@@ -1 +1,3 @@
from .client import OpenRouter
from .async_client import AsyncOpenRouter
from .const import FreeModel
93 changes: 93 additions & 0 deletions gemini/src/modules/openrouter/async_client.py
@@ -0,0 +1,93 @@
import aiohttp
import asyncio
from .const import FreeModel
from typing import List, Optional


class AsyncOpenRouter:
    """
    Manages API interactions with OpenRouter for creating chat completions using AI models asynchronously.
    Attributes and methods are analogous to the synchronous version but adapted for async operation.
    """

    def __init__(self, model: str, api_key: str) -> None:
        if not api_key:
            raise ValueError(
                "API key required. Please visit https://openrouter.ai/keys"
            )
        self.api_key = api_key
        self.model = model
        self._validate_model(model)

    def get_model_list(self) -> List[str]:
        # Return the identifiers of the known free models.
        return [m.value for m in FreeModel]

    async def create_chat_completion(
        self,
        message: str,
        site_url: Optional[str] = None,
        app_name: Optional[str] = None,
    ) -> str:
        response = await self.generate_content(message, site_url, app_name)
        return response["choices"][0]["message"]["content"]

    async def create_multi_chat_completions(
        self,
        messages: List[str],
        site_url: Optional[str] = None,
        app_name: Optional[str] = None,
    ) -> List[str]:
        # Run all completions concurrently; results preserve the input order.
        tasks = [
            self.create_chat_completion(message, site_url, app_name)
            for message in messages
        ]
        results = await asyncio.gather(*tasks)
        return results

    async def generate_content(
        self,
        message: str,
        site_url: Optional[str] = None,
        app_name: Optional[str] = None,
    ) -> dict:
        self._validate_message(message)
        self._validate_model(self.model)

        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }
        if site_url:
            headers["HTTP-Referer"] = site_url
        if app_name:
            headers["X-Title"] = app_name

        data = {
            "model": self.model,
            "messages": [
                {"role": "user", "content": message},
            ],
        }

        async with aiohttp.ClientSession() as session:
            async with session.post(
                "https://openrouter.ai/api/v1/chat/completions",
                headers=headers,
                json=data,
            ) as response:
                response.raise_for_status()
                return await response.json()

    def _validate_message(self, message: str) -> None:
        if not isinstance(message, str):
            raise ValueError("Message must be a string")

    def _validate_model(self, model: str) -> None:
        """
        Checks if the specified model is in the list of free models.
        """
        if model not in [m.value for m in FreeModel]:
            print(
                "This model may not be free. Please check the following list for costs.\nUsers are responsible for API costs. Visit https://openrouter.ai/docs#models"
            )
14 changes: 2 additions & 12 deletions gemini/src/modules/openrouter/client.py
@@ -1,4 +1,5 @@
import requests
from .const import FreeModel
from typing import List, Optional
from requests.models import Response

@@ -22,17 +23,6 @@ class OpenRouter:
ValueError: If an API key is not provided or if the message format is incorrect.
"""

    FREE_MODEL_LIST: List[str] = [
        "google/gemma-7b-it:free",
        "huggingfaceh4/zephyr-7b-beta:free",
        "mistralai/mistral-7b-instruct:free",
        "openrouter/cinematika-7b:free",
        "undi95/toppy-m-7b:free",
        "gryphe/mythomist-7b:free",
        "nousresearch/nous-capybara-7b:free",
        "openchat/openchat-7b:free",
    ]

    def __init__(self, model: str, api_key: str) -> None:
        """
        Initializes the OpenRouter instance with a specified model and API key.
@@ -132,7 +122,7 @@ def _validate_model(self, model: str) -> None:
        """
        Checks if the specified model is in the list of free models.
        """
        if model not in self.FREE_MODEL_LIST:
        if model not in [m.value for m in FreeModel]:
            print(
                "This model may not be free. Please check the following list for costs.\nUsers are responsible for API costs. Visit https://openrouter.ai/docs#models"
            )
12 changes: 12 additions & 0 deletions gemini/src/modules/openrouter/const.py
@@ -0,0 +1,12 @@
from enum import Enum


class FreeModel(Enum):
    GEMMA_7B = "google/gemma-7b-it:free"
    ZEPHYR_7B = "huggingfaceh4/zephyr-7b-beta:free"
    MISTRAL_7B = "mistralai/mistral-7b-instruct:free"
    CINEMATIKA_7B = "openrouter/cinematika-7b:free"
    TOPPY_M_7B = "undi95/toppy-m-7b:free"
    MYTHOMIST_7B = "gryphe/mythomist-7b:free"
    CAPYBARA_7B = "nousresearch/nous-capybara-7b:free"
    OPENCHAT_7B = "openchat/openchat-7b:free"
