Skip to content

Commit

Permalink
Merge pull request #1854 from hlohaus/nem
Browse files Browse the repository at this point in the history
Add Ecosia Provider, Add OpenaiAccount alias
  • Loading branch information
hlohaus authored Apr 18, 2024
2 parents 0b6601f + 7e543f4 commit 718ea7c
Show file tree
Hide file tree
Showing 12 changed files with 125 additions and 40 deletions.
18 changes: 18 additions & 0 deletions examples/ecosia.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import asyncio

import g4f
from g4f.client import AsyncClient


async def main() -> None:
    """Stream a chat completion from the Ecosia provider and print the text."""
    client = AsyncClient(
        provider=g4f.Provider.Ecosia,
    )
    # stream=True yields delta chunks; green=True is forwarded to the
    # Ecosia provider and selects its "eco" endpoint variant.
    stream = client.chat.completions.create(
        [{"role": "user", "content": "happy dogs on work. write some lines"}],
        g4f.models.default,
        stream=True,
        green=True,
    )
    async for chunk in stream:
        print(chunk.choices[0].delta.content or "", end="")
    # NOTE(review): `chunk` is read after the loop — assumes at least one
    # chunk was produced; an empty stream would raise NameError here.
    print(f"\nwith {chunk.model}")


asyncio.run(main())
47 changes: 47 additions & 0 deletions g4f/Provider/Ecosia.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@

from __future__ import annotations

import base64
import json
from aiohttp import ClientSession, BaseConnector

from ..typing import AsyncResult, Messages
from ..requests.raise_for_status import raise_for_status
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import get_connector

class Ecosia(AsyncGeneratorProvider, ProviderModelMixin):
    """Provider backed by the Ecosia chat API (api.ecosia.org)."""

    url = "https://www.ecosia.org"
    working = True
    supports_gpt_35_turbo = True
    default_model = "gpt-3.5-turbo-0125"
    model_aliases = {"gpt-3.5-turbo": "gpt-3.5-turbo-0125"}

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        connector: BaseConnector = None,
        green: bool = False,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield decoded text chunks streamed from the Ecosia chat endpoint.

        Args:
            model: Requested model name; validated against this provider's models.
            messages: Conversation history to send.
            connector: Optional aiohttp connector to reuse.
            green: When True, query the "eco" endpoint variant instead of
                "productivity".
            proxy: Optional proxy URL passed to the connector helper.
        """
        # Validation only — the API request itself carries no model field.
        cls.get_model(model)
        headers = {
            "authority": "api.ecosia.org",
            "accept": "*/*",
            "origin": cls.url,
            "referer": f"{cls.url}/",
            "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36",
        }
        session = ClientSession(headers=headers, connector=get_connector(connector, proxy))
        async with session:
            # The endpoint expects the message list as base64-encoded JSON.
            payload = {
                "messages": base64.b64encode(json.dumps(messages).encode()).decode()
            }
            variant = 'eco' if green else 'productivity'
            api_url = f"https://api.ecosia.org/v2/chat/?sp={variant}"
            async with session.post(api_url, json=payload) as response:
                await raise_for_status(response)
                async for part in response.content.iter_any():
                    if part:
                        yield part.decode(errors="ignore")
1 change: 0 additions & 1 deletion g4f/Provider/You.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,6 @@ async def upload_file(cls, client: StreamSession, cookies: Cookies, file: bytes,

@classmethod
async def get_cookies(cls, client: StreamSession) -> Cookies:

if not cls._cookies or cls._cookies_used >= 5:
cls._cookies = await cls.create_cookies(client)
cls._cookies_used = 0
Expand Down
3 changes: 2 additions & 1 deletion g4f/Provider/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,11 @@
from .ChatgptNext import ChatgptNext
from .ChatgptX import ChatgptX
from .Cnote import Cnote
from .Cohere import Cohere
from .DeepInfra import DeepInfra
from .DeepInfraImage import DeepInfraImage
from .DuckDuckGo import DuckDuckGo
from .Ecosia import Ecosia
from .Feedough import Feedough
from .FlowGpt import FlowGpt
from .FreeChatgpt import FreeChatgpt
Expand All @@ -46,7 +48,6 @@
from .Vercel import Vercel
from .WhiteRabbitNeo import WhiteRabbitNeo
from .You import You
from .Cohere import Cohere

import sys

Expand Down
2 changes: 1 addition & 1 deletion g4f/Provider/needs_auth/OpenRouter.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ class OpenRouter(Openai):
label = "OpenRouter"
url = "https://openrouter.ai"
working = True
default_model = "openrouter/auto"
default_model = "mistralai/mistral-7b-instruct:free"

@classmethod
def get_models(cls):
Expand Down
11 changes: 9 additions & 2 deletions g4f/Provider/needs_auth/Openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ async def create_async_generator(
await raise_for_status(response)
if not stream:
data = await response.json()
cls.raise_error(data)
choice = data["choices"][0]
if "content" in choice["message"]:
yield choice["message"]["content"].strip()
Expand All @@ -70,8 +71,7 @@ async def create_async_generator(
if chunk == b"[DONE]":
break
data = json.loads(chunk)
if "error_message" in data:
raise ResponseError(data["error_message"])
cls.raise_error(data)
choice = data["choices"][0]
if "content" in choice["delta"] and choice["delta"]["content"]:
delta = choice["delta"]["content"]
Expand All @@ -89,6 +89,13 @@ def read_finish_reason(choice: dict) -> Optional[FinishReason]:
if "finish_reason" in choice and choice["finish_reason"] is not None:
return FinishReason(choice["finish_reason"])

@staticmethod
def raise_error(data: dict):
    """Raise a ResponseError when *data* carries an API error payload.

    Checks the legacy "error_message" field first, then the structured
    "error" object with "code" and "message" keys. Returns None when no
    error field is present.
    """
    if "error_message" in data:
        raise ResponseError(data["error_message"])
    if "error" in data:
        error = data["error"]
        raise ResponseError(f'Error {error["code"]}: {error["message"]}')

@classmethod
def get_headers(cls, stream: bool, api_key: str = None, headers: dict = None) -> dict:
return {
Expand Down
7 changes: 7 additions & 0 deletions g4f/Provider/needs_auth/OpenaiAccount.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
from __future__ import annotations

from .OpenaiChat import OpenaiChat

class OpenaiAccount(OpenaiChat):
    """Alias of OpenaiChat that always requires login credentials."""
    # Display name shown to users (e.g. in the GUI provider list).
    label = "OpenAI ChatGPT with Account"
    # Marks the provider as requiring authentication, unlike plain OpenaiChat.
    needs_auth = True
28 changes: 16 additions & 12 deletions g4f/Provider/needs_auth/OpenaiChat.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@

from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ...webdriver import get_browser
from ...typing import AsyncResult, Messages, Cookies, ImageType, Union, AsyncIterator
from ...typing import AsyncResult, Messages, Cookies, ImageType, AsyncIterator
from ...requests import get_args_from_browser, raise_for_status
from ...requests.aiohttp import StreamSession
from ...image import to_image, to_bytes, ImageResponse, ImageRequest
Expand All @@ -35,7 +35,7 @@
class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
"""A class for creating and managing conversations with OpenAI chat service"""

lebel = "OpenAI ChatGPT"
label = "OpenAI ChatGPT"
url = "https://chat.openai.com"
working = True
supports_gpt_35_turbo = True
Expand Down Expand Up @@ -295,7 +295,7 @@ async def create_async_generator(
model: str,
messages: Messages,
proxy: str = None,
timeout: int = 120,
timeout: int = 180,
api_key: str = None,
cookies: Cookies = None,
auto_continue: bool = False,
Expand Down Expand Up @@ -348,7 +348,7 @@ async def create_async_generator(
if api_key is not None:
cls._set_api_key(api_key)

if cls.default_model is None and cls._api_key is not None:
if cls.default_model is None and (not cls.needs_auth or cls._api_key is not None):
try:
if not model:
cls.default_model = cls.get_model(await cls.get_default_model(session, cls._headers))
Expand All @@ -368,12 +368,12 @@ async def create_async_generator(
arkose_token, api_key, cookies = await getArkoseAndAccessToken(proxy)
cls._create_request_args(cookies)
cls._set_api_key(api_key)
except NoValidHarFileError:
except NoValidHarFileError as e:
...
if cls._api_key is None:
if debug.logging:
print("Getting access token with nodriver.")
await cls.nodriver_access_token()
if cls._api_key is None and cls.needs_auth:
raise e
cls.default_model = cls.get_model(await cls.get_default_model(session, cls._headers))

async with session.post(
Expand Down Expand Up @@ -589,10 +589,11 @@ async def nodriver_access_token(cls):
user_data_dir = user_config_dir("g4f-nodriver")
except:
user_data_dir = None

if debug.logging:
print(f"Open nodriver with user_dir: {user_data_dir}")
browser = await uc.start(user_data_dir=user_data_dir)
page = await browser.get("https://chat.openai.com/")
while await page.query_selector("#prompt-textarea") is None:
while await page.find("[id^=headlessui-menu-button-]") is None:
await asyncio.sleep(1)
api_key = await page.evaluate(
"(async () => {"
Expand All @@ -609,8 +610,9 @@ async def nodriver_access_token(cls):
for c in await page.browser.cookies.get_all():
if c.domain.endswith("chat.openai.com"):
cookies[c.name] = c.value
user_agent = await page.evaluate("window.navigator.userAgent")
await page.close()
cls._create_request_args(cookies)
cls._create_request_args(cookies, user_agent)
cls._set_api_key(api_key)

@classmethod
Expand Down Expand Up @@ -662,7 +664,7 @@ def get_default_headers() -> dict:
"content-type": "application/json",
"oai-device-id": str(uuid.uuid4()),
"oai-language": "en-US",
"sec-ch-ua": "\"Chromium\";v=\"122\", \"Not(A:Brand\";v=\"24\", \"Google Chrome\";v=\"122\"",
"sec-ch-ua": "\"Google Chrome\";v=\"123\", \"Not:A-Brand\";v=\"8\", \"Chromium\";v=\"123\"",
"sec-ch-ua-mobile": "?0",
"sec-ch-ua-platform": "\"Linux\"",
"sec-fetch-dest": "empty",
Expand All @@ -675,8 +677,10 @@ def _format_cookies(cookies: Cookies):
return "; ".join(f"{k}={v}" for k, v in cookies.items() if k != "access_token")

@classmethod
def _create_request_args(cls, cookies: Cookies = None, user_agent: str = None):
    """Reset the class-level request headers and cookie jar.

    Reconstructed post-merge version (the scraped diff interleaved the
    deleted old signature with the new one).

    Args:
        cookies: Initial cookie mapping; an empty dict when None.
        user_agent: When given, overrides the hard-coded default
            "user-agent" header (e.g. with the browser-reported value).
    """
    headers = cls.get_default_headers()
    if user_agent is not None:
        headers["user-agent"] = user_agent
    cls._headers = headers
    cls._cookies = cookies if cookies is not None else {}
    # Re-derive the "cookie" header from the fresh cookie jar.
    cls._update_cookie_header()

Expand Down
3 changes: 2 additions & 1 deletion g4f/Provider/needs_auth/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,5 @@
from .Poe import Poe
from .Openai import Openai
from .Groq import Groq
from .OpenRouter import OpenRouter
from .OpenRouter import OpenRouter
from .OpenaiAccount import OpenaiAccount
29 changes: 14 additions & 15 deletions g4f/api/__init__.py
Original file line number Diff line number Diff line change
@@ -1,30 +1,30 @@
import logging
import json
import uvicorn
import nest_asyncio

from fastapi import FastAPI, Response, Request
from fastapi.responses import StreamingResponse, RedirectResponse, HTMLResponse, JSONResponse
from fastapi.exceptions import RequestValidationError
from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
from typing import List, Union
from typing import List, Union, Optional

import g4f
import g4f.debug
from g4f.client import Client
from g4f.client import AsyncClient
from g4f.typing import Messages

class ChatCompletionsConfig(BaseModel):
    """Request body schema for the /v1/chat/completions endpoint.

    Reconstructed post-merge version (the scraped diff interleaved the
    deleted Union[..., None] annotations with the new Optional[...] ones).
    """
    messages: Messages
    model: str
    provider: Optional[str] = None
    stream: bool = False
    temperature: Optional[float] = None
    max_tokens: Optional[int] = None
    # "stop" accepts a single stop string or a list of them.
    stop: Union[list[str], str, None] = None
    api_key: Optional[str] = None
    web_search: Optional[bool] = None

class Api:
def __init__(self, engine: g4f, debug: bool = True, sentry: bool = False,
Expand All @@ -36,9 +36,7 @@ def __init__(self, engine: g4f, debug: bool = True, sentry: bool = False,

if debug:
g4f.debug.logging = True
self.client = Client()

nest_asyncio.apply()
self.client = AsyncClient()
self.app = FastAPI()

self.routes()
Expand Down Expand Up @@ -90,7 +88,7 @@ async def models():
@self.app.get("/v1/models/{model_name}")
async def model_info(model_name: str):
try:
model_info = g4f.ModelUtils.convert[model_name]
model_info = g4f.models.ModelUtils.convert[model_name]
return JSONResponse({
'id': model_name,
'object': 'model',
Expand Down Expand Up @@ -119,17 +117,18 @@ async def chat_completions(config: ChatCompletionsConfig = None, request: Reques
return Response(content=format_exception(e, config), status_code=500, media_type="application/json")

if not config.stream:
return JSONResponse(response.to_json())
return JSONResponse((await response).to_json())

def streaming():
async def streaming():
try:
for chunk in response:
async for chunk in response:
yield f"data: {json.dumps(chunk.to_json())}\n\n"
except GeneratorExit:
pass
except Exception as e:
logging.exception(e)
yield f'data: {format_exception(e, config)}'
yield f'data: {format_exception(e, config)}\n\n'
yield "data: [DONE]\n\n"

return StreamingResponse(streaming(), media_type="text/event-stream")

Expand Down
8 changes: 4 additions & 4 deletions g4f/gui/client/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@ <h3>Settings</h3>
<textarea id="Gemini-api_key" name="Gemini[api_key]" placeholder="&quot;__Secure-1PSID&quot; cookie"></textarea>
</div>
<div class="field box">
<label for="GeminiPro-api_key" class="label" title="">GeminiPro:</label>
<label for="GeminiPro-api_key" class="label" title="">GeminiPro API:</label>
<textarea id="GeminiPro-api_key" name="GeminiPro[api_key]" placeholder="api_key"></textarea>
</div>
<div class="field box">
Expand All @@ -146,12 +146,12 @@ <h3>Settings</h3>
<textarea id="HuggingFace-api_key" name="HuggingFace[api_key]" placeholder="api_key"></textarea>
</div>
<div class="field box">
<label for="Openai-api_key" class="label" title="">Openai:</label>
<label for="Openai-api_key" class="label" title="">OpenAI API:</label>
<textarea id="Openai-api_key" name="Openai[api_key]" placeholder="api_key"></textarea>
</div>
<div class="field box">
<label for="OpenaiChat-api_key" class="label" title="">OpenaiChat:</label>
<textarea id="OpenaiChat-api_key" name="OpenaiChat[api_key]" placeholder="api_key"></textarea>
<label for="OpenaiAccount-api_key" class="label" title="">OpenAI ChatGPT:</label>
<textarea id="OpenaiAccount-api_key" name="OpenaiAccount[api_key]" placeholder="access_key"></textarea>
</div>
<div class="field box">
<label for="OpenRouter-api_key" class="label" title="">OpenRouter:</label>
Expand Down
8 changes: 5 additions & 3 deletions g4f/providers/base_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -269,16 +269,18 @@ async def create_async_generator(
AsyncResult: An asynchronous generator yielding results.
"""
raise NotImplementedError()

class ProviderModelMixin:
default_model: str
models: list[str] = []
model_aliases: dict[str, str] = {}

@classmethod
def get_models(cls) -> list[str]:
    """Return the provider's model list, or the default model when none is set."""
    if cls.models:
        return cls.models
    # Empty model list: fall back to a one-element list with the default.
    return [cls.default_model]

@classmethod
def get_model(cls, model: str) -> str:
if not model and cls.default_model is not None:
Expand Down

0 comments on commit 718ea7c

Please sign in to comment.