Skip to content

Commit

Permalink
Add Aichatos and Blackbox Provider (#1822)
Browse files Browse the repository at this point in the history
Providers added:
- Aichatos - Added a new provider with GPT-3.5 model
- Blackbox - Added a new provider
- Feedough - Added a new provider with GPT-3 model
- Cnote - Added a new provider with GPT-3.5 model
  • Loading branch information
kqlio67 authored Apr 12, 2024
1 parent f724c07 commit fa739d2
Show file tree
Hide file tree
Showing 6 changed files with 229 additions and 2 deletions.
57 changes: 57 additions & 0 deletions g4f/Provider/Aichatos.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
from __future__ import annotations

from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
from .helper import format_prompt

import random


class Aichatos(AsyncGeneratorProvider):
    """Provider for chat10.aichatos.xyz, backed by the api.binjie.fun API.

    Streams raw text chunks from the ``/api/generateStream`` endpoint.
    """
    url = "https://chat10.aichatos.xyz"
    api = "https://api.binjie.fun"
    working = True
    supports_gpt_35_turbo = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield decoded response chunks for the formatted conversation.

        Args:
            model: Model name (ignored by this backend).
            messages: Conversation history to format into a single prompt.
            proxy: Optional HTTP proxy URL passed to aiohttp.

        Raises:
            aiohttp.ClientResponseError: If the API returns an error status.
        """
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
            "Accept": "application/json, text/plain, */*",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept-Encoding": "gzip, deflate, br",
            "Content-Type": "application/json",
            "Origin": "https://chat10.aichatos.xyz",
            "DNT": "1",
            "Sec-GPC": "1",
            "Connection": "keep-alive",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "cross-site",
            "TE": "trailers",
        }
        async with ClientSession(headers=headers) as session:
            prompt = format_prompt(messages)
            user_id = random.randint(1000000000000, 9999999999999)
            # Fix: the original `system_message: str = "",` had a trailing
            # comma, which made the value the tuple ("",) instead of a string.
            system_message: str = ""
            data = {
                "prompt": prompt,
                # Fix: the original literal lacked the f-prefix, so the text
                # "#/chat/{userId}" was sent and the random id was never used.
                "userId": f"#/chat/{user_id}",
                "network": True,
                "system": system_message,
                "withoutContext": False,
                "stream": True,
            }
            async with session.post(f"{cls.api}/api/generateStream", json=data, proxy=proxy) as response:
                response.raise_for_status()
                async for chunk in response.content:
                    if chunk:
                        yield chunk.decode()
56 changes: 56 additions & 0 deletions g4f/Provider/Blackbox.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
from __future__ import annotations

import uuid
import secrets
from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider

class Blackbox(AsyncGeneratorProvider):
    """Provider for www.blackbox.ai, streaming from its ``/api/chat`` endpoint."""
    url = "https://www.blackbox.ai"
    working = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield decoded response chunks for the given conversation.

        Args:
            model: Model name (ignored by this backend).
            messages: Conversation history, forwarded verbatim in the payload.
            proxy: Optional HTTP proxy URL passed to aiohttp.

        Raises:
            aiohttp.ClientResponseError: If the API returns an error status.
        """
        request_headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
            "Accept": "*/*",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept-Encoding": "gzip, deflate, br",
            "Referer": cls.url,
            "Content-Type": "application/json",
            "Origin": cls.url,
            "DNT": "1",
            "Sec-GPC": "1",
            "Alt-Used": "www.blackbox.ai",
            "Connection": "keep-alive",
        }
        async with ClientSession(headers=request_headers) as session:
            # Fresh identifiers per request: a hex conversation id and a UUID user id.
            conversation_id = secrets.token_hex(16)
            payload = {
                "messages": messages,
                "id": conversation_id,
                "userId": str(uuid.uuid4()),
                "codeModelMode": True,
                "agentMode": {},
                "trendingAgentMode": {},
                "isMicMode": False,
                "isChromeExt": False,
                "playgroundMode": False,
                "webSearchMode": False,
                "userSystemPrompt": "",
                "githubToken": None
            }
            endpoint = f"{cls.url}/api/chat"
            async with session.post(endpoint, json=payload, proxy=proxy) as response:
                response.raise_for_status()
                async for piece in response.content:
                    if not piece:
                        continue
                    yield piece.decode()
58 changes: 58 additions & 0 deletions g4f/Provider/Cnote.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
from __future__ import annotations

import json
from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
from .helper import format_prompt


class Cnote(AsyncGeneratorProvider):
    """Provider for f1.cnote.top, backed by the p1api.xjai.pro free API.

    Streams chunks from ``chat-process``; each chunk's JSON payload follows a
    ``&KFw6loC9Qvy&`` delimiter and carries the text under the ``text`` key.
    """
    url = "https://f1.cnote.top"
    api_url = "https://p1api.xjai.pro/freeapi/chat-process"
    working = True
    supports_gpt_35_turbo = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield text fragments parsed from the streamed API response.

        Args:
            model: Model name (ignored by this backend).
            messages: Conversation history to format into a single prompt.
            proxy: Optional HTTP proxy URL passed to aiohttp.

        Raises:
            aiohttp.ClientResponseError: If the API returns an error status.
        """
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
            "Accept": "application/json, text/plain, */*",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept-Encoding": "gzip, deflate, br",
            "Content-Type": "application/json",
            "Origin": cls.url,
            "DNT": "1",
            "Sec-GPC": "1",
            "Connection": "keep-alive",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "cross-site",
            "TE": "trailers",
        }
        async with ClientSession(headers=headers) as session:
            prompt = format_prompt(messages)
            # Fix: the original `system_message: str = "",` had a trailing
            # comma, which made the value the tuple ("",) instead of a string,
            # so "systemMessage" was serialized as a JSON array, not a string.
            system_message: str = ""
            data = {
                "prompt": prompt,
                "systemMessage": system_message,
                "temperature": 0.8,
                "top_p": 1,
            }
            async with session.post(cls.api_url, json=data, proxy=proxy) as response:
                response.raise_for_status()
                async for chunk in response.content:
                    if chunk:
                        try:
                            data = json.loads(chunk.decode().split("&KFw6loC9Qvy&")[-1])
                            text = data.get("text", "")
                            yield text
                        except (json.JSONDecodeError, IndexError):
                            # Malformed or partial chunks are skipped silently,
                            # matching the best-effort streaming behavior.
                            pass
52 changes: 52 additions & 0 deletions g4f/Provider/Feedough.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
from __future__ import annotations

import json
from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
from .helper import format_prompt


class Feedough(AsyncGeneratorProvider):
    """Provider for www.feedough.com's AI prompt generator admin-ajax endpoint."""
    url = "https://www.feedough.com"
    working = True
    supports_gpt_35_turbo = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        """Yield the generated message for the formatted conversation.

        Args:
            model: Model name (ignored by this backend).
            messages: Conversation history to format into a single prompt.
            proxy: Optional HTTP proxy URL passed to aiohttp.

        Raises:
            aiohttp.ClientResponseError: If the endpoint returns an error status.
        """
        request_headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
            "Accept": "*/*",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept-Encoding": "gzip, deflate, br",
            "Referer": "https://www.feedough.com/ai-prompt-generator/",
            "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
            "Origin": "https://www.feedough.com",
            "DNT": "1",
            "Sec-GPC": "1",
            "Connection": "keep-alive",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-origin",
            "TE": "trailers",
        }
        async with ClientSession(headers=request_headers) as session:
            # Form-encoded WordPress AJAX action, not a JSON body.
            form_fields = {
                "action": "aixg_generate",
                "prompt": format_prompt(messages),
            }
            endpoint = f"{cls.url}/wp-admin/admin-ajax.php"
            async with session.post(endpoint, data=form_fields, proxy=proxy) as response:
                response.raise_for_status()
                body = json.loads(await response.text())
                if body["success"]:
                    yield body["data"]["message"]
2 changes: 1 addition & 1 deletion g4f/Provider/Liaobots.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ async def create_async_generator(
"model": models[cls.get_model(model)],
"messages": messages,
"key": "",
"prompt": kwargs.get("system_message", "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully."),
"prompt": kwargs.get("system_message", "You are a helpful assistant."),
}
async with session.post(
"https://liaobots.work/api/chat",
Expand Down
6 changes: 5 additions & 1 deletion g4f/Provider/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,18 +11,22 @@
from .needs_auth import *
from .unfinished import *

from .Aichatos import Aichatos
from .Aura import Aura
from .Bing import Bing
from .BingCreateImages import BingCreateImages
from .Blackbox import Blackbox
from .ChatForAi import ChatForAi
from .Chatgpt4Online import Chatgpt4Online
from .ChatgptAi import ChatgptAi
from .ChatgptFree import ChatgptFree
from .ChatgptNext import ChatgptNext
from .ChatgptX import ChatgptX
from .Cnote import Cnote
from .DeepInfra import DeepInfra
from .DeepInfraImage import DeepInfraImage
from .DuckDuckGo import DuckDuckGo
from .Feedough import Feedough
from .FlowGpt import FlowGpt
from .FreeChatgpt import FreeChatgpt
from .FreeGpt import FreeGpt
Expand Down Expand Up @@ -62,4 +66,4 @@
])

class ProviderUtils:
convert: dict[str, ProviderType] = __map__
convert: dict[str, ProviderType] = __map__

0 comments on commit fa739d2

Please sign in to comment.