mirror of https://github.com/xtekky/gpt4free.git
synced 2026-01-03 15:53:09 -08:00

Added new provider (g4f/Provider/Ai4Chat.py)

parent 48e8cbfb6d
commit 4cd1ed1a71
3 changed files with 76 additions and 1 deletion

g4f/Provider/Ai4Chat.py (new file, 70 additions)
@@ -0,0 +1,70 @@
+from __future__ import annotations
+
+from aiohttp import ClientSession
+import re
+
+from ..typing import AsyncResult, Messages
+from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from .helper import format_prompt
+
+
+class Ai4Chat(AsyncGeneratorProvider, ProviderModelMixin):
+    url = "https://www.ai4chat.co"
+    api_endpoint = "https://www.ai4chat.co/generate-response"
+    working = True
+    supports_gpt_4 = False
+    supports_stream = False
+    supports_system_message = True
+    supports_message_history = True
+
+    default_model = 'gpt-4'
+
+    @classmethod
+    def get_model(cls, model: str) -> str:
+        return cls.default_model
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        proxy: str = None,
+        **kwargs
+    ) -> AsyncResult:
+        model = cls.get_model(model)
+
+        headers = {
+            'accept': '*/*',
+            'accept-language': 'en-US,en;q=0.9',
+            'cache-control': 'no-cache',
+            'content-type': 'application/json',
+            'cookie': 'messageCount=2',
+            'origin': 'https://www.ai4chat.co',
+            'pragma': 'no-cache',
+            'priority': 'u=1, i',
+            'referer': 'https://www.ai4chat.co/gpt/talkdirtytome',
+            'sec-ch-ua': '"Chromium";v="129", "Not=A?Brand";v="8"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"Linux"',
+            'sec-fetch-dest': 'empty',
+            'sec-fetch-mode': 'cors',
+            'sec-fetch-site': 'same-origin',
+            'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36'
+        }
+
+        async with ClientSession(headers=headers) as session:
+            payload = {
+                "messages": [
+                    {
+                        "role": "user",
+                        "content": format_prompt(messages)
+                    }
+                ]
+            }
+
+            async with session.post(cls.api_endpoint, json=payload, proxy=proxy) as response:
+                response.raise_for_status()
+                response_data = await response.json()
+                message = response_data.get('message', '')
+                clean_message = re.sub('<[^<]+?>', '', message).strip()
+                yield clean_message
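For reference, a minimal sketch (not part of this diff) of how the new provider could be exercised directly through its async generator, assuming the package is installed and the export added in the next hunk is in place; the prompt text is purely illustrative.

import asyncio

from g4f.Provider import Ai4Chat

async def main():
    # OpenAI-style message list; format_prompt() flattens it into a single prompt.
    messages = [{"role": "user", "content": "Say hello in one short sentence."}]
    # The provider declares supports_stream = False, so the generator yields
    # the whole cleaned response as a single chunk.
    async for chunk in Ai4Chat.create_async_generator(model="gpt-4", messages=messages):
        print(chunk)

asyncio.run(main())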
g4f/Provider/__init__.py

@@ -11,6 +11,7 @@ from .needs_auth import *
 
 from .nexra import *
 
+from .Ai4Chat import Ai4Chat
 from .AI365VIP import AI365VIP
 from .AIChatFree import AIChatFree
 from .AIUncensored import AIUncensored
g4f/models.py

@@ -5,6 +5,7 @@ from dataclasses import dataclass
 
 from .Provider import IterListProvider, ProviderType
 from .Provider import (
+    Ai4Chat,
     AIChatFree,
     AiMathGPT,
     Airforce,
@@ -104,6 +105,9 @@ default = Model(
         AmigoChat,
         ChatifyAI,
         Cloudflare,
+        Ai4Chat,
+        Editee,
+        AiMathGPT,
     ])
 )
 
@@ -148,7 +152,7 @@ gpt_4_turbo = Model(
 gpt_4 = Model(
     name = 'gpt-4',
     base_provider = 'OpenAI',
-    best_provider = IterListProvider([NexraChatGPT, NexraChatGptV2, NexraChatGptWeb, Airforce, Chatgpt4Online, Bing, OpenaiChat])
+    best_provider = IterListProvider([NexraChatGPT, NexraChatGptV2, NexraChatGptWeb, Ai4Chat, Airforce, Chatgpt4Online, Bing, OpenaiChat])
 )
 
 # o1
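With Ai4Chat added to the default provider list and to the gpt-4 provider chain, an ordinary gpt-4 request can now be routed to it. A minimal sketch, assuming the classic g4f.ChatCompletion.create entry point; pinning the provider explicitly is only for illustration, since IterListProvider would otherwise pick the first working entry on its own.

import g4f

# Omitting `provider` lets IterListProvider walk the gpt-4 list above;
# pinning Ai4Chat forces this specific backend.
response = g4f.ChatCompletion.create(
    model="gpt-4",
    provider=g4f.Provider.Ai4Chat,
    messages=[{"role": "user", "content": "Summarize aiohttp in one line."}],
)
print(response)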