gpt4free/g4f/Provider/hf_space/CohereForAI_C4AI_Command.py
kqlio67 c3d61ad9e3 refactor: update providers and models for better compatibility
- Changed default model in commit.py from "gpt-4o" to "claude-3.7-sonnet"
- Fixed ARTA provider by adding proper auth token handling and form data submission
- Updated Blackbox provider to use OpenRouter models instead of premium models
- Improved DDG provider with simplified authentication and better error handling
- Updated DeepInfraChat provider with new models and aliases
- Removed non-working providers: Goabror, Jmuz, OIVSCode, AllenAI, ChatGptEs, FreeRouter, Glider
- Moved non-working providers to the not_working directory
- Added BlackboxPro provider in needs_auth directory with premium model support
- Updated Liaobots provider with new models and improved authentication
- Renamed Microsoft_Phi_4 to Microsoft_Phi_4_Multimodal for clarity
- Updated LambdaChat provider with direct API implementation instead of HuggingChat
- Updated models.py with new model definitions and provider mappings
- Removed BlackForestLabs_Flux1Schnell from HuggingSpace providers
- Updated model aliases across multiple providers for better compatibility
- Fixed Dynaspark provider endpoint URL to prevent spam detection
2025-05-12 20:24:36 +03:00

102 lines
4.5 KiB
Python

from __future__ import annotations
import json
from aiohttp import ClientSession, FormData
from ...typing import AsyncResult, Messages
from ...requests import raise_for_status
from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..helper import format_prompt, get_last_user_message
from ...providers.response import JsonConversation, TitleGeneration
class CohereForAI_C4AI_Command(AsyncGeneratorProvider, ProviderModelMixin):
    """Provider for CohereForAI C4AI Command models hosted on a Hugging Face Space.

    Flow: (1) create/reuse a conversation via POST /conversation, (2) fetch the
    sveltekit ``__data.json`` payload to resolve the id of the last message,
    (3) POST the prompt as multipart form data and stream newline-delimited
    JSON events back, yielding tokens, titles, and the conversation object.
    """
    label = "CohereForAI C4AI Command"
    url = "https://coherelabs-c4ai-command.hf.space"
    conversation_url = f"{url}/conversation"
    working = True

    default_model = "command-a-03-2025"
    models = [
        default_model,
        "command-r-plus-08-2024",
        "command-r-08-2024",
        "command-r-plus",
        "command-r",
        "command-r7b-12-2024",
        "command-r7b-arabic-02-2025",
    ]

    @classmethod
    def get_model(cls, model: str, **kwargs) -> str:
        """Resolve ``model`` to a supported name.

        A value that is already a resolved alias target is accepted as-is;
        everything else is delegated to the mixin's resolution logic.
        """
        if model in cls.model_aliases.values():
            return model
        return super().get_model(model, **kwargs)

    @classmethod
    async def create_async_generator(
        cls, model: str, messages: Messages,
        api_key: str = None,
        proxy: str = None,
        conversation: JsonConversation = None,
        return_conversation: bool = True,
        **kwargs
    ) -> AsyncResult:
        """Stream a chat completion from the hf.space conversation API.

        Args:
            model: Model name or alias; resolved via :meth:`get_model`.
            messages: Chat history; system messages are folded into a preprompt.
            api_key: Optional bearer token added to the request headers.
            proxy: Optional HTTP proxy URL passed to every request.
            conversation: Previous conversation state to continue; a new one is
                created when absent or when model/preprompt changed.
            return_conversation: When true, yield the ``JsonConversation`` so
                the caller can reuse it on the next turn.

        Yields:
            str tokens, ``TitleGeneration`` events, and (optionally) the
            ``JsonConversation`` state object.

        Raises:
            RuntimeError: On an error node in the data payload or an
                unparsable stream chunk.
        """
        model = cls.get_model(model)
        headers = {
            "Origin": cls.url,
            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:133.0) Gecko/20100101 Firefox/133.0",
            "Accept": "*/*",
            "Accept-Language": "en-US,en;q=0.5",
            # NOTE(review): Referer hostname differs from cls.url (old space
            # name?) — left unchanged in case the server validates it; confirm.
            "Referer": "https://cohereforai-c4ai-command.hf.space/",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-origin",
            "Priority": "u=4",
        }
        if api_key is not None:
            headers["Authorization"] = f"Bearer {api_key}"
        async with ClientSession(
            headers=headers,
            cookies=None if conversation is None else conversation.cookies
        ) as session:
            # System messages become the conversation "preprompt"; the rest form the prompt.
            system_prompt = "\n".join([message["content"] for message in messages if message["role"] == "system"])
            messages = [message for message in messages if message["role"] != "system"]
            # Fresh conversation: send the full formatted history. Continued
            # conversation: the server has the context, send only the last user turn.
            inputs = format_prompt(messages) if conversation is None else get_last_user_message(messages)
            if conversation is None or conversation.model != model or conversation.preprompt != system_prompt:
                data = {"model": model, "preprompt": system_prompt}
                async with session.post(cls.conversation_url, json=data, proxy=proxy) as response:
                    await raise_for_status(response)
                    conversation = JsonConversation(
                        **await response.json(),
                        **data,
                        # Persist session cookies so follow-up turns reuse them.
                        cookies={n: c.value for n, c in response.cookies.items()}
                    )
                    if return_conversation:
                        yield conversation
            async with session.get(f"{cls.conversation_url}/{conversation.conversationId}/__data.json?x-sveltekit-invalidated=11", proxy=proxy) as response:
                await raise_for_status(response)
                # First line of the payload is JSON; node[1] carries the conversation data.
                node = json.loads((await response.text()).splitlines()[0])["nodes"][1]
                if node["type"] == "error":
                    raise RuntimeError(node["error"])
                data = node["data"]
                # Sveltekit serializes by index-references into the flat `data`
                # list: data[0]["messages"] -> index of the message-index list,
                # [-1] -> index of the last message dict, ["id"] -> index of the
                # id string. Dereference each hop to get the last message id.
                message_id = data[data[data[data[0]["messages"]][-1]]["id"]]
            data = FormData()
            data.add_field(
                "data",
                json.dumps({"inputs": inputs, "id": message_id, "is_retry": False, "is_continue": False, "web_search": False, "tools": []}),
                content_type="application/json"
            )
            async with session.post(f"{cls.conversation_url}/{conversation.conversationId}", data=data, proxy=proxy) as response:
                await raise_for_status(response)
                # StreamReader iteration yields one line per event (NDJSON).
                async for chunk in response.content:
                    if not chunk.strip():
                        continue  # skip blank keep-alive lines instead of crashing json.loads
                    try:
                        data = json.loads(chunk)
                    except json.JSONDecodeError as e:
                        # Chain the decode error instead of packing it into args.
                        raise RuntimeError(f"Failed to read response: {chunk.decode(errors='replace')}") from e
                    if data["type"] == "stream":
                        # Strip NUL characters the stream occasionally embeds.
                        yield data["token"].replace("\u0000", "")
                    elif data["type"] == "title":
                        yield TitleGeneration(data["title"])
                    elif data["type"] == "finalAnswer":
                        break