mirror of
https://github.com/xtekky/gpt4free.git
synced 2025-12-06 02:30:41 -08:00
Changes in this commit:

- Changed default model in commit.py from "gpt-4o" to "claude-3.7-sonnet"
- Fixed ARTA provider by adding proper auth token handling and form data submission
- Updated Blackbox provider to use OpenRouter models instead of premium models
- Improved DDG provider with simplified authentication and better error handling
- Updated DeepInfraChat provider with new models and aliases
- Removed non-working providers: Goabror, Jmuz, OIVSCode, AllenAI, ChatGptEs, FreeRouter, Glider
- Moved non-working providers to the not_working directory
- Added BlackboxPro provider in needs_auth directory with premium model support
- Updated Liaobots provider with new models and improved authentication
- Renamed Microsoft_Phi_4 to Microsoft_Phi_4_Multimodal for clarity
- Updated LambdaChat provider with direct API implementation instead of HuggingChat
- Updated models.py with new model definitions and provider mappings
- Removed BlackForestLabs_Flux1Schnell from HuggingSpace providers
- Updated model aliases across multiple providers for better compatibility
- Fixed Dynaspark provider endpoint URL to prevent spam detection
51 lines
1.6 KiB
Python
51 lines
1.6 KiB
Python
from __future__ import annotations
|
|
|
|
import asyncio
|
|
|
|
try:
|
|
from duckai import DuckAI
|
|
has_requirements = True
|
|
except ImportError:
|
|
has_requirements = False
|
|
|
|
from ..typing import CreateResult, Messages
|
|
from .base_provider import AbstractProvider, ProviderModelMixin
|
|
from .helper import get_last_user_message
|
|
|
|
class DuckDuckGo(AbstractProvider, ProviderModelMixin):
    """Provider that talks to Duck.ai through the third-party `duckai` package.

    The provider is only marked as working when `duckai` imported successfully
    (see the guarded import at module level). A single `DuckAI` client is
    created lazily on first use and shared across subsequent calls.
    """

    label = "Duck.ai (duckduckgo_search)"
    url = "https://duckduckgo.com/aichat"
    api_base = "https://duckduckgo.com/duckchat/v1/"

    # Availability mirrors whether the optional `duckai` dependency imported.
    working = has_requirements
    supports_stream = True
    supports_system_message = True
    supports_message_history = True

    default_model = "gpt-4o-mini"
    models = [default_model, "meta-llama/Llama-3.3-70B-Instruct-Turbo", "claude-3-haiku-20240307", "o3-mini", "mistralai/Mistral-Small-24B-Instruct-2501"]

    # Shared DuckAI client, constructed lazily inside create_completion.
    duck_ai: DuckAI = None

    # Short public aliases mapped to the full upstream model identifiers.
    model_aliases = {
        "gpt-4": "gpt-4o-mini",
        "llama-3.3-70b": "meta-llama/Llama-3.3-70B-Instruct-Turbo",
        "claude-3-haiku": "claude-3-haiku-20240307",
        "mixtral-small-24b": "mistralai/Mistral-Small-24B-Instruct-2501",
    }

    @classmethod
    def create_completion(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        timeout: int = 60,
        **kwargs
    ) -> CreateResult:
        """Yield a single chat completion for the last user message.

        Args:
            model: Requested model name; resolved through `model_aliases`.
            messages: Conversation history; only the last user message is sent.
            proxy: Optional proxy URL, applied when the client is first built.
            timeout: Request timeout in seconds.

        Raises:
            ImportError: When the optional `duckai` package is not installed.
        """
        if not has_requirements:
            raise ImportError("duckai is not installed. Install it with `pip install -U duckai`.")

        # Build the shared client on first use; later calls reuse it
        # (their proxy/timeout constructor arguments are not re-applied).
        client = cls.duck_ai
        if client is None:
            client = cls.duck_ai = DuckAI(proxy=proxy, timeout=timeout)

        prompt = get_last_user_message(messages)
        yield client.chat(prompt, cls.get_model(model), timeout)