active_by_default

hlohaus committed 2025-07-15 13:49:29 +02:00
parent 2d3b215dbb
commit 6f8c5ea62b
18 changed files with 18 additions and 3175 deletions

@@ -39,6 +39,7 @@ class Cloudflare(AsyncGeneratorProvider, ProviderModelMixin, AuthFileMixin):
     url = "https://playground.ai.cloudflare.com"
     working = has_curl_cffi
     use_nodriver = True
+    active_by_default = True
     api_endpoint = "https://playground.ai.cloudflare.com/api/inference"
     models_url = "https://playground.ai.cloudflare.com/api/models"
     supports_stream = True
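
The same one-line opt-in recurs in most of the hunks below: a provider advertises itself as enabled-by-default through a plain class attribute. A minimal sketch of the pattern (BaseProvider and ExampleProvider are illustrative names, not the project's actual classes):

from typing import Optional

class BaseProvider:
    # None means the provider never declared the flag; the API layer
    # decides what an undeclared flag resolves to (see the Api hunk below).
    active_by_default: Optional[bool] = None
    needs_auth: bool = False
    working: bool = False

class ExampleProvider(BaseProvider):
    working = True
    active_by_default = True  # explicit opt-in, as in this commit's hunks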

@@ -43,6 +43,7 @@ class Copilot(AsyncAuthedProvider, ProviderModelMixin):
     working = True
     supports_stream = True
+    active_by_default = True
     default_model = "Copilot"
     models = [default_model, "Think Deeper"]

@@ -10,6 +10,7 @@ class PenguinAI(OpenaiTemplate):
     url = "https://penguinai.tech"
     api_base = "https://api.penguinai.tech/v1"
     working = True
+    active_by_default = True
     default_model = "gpt-3.5-turbo"
     default_vision_model = "gpt-4o"

@@ -64,6 +64,7 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
     label = "Pollinations AI"
     url = "https://pollinations.ai"
     login_url = "https://auth.pollinations.ai"
+    active_by_default = True
     working = True
     supports_system_message = True

@@ -10,6 +10,7 @@ from .PollinationsAI import PollinationsAI
 class PollinationsImage(PollinationsAI):
     label = "PollinationsImage"
     parent = PollinationsAI.__name__
+    active_by_default = False
     default_model = "flux"
     default_vision_model = None
     default_image_model = default_model
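
Note the opposite values in the two Pollinations hunks: PollinationsImage inherits from PollinationsAI but overrides the flag, so only the parent stays active by default. Reduced to just the flag, the relationship looks like this (class bodies trimmed, otherwise as in the diff):

class PollinationsAI:
    active_by_default = True

class PollinationsImage(PollinationsAI):
    active_by_default = False  # the image subclass opts back out

assert PollinationsAI.active_by_default is True
assert PollinationsImage.active_by_default is False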

@@ -18,6 +18,7 @@ class Together(OpenaiTemplate):
     activation_endpoint = "https://www.codegeneration.ai/activate-v2"
     models_endpoint = "https://api.together.xyz/v1/models"
+    active_by_default = True
     working = True
     needs_auth = False
     supports_stream = True

@@ -24,6 +24,7 @@ class HarProvider(AsyncAuthedProvider, ProviderModelMixin):
     url = "https://legacy.lmarena.ai"
     api_endpoint = "/queue/join?"
     working = True
+    active_by_default = True
     default_model = LegacyLMArena.default_model

     @classmethod

@@ -13,7 +13,7 @@ class DeepInfra(OpenaiTemplate):
     api_base = "https://api.deepinfra.com/v1/openai"
     working = True
     needs_auth = True
+    active_by_default = True
     default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
     default_image_model = "stabilityai/sd3.5"

@@ -3,7 +3,7 @@ from __future__ import annotations
 from .OpenaiAPI import OpenaiAPI

 class DeepSeek(OpenaiAPI):
-    label = "DeepSeek"
+    label = "DeepSeek API"
     url = "https://platform.deepseek.com"
     login_url = "https://platform.deepseek.com/api_keys"
     working = True

@@ -18,6 +18,7 @@ except ImportError:
     has_dsk = False

 class DeepSeekAPI(AsyncAuthedProvider, ProviderModelMixin):
+    label = "DeepSeek"
     url = "https://chat.deepseek.com"
     working = has_dsk
     active_by_default = has_dsk

@@ -21,6 +21,7 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://ai.google.dev"
     login_url = "https://aistudio.google.com/u/0/apikey"
     api_base = "https://generativelanguage.googleapis.com/v1beta"
+    active_by_default = True
     working = True
     supports_message_history = True

@@ -79,7 +79,6 @@ class LMArenaBeta(AsyncGeneratorProvider, ProviderModelMixin, AuthFileMixin):
     url = "https://beta.lmarena.ai"
     api_endpoint = "https://beta.lmarena.ai/api/stream/create-evaluation"
     working = True
-    active_by_default = has_nodriver
     default_model = list(text_models.keys())[0]
     models = list(text_models) + list(image_models)

@@ -37,7 +37,7 @@ class HuggingChat(AsyncAuthedProvider, ProviderModelMixin):
     origin = f"https://{domain}"
     url = f"{origin}/chat"
-    working = True
+    working = False
     use_nodriver = True
     supports_stream = True
     needs_auth = True

@@ -22,7 +22,6 @@ class HuggingFaceMedia(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://huggingface.co"
     working = True
     needs_auth = True
     model_aliases = image_model_aliases
     tasks = ["text-to-image", "text-to-video"]

@@ -17,7 +17,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://huggingface.co"
     login_url = "https://huggingface.co/settings/tokens"
     working = True
-    supports_message_history = True
+    active_by_default = True

     @classmethod
     def get_models(cls, **kwargs) -> list[str]:

@@ -100,7 +100,7 @@ class Api:
             "vision": getattr(provider, "default_vision_model", None) is not None,
             "nodriver": getattr(provider, "use_nodriver", False),
             "hf_space": getattr(provider, "hf_space", False),
-            "active_by_default": not provider.needs_auth if provider.active_by_default is None else provider.active_by_default,
+            "active_by_default": False if provider.active_by_default is None else provider.active_by_default,
             "auth": provider.needs_auth,
             "login_url": getattr(provider, "login_url", None),
         } for provider in Provider.__providers__ if provider.working and safe_get_models(provider)]
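
This hunk is the behavioral core of the commit: previously a provider that never set active_by_default counted as active whenever it required no authentication; now an unset flag resolves to False, so providers must opt in explicitly, which is what the one-line additions above do. The two resolution rules, written out as standalone functions (function names are illustrative):

def resolve_old(provider) -> bool:
    # Pre-commit: auth-free providers were active by default.
    if provider.active_by_default is None:
        return not provider.needs_auth
    return provider.active_by_default

def resolve_new(provider) -> bool:
    # Post-commit: only an explicit opt-in makes a provider active.
    if provider.active_by_default is None:
        return False
    return provider.active_by_default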

File diff suppressed because one or more lines are too long

@@ -12,7 +12,7 @@ from ..Provider.hf_space import HuggingSpace
 from ..Provider import Cloudflare, Gemini, GeminiPro, Grok, DeepSeekAPI, PerplexityLabs, LambdaChat, PollinationsAI, PuterJS
 from ..Provider import Microsoft_Phi_4_Multimodal, DeepInfraChat, Blackbox, OIVSCodeSer0501, OIVSCodeSer2, TeachAnything, OperaAria, Startnest
 from ..Provider import Together, WeWordle, Yqcloud, Chatai, ImageLabs, LegacyLMArena, LMArenaBeta, Free2GPT
-from ..Provider import EdgeTTS, gTTS, MarkItDown, OpenAIFM, Video
+from ..Provider import EdgeTTS, gTTS, MarkItDown, OpenAIFM, PenguinAI
 from ..Provider import HarProvider, HuggingFace, HuggingFaceMedia
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
 from .. import Provider
@@ -24,7 +24,7 @@ PROVIERS_LIST_1 = [
     CopilotAccount, OpenaiChat, Cloudflare, PerplexityLabs, Gemini, Grok, DeepSeekAPI, Blackbox, OpenAIFM,
     OIVSCodeSer2, OIVSCodeSer0501, TeachAnything, WeWordle, Yqcloud, Chatai, Free2GPT, ImageLabs,
     # Has lazy loading model lists
-    PollinationsAI, HarProvider, LegacyLMArena, LMArenaBeta, LambdaChat, DeepInfraChat,
+    PollinationsAI, HarProvider, LegacyLMArena, LMArenaBeta, LambdaChat, DeepInfraChat, PenguinAI,
     HuggingSpace, HuggingFace, HuggingFaceMedia, GeminiPro, Together, PuterJS, OperaAria, Startnest
 ]
@@ -342,6 +342,7 @@ class AnyModelProviderMixin(ProviderModelMixin):
 class AnyProvider(AsyncGeneratorProvider, AnyModelProviderMixin):
     working = True
+    active_by_default = True

     @classmethod
     async def create_async_generator(
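
For context, a hedged sketch of how the serialized flag might be consumed downstream to build the default provider selection; the field names and the Provider.__providers__ iteration come from the Api hunk above, while the filtering step itself is an assumption:

providers = [
    {
        "name": provider.__name__,
        "active_by_default": False if provider.active_by_default is None else provider.active_by_default,
        "auth": provider.needs_auth,
    }
    for provider in Provider.__providers__
    if provider.working
]
enabled = [entry["name"] for entry in providers if entry["active_by_default"]]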