Mirror of https://github.com/xtekky/gpt4free.git (synced 2025-12-06 02:30:41 -08:00)
AI Provider and Model Updates: Adding New, Removing Deprecated, and Enhancing Functionality (#2739)
* docs(docs/providers-and-models.md): Update provider listings and model information
* feat(g4f/models.py): update model configurations and expand provider support
* fix(g4f/gui/client/static/js/chat.v1.js): correct provider checkbox initialization logic
* feat(g4f/Provider/Blackbox.py): update model configurations and premium handling
* feat(g4f/Provider/ChatGLM.py): add finish reason handling and update default model
* chore(g4f/Provider/DDG.py): Reorder model entries for consistency
* feat(g4f/Provider/ImageLabs.py): Update default image model to sdxl-turbo
* feat(g4f/Provider/Liaobots.py): update supported model configurations and aliases
* feat(g4f/Provider/OIVSCode.py): Update API endpoint and expand model support
* fix(g4f/Provider/needs_auth/CablyAI.py): Enforce authentication requirement
* Removed the provider (g4f/Provider/BlackboxAPI.py)
* fix(g4f/providers/base_provider.py): improve cache validation in AsyncAuthedProvider
* Update g4f/models.py
* fix(g4f/Provider/Liaobots.py): remove deprecated Gemini model aliases
* chore(g4f/models.py): Remove Grok-2 and update Gemini provider configurations
* chore(docs/providers-and-models.md): Remove deprecated Grok models from provider listings
* New provider added (g4f/Provider/AllenAI.py)
* feat(g4f/models.py): Add Ai2 models and update provider references
* feat(docs/providers-and-models.md): update providers and models documentation
* fix(g4f/models.py): update experimental model provider configuration
* fix(g4f/Provider/PollinationsImage.py): Initialize image_models list and update label
* fix(g4f/Provider/PollinationsAI.py): Resolve model initialization and alias conflicts
* refactor(g4f/Provider/PollinationsAI.py): improve model initialization and error handling
* refactor(g4f/Provider/PollinationsImage.py): Improve model synchronization and initialization
* Update g4f/Provider/AllenAI.py

Co-authored-by: kqlio67 <>
Parent: f23f66518b
Commit: 07a8dfdff7
15 changed files with 422 additions and 238 deletions
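Most of the changes below land in the model registry (g4f/models.py) that the public client resolves model names against. As a quick orientation, here is a minimal sketch of calling one of the models this commit adds; the `g4f.client.Client` interface is assumed from the project README and is not part of this diff.

```python
from g4f.client import Client

# Minimal sketch, not part of this commit: the Client interface is assumed from
# the g4f README. The model name "tulu-3-405b" and its AllenAI backing come from
# the g4f/models.py diff below.
client = Client()
response = client.chat.completions.create(
    model="tulu-3-405b",  # new Ai2 model, served through the new AllenAI provider
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
)
print(response.choices[0].message.content)
```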
g4f/models.py

```diff
@@ -5,8 +5,8 @@ from dataclasses import dataclass
 from .Provider import IterListProvider, ProviderType
 from .Provider import (
     ### no auth required ###
+    AllenAI,
     Blackbox,
-    BlackboxAPI,
     ChatGLM,
     ChatGptEs,
     Cloudflare,
@@ -115,14 +115,14 @@ default_vision = Model(
 gpt_4 = Model(
     name = 'gpt-4',
     base_provider = 'OpenAI',
-    best_provider = IterListProvider([Blackbox, DDG, Jmuz, ChatGptEs, PollinationsAI, Yqcloud, Copilot, OpenaiChat, Liaobots, Mhystical])
+    best_provider = IterListProvider([DDG, Jmuz, ChatGptEs, PollinationsAI, Yqcloud, Copilot, OpenaiChat, Liaobots, Mhystical])
 )
 
 # gpt-4o
 gpt_4o = VisionModel(
     name = 'gpt-4o',
     base_provider = 'OpenAI',
-    best_provider = IterListProvider([Blackbox, Jmuz, ChatGptEs, PollinationsAI, Copilot, Liaobots, OpenaiChat])
+    best_provider = IterListProvider([Jmuz, ChatGptEs, PollinationsAI, Copilot, Liaobots, OpenaiChat])
 )
 
 gpt_4o_mini = Model(
@@ -144,17 +144,11 @@ o1_preview = Model(
     best_provider = Liaobots
 )
 
-o1_mini = Model(
-    name = 'o1-mini',
-    base_provider = 'OpenAI',
-    best_provider = Liaobots
-)
-
 # o3
 o3_mini = Model(
     name = 'o3-mini',
     base_provider = 'OpenAI',
-    best_provider = IterListProvider([DDG, Blackbox])
+    best_provider = IterListProvider([DDG, Liaobots])
 )
 
 ### GigaChat ###
@@ -268,14 +262,14 @@ mixtral_small_24b = Model(
 mixtral_small_28b = Model(
     name = "mixtral-small-28b",
     base_provider = "Mistral",
-    best_provider = IterListProvider([Blackbox, BlackboxAPI, DeepInfraChat])
+    best_provider = IterListProvider([Blackbox, DeepInfraChat])
 )
 
 ### NousResearch ###
 hermes_2_dpo = Model(
     name = "hermes-2-dpo",
     base_provider = "NousResearch",
-    best_provider = IterListProvider([Blackbox, BlackboxAPI])
+    best_provider = Blackbox
 )
 
 ### Microsoft ###
@@ -324,13 +318,13 @@ gemini_exp = Model(
 gemini_1_5_flash = Model(
     name = 'gemini-1.5-flash',
     base_provider = 'Google DeepMind',
-    best_provider = IterListProvider([Blackbox, Jmuz, GeminiPro, Liaobots])
+    best_provider = IterListProvider([Blackbox, Jmuz, GeminiPro])
 )
 
 gemini_1_5_pro = Model(
     name = 'gemini-1.5-pro',
     base_provider = 'Google DeepMind',
-    best_provider = IterListProvider([Blackbox, Jmuz, GeminiPro, Liaobots])
+    best_provider = IterListProvider([Jmuz, GeminiPro])
 )
 
 # gemini-2.0
@@ -449,7 +443,7 @@ qwen_2_5_1m = Model(
 qwq_32b = Model(
     name = 'qwq-32b',
     base_provider = 'Qwen',
-    best_provider = IterListProvider([Blackbox, BlackboxAPI, Jmuz, HuggingChat])
+    best_provider = IterListProvider([Blackbox, Jmuz, HuggingChat])
 )
 qvq_72b = VisionModel(
     name = 'qvq-72b',
@@ -468,19 +462,19 @@ pi = Model(
 deepseek_chat = Model(
     name = 'deepseek-chat',
     base_provider = 'DeepSeek',
-    best_provider = IterListProvider([Blackbox, BlackboxAPI, Jmuz, PollinationsAI])
+    best_provider = IterListProvider([Blackbox, Jmuz, PollinationsAI])
 )
 
 deepseek_v3 = Model(
     name = 'deepseek-v3',
     base_provider = 'DeepSeek',
-    best_provider = IterListProvider([Blackbox, BlackboxAPI, DeepInfraChat, Liaobots])
+    best_provider = IterListProvider([Blackbox, DeepInfraChat, OIVSCode, Liaobots])
 )
 
 deepseek_r1 = Model(
     name = 'deepseek-r1',
     base_provider = 'DeepSeek',
-    best_provider = IterListProvider([Blackbox, BlackboxAPI, DeepInfraChat, Glider, PollinationsAI, Jmuz, Liaobots, HuggingChat, HuggingFace])
+    best_provider = IterListProvider([Blackbox, DeepInfraChat, Glider, PollinationsAI, Jmuz, Liaobots, HuggingChat, HuggingFace])
 )
 
 janus_pro_7b = VisionModel(
@@ -490,8 +484,14 @@ janus_pro_7b = VisionModel(
 )
 
 ### x.ai ###
-grok_2 = Model(
-    name = 'grok-2',
+grok_3 = Model(
+    name = 'grok-3',
     base_provider = 'x.ai',
     best_provider = Liaobots
 )
+
+grok_3_r1 = Model(
+    name = 'grok-3-r1',
+    base_provider = 'x.ai',
+    best_provider = Liaobots
+)
@@ -521,6 +521,12 @@ sonar_reasoning_pro = Model(
     best_provider = PerplexityLabs
 )
 
+r1_1776 = Model(
+    name = 'r1-1776',
+    base_provider = 'Perplexity AI',
+    best_provider = PerplexityLabs
+)
+
 ### Nvidia ###
 nemotron_70b = Model(
     name = 'nemotron-70b',
@@ -532,7 +538,7 @@ nemotron_70b = Model(
 dbrx_instruct = Model(
     name = 'dbrx-instruct',
     base_provider = 'Databricks',
-    best_provider = IterListProvider([Blackbox, BlackboxAPI, DeepInfraChat])
+    best_provider = IterListProvider([Blackbox, DeepInfraChat])
 )
 
 ### THUDM ###
@@ -590,6 +596,37 @@ minicpm_2_5 = Model(
     best_provider = DeepInfraChat
 )
 
+### Ai2 ###
+tulu_3_405b = Model(
+    name = "tulu-3-405b",
+    base_provider = "Ai2",
+    best_provider = AllenAI
+)
+
+olmo_2_13b = Model(
+    name = "olmo-2-13b",
+    base_provider = "Ai2",
+    best_provider = AllenAI
+)
+
+tulu_3_1_8b = Model(
+    name = "tulu-3-1-8b",
+    base_provider = "Ai2",
+    best_provider = AllenAI
+)
+
+tulu_3_70b = Model(
+    name = "tulu-3-70b",
+    base_provider = "Ai2",
+    best_provider = AllenAI
+)
+
+olmoe_0125 = Model(
+    name = "olmoe-0125",
+    base_provider = "Ai2",
+    best_provider = AllenAI
+)
+
 ### Uncensored AI ###
 evil = Model(
     name = 'evil',
@@ -678,7 +715,6 @@ class ModelUtils:
         # o1
         o1.name: o1,
         o1_preview.name: o1_preview,
-        o1_mini.name: o1_mini,
 
         # o3
         o3_mini.name: o3_mini,
@@ -776,13 +812,14 @@ class ModelUtils:
         pi.name: pi,
 
         ### x.ai ###
-        grok_2.name: grok_2,
+        grok_3.name: grok_3,
 
         ### Perplexity AI ###
         sonar.name: sonar,
         sonar_pro.name: sonar_pro,
         sonar_reasoning.name: sonar_reasoning,
         sonar_reasoning_pro.name: sonar_reasoning_pro,
+        r1_1776.name: r1_1776,
 
         ### DeepSeek ###
         deepseek_chat.name: deepseek_chat,
@@ -803,6 +840,13 @@ class ModelUtils:
         lzlv_70b.name: lzlv_70b, ### Lizpreciatior ###
         minicpm_2_5.name: minicpm_2_5, ### OpenBMB ###
 
+        ### Ai2 ###
+        tulu_3_405b.name: tulu_3_405b,
+        olmo_2_13b.name: olmo_2_13b,
+        tulu_3_1_8b.name: tulu_3_1_8b,
+        tulu_3_70b.name: tulu_3_70b,
+        olmoe_0125.name: olmoe_0125,
+
         evil.name: evil, ### Uncensored AI ###
 
         #############
```
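The `ModelUtils` entries at the end of the diff are what turn a user-facing model name into a `Model` with its provider chain. Below is a rough sketch of inspecting that mapping; the helper is hypothetical, and it assumes the mapping is the `ModelUtils.convert` dict used elsewhere in g4f/models.py and that `IterListProvider` exposes its fallback list as `.providers`.

```python
from g4f.models import ModelUtils
from g4f.Provider import IterListProvider  # same import path as in the diff above

def provider_chain(model_name: str) -> str:
    """Hypothetical helper: list the providers backing a registered model name."""
    model = ModelUtils.convert[model_name]  # name -> Model instance (dict name assumed)
    best = model.best_provider
    # An IterListProvider wraps an ordered fallback list; a bare provider class is a single entry.
    providers = best.providers if isinstance(best, IterListProvider) else [best]
    return f"{model.name} ({model.base_provider}): " + " -> ".join(p.__name__ for p in providers)

print(provider_chain("tulu-3-405b"))  # added in this commit, backed by AllenAI
print(provider_chain("deepseek-v3"))  # OIVSCode joins its fallback chain in this commit
```

Because `best_provider` falls back across the listed providers, dropping BlackboxAPI from these chains only shortens the fallback order; the model names themselves stay resolvable.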