Mirror of https://github.com/xtekky/gpt4free.git, synced 2025-12-06 02:30:41 -08:00
chore: temporarily disable some new providers
This commit is contained in:
parent 69d0d2b29b
commit 3119b8e37e
19 changed files with 91 additions and 508 deletions
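The commit follows one pattern throughout: a provider is taken out of rotation either by deleting its module outright or by flipping its `working` flag to `False` and dropping it from the provider lists in `g4f/models.py`. A minimal sketch of that flag mechanism follows; the simplified base class and the helper name `usable` are illustrative assumptions, not code from the repository.

# Sketch of the disable pattern seen in the hunks below (assumed shape).
class BaseProvider:
    working = True

class DisabledProvider(BaseProvider):
    working = False  # what this commit sets on several Nexra providers

def usable(providers):
    # Keep only providers that are still enabled.
    return [p for p in providers if p.working]

print([p.__name__ for p in usable([BaseProvider, DisabledProvider])])
# ['BaseProvider']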
@@ -21,7 +21,6 @@
|[openchat.team](https://openchat.team/)|`g4f.Provider.Aura`|✔|❌|❌|?||❌|
|[bing.com](https://bing.com/chat)|`g4f.Provider.Bing`|`gpt-4`|✔|`gpt-4-vision`|✔||❌+✔|
|[bing.com/images](https://www.bing.com/images/create)|`g4f.Provider.BingCreateImages`|❌|✔|❌|❌||✔|
-|[chat18.aichatos8.com](https://chat18.aichatos8.com)|`g4f.Provider.Binjie`|`gpt-4`|❌|❌|✔||❌|
|[blackbox.ai](https://www.blackbox.ai)|`g4f.Provider.Blackbox`|`blackboxai, blackboxai-pro, gemini-flash, llama-3.1-8b, llama-3.1-70b, gpt-4o, gemini-pro, claude-3.5-sonnet`|`flux`|✔|✔||❌|
|[chatgot.one](https://www.chatgot.one/)|`g4f.Provider.ChatGot`|`gemini-pro`|❌|❌|✔||❌|
|[chatgpt.com](https://chatgpt.com)|`g4f.Provider.ChatGpt`|`?`|`?`|`?`|?||❌|
@@ -30,12 +29,12 @@
|[chatgpt4o.one](https://chatgpt4o.one)|`g4f.Provider.Chatgpt4o`|✔|❌|❌|❌||❌|
|[chatgptfree.ai](https://chatgptfree.ai)|`g4f.Provider.ChatgptFree`|`gpt-4o-mini`|❌|❌|?||❌|
|[app.chathub.gg](https://app.chathub.gg)|`g4f.Provider.ChatHub`|`llama-3.1-8b, mixtral-8x7b, gemma-2, sonar-online`|❌|❌|✔||❌|
-|[chatify-ai.vercel.app](https://chatify-ai.vercel.app)|`g4f.Provider.ChatifyAI`|`llama-3.1`|❌|❌|✔||❌|
+|[chatify-ai.vercel.app](https://chatify-ai.vercel.app)|`g4f.Provider.ChatifyAI`|`llama-3.1-8b`|❌|❌|✔||❌|
|[playground.ai.cloudflare.com](https://playground.ai.cloudflare.com)|`g4f.Provider.Cloudflare`|`german-7b, gemma-7b, llama-2-7b, llama-3-8b, llama-3.1-8b, llama-3.2-11b, llama-3.2-1b, llama-3.2-3b, mistral-7b, openchat-3.5, phi-2, qwen-1.5-0.5b, qwen-1.5-1.8b, qwen-1.5-14b, qwen-1.5-7b, tinyllama-1.1b, cybertron-7b`|❌|❌|✔||❌|
|[aiuncensored.info](https://www.aiuncensored.info)|`g4f.Provider.DarkAI`|`gpt-4o, gpt-3.5-turbo, llama-3-70b, llama-3-405b`|❌|❌|✔||❌|
|[duckduckgo.com](https://duckduckgo.com/duckchat/v1/chat)|`g4f.Provider.DDG`|`gpt-4o-mini, claude-3-haiku, llama-3.1-70b, mixtral-8x7b`|❌|❌|✔||❌|
|[deepinfra.com](https://deepinfra.com)|`g4f.Provider.DeepInfra`|✔|❌|❌|✔||✔|
-|[deepinfra.com/chat](https://deepinfra.com/chat)|`g4f.Provider.DeepInfraChat`|`llama-3.1-405b, llama-3.1-70b, Llama-3.1-8B, mixtral-8x22b, mixtral-8x7b, wizardlm-2-8x22b, wizardlm-2-7b, qwen-2-72b, phi-3-medium-4k, gemma-2b-27b, minicpm-llama-3-v2.5, mistral-7b, lzlv_70b, openchat-3.6-8b, phind-codellama-34b-v2, dolphin-2.9.1-llama-3-70b`|❌|`minicpm-llama-3-v2.5`|❌||❌|
+|[deepinfra.com/chat](https://deepinfra.com/chat)|`g4f.Provider.DeepInfraChat`|`llama-3.1-405b, llama-3.1-70b, llama-3.1-8B, mixtral-8x22b, mixtral-8x7b, wizardlm-2-8x22b, wizardlm-2-7b, qwen-2-72b, phi-3-medium-4k, gemma-2b-27b, minicpm-llama-3-v2.5, mistral-7b, lzlv_70b, openchat-3.6-8b, phind-codellama-34b-v2, dolphin-2.9.1-llama-3-70b`|❌|`minicpm-llama-3-v2.5`|❌||❌|
|[deepinfra.com](https://deepinfra.com)|`g4f.Provider.DeepInfraImage`|❌|✔|❌|❌||✔|
|[flowgpt.com](https://flowgpt.com/chat)|`g4f.Provider.FlowGpt`|✔||❌|✔||❌|
|[chat10.free2gpt.xyz](https://chat10.free2gpt.xyz)|`g4f.Provider.Free2GPT`|`llama-3.1-70b`|❌|❌|✔||❌|
@@ -51,12 +50,10 @@
|[huggingface.co](https://huggingface.co/chat)|`g4f.Provider.HuggingFace`|✔|❌|❌|✔||❌|
|[koala.sh/chat](https://koala.sh/chat)|`g4f.Provider.Koala`|`gpt-4o-mini`|❌|❌|?||❌|
|[liaobots.work](https://liaobots.work)|`g4f.Provider.Liaobots`|`gpt-3.5-turbo, gpt-4o-mini, gpt-4o, gpt-4-turbo, grok-2, grok-2-mini, claude-3-opus, claude-3-sonnet, claude-3-5-sonnet, claude-3-haiku, claude-2.1, gemini-flash, gemini-pro`|❌|❌|✔||❌|
-|[lite.icoding.ink](https://lite.icoding.ink)|`g4f.Provider.LiteIcoding`|`gpt-4o-mini, gpt-4-turbo, claude-3, claude-3.5, gemini-pro`|❌|❌|❌||❌|
|[magickpen.com](https://magickpen.com)|`g4f.Provider.MagickPen`|`gpt-4o-mini`|❌|❌|✔||❌|
|[meta.ai](https://www.meta.ai)|`g4f.Provider.MetaAI`|✔|✔|?|?||✔|
|[app.myshell.ai/chat](https://app.myshell.ai/chat)|`g4f.Provider.MyShell`|✔|❌|?|?||❌|
-|[nexra.aryahcr.cc/midjourney](https://nexra.aryahcr.cc/documentation/midjourney/en)|`g4f.Provider.NexraAnimagineXL`|❌|`animagine-xl`|❌|✔||❌|
-|[nexra.aryahcr.cc/bing](https://nexra.aryahcr.cc/documentation/bing/en)|`g4f.Provider.NexraBing`|`gpt-4`|❌|❌|✔||❌|
+|[nexra.aryahcr.cc/bing](https://nexra.aryahcr.cc/documentation/bing/en)|`g4f.Provider.NexraBing`|✔|❌|❌|✔||❌|
|[nexra.aryahcr.cc/blackbox](https://nexra.aryahcr.cc/documentation/blackbox/en)|`g4f.Provider.NexraBlackbox`|`blackboxai`|❌|❌|✔||❌|
|[nexra.aryahcr.cc/chatgpt](https://nexra.aryahcr.cc/documentation/chatgpt/en)|`g4f.Provider.NexraChatGPT`|`gpt-4, gpt-3.5-turbo, gpt-3`|❌|❌|✔||❌|
|[nexra.aryahcr.cc/chatgpt](https://nexra.aryahcr.cc/documentation/chatgpt/en)|`g4f.Provider.NexraChatGPT4o`|`gpt-4o`|❌|❌|✔||❌|
@@ -67,16 +64,16 @@
|[nexra.aryahcr.cc/dall-e](https://nexra.aryahcr.cc/documentation/dall-e/en)|`g4f.Provider.NexraDalleMini`|❌|`dalle-mini`|❌|✔||❌|
|[nexra.aryahcr.cc/emi](https://nexra.aryahcr.cc/documentation/emi/en)|`g4f.Provider.NexraEmi`|❌|`emi`|❌|✔||❌|
|[nexra.aryahcr.cc/flux-pro](https://nexra.aryahcr.cc/documentation/flux-pro/en)|`g4f.Provider.NexraFluxPro`|❌|`flux-pro`|❌|✔||❌|
-|[nexra.aryahcr.cc/gemini-pro](https://nexra.aryahcr.cc/documentation/gemini-pro/en)|`g4f.Provider.NexraGeminiPro`|`gemini-pro`|❌|❌|✔||❌|
-|[nexra.aryahcr.cc/llama-3.1](https://nexra.aryahcr.cc/documentation/llama-3.1/en)|`g4f.Provider.NexraLLaMA31`|`llama-3.1`|❌|❌|✔||❌|
-|[nexra.aryahcr.cc/midjourney](https://nexra.aryahcr.cc/documentation/midjourney/en)|`g4f.Provider.NexraMidjourney`|❌|`midjourney`|❌|✔||❌|
-|[nexra.aryahcr.cc/prodia](https://nexra.aryahcr.cc/documentation/prodia/en)|`g4f.Provider.NexraProdiaAI`|❌|✔|❌|✔||❌|
+|[nexra.aryahcr.cc/gemini-pro](https://nexra.aryahcr.cc/documentation/gemini-pro/en)|`g4f.Provider.NexraGeminiPro`|✔|❌|❌|✔||❌|
+|[nexra.aryahcr.cc/llama-3.1](https://nexra.aryahcr.cc/documentation/llama-3.1/en)|`g4f.Provider.NexraLLaMA31`|`llama-3.1-8b`|❌|❌|✔||❌|
+|[nexra.aryahcr.cc/midjourney](https://nexra.aryahcr.cc/documentation/midjourney/en)|`g4f.Provider.NexraMidjourney`|❌|✔|❌|✔||❌|
+|[nexra.aryahcr.cc/prodia](https://nexra.aryahcr.cc/documentation/prodia/en)|`g4f.Provider.NexraProdiaAI`|❌|✔|❌|✔||❌|
-|[nexra.aryahcr.cc/qwen](https://nexra.aryahcr.cc/documentation/qwen/en)|`g4f.Provider.NexraQwen`|`qwen`|❌|❌|✔||❌|
+|[nexra.aryahcr.cc/qwen](https://nexra.aryahcr.cc/documentation/qwen/en)|`g4f.Provider.NexraQwen`|`qwen`|❌|❌|✔||
-|[nexra.aryahcr.cc/stable-diffusion](https://nexra.aryahcr.cc/documentation/stable-diffusion/en)|`g4f.Provider.NexraSD15`|`sd-1.5`|❌|❌|✔||
-|[nexra.aryahcr.cc/stable-diffusion](https://nexra.aryahcr.cc/documentation/stable-diffusion/en)|`g4f.Provider.NexraSD21`|`sd-2.1`|❌|❌|✔||
-|[nexra.aryahcr.cc/stable-diffusion](https://nexra.aryahcr.cc/documentation/stable-diffusion/en)|`g4f.Provider.NexraSDLora`|`sdxl-lora`|❌|❌|✔||
-|[nexra.aryahcr.cc/stable-diffusion](https://nexra.aryahcr.cc/documentation/stable-diffusion/en)|`g4f.Provider.NexraSDTurbo`|`sdxl-turbo`|❌|❌|✔||
+|[nexra.aryahcr.cc/stable-diffusion](https://nexra.aryahcr.cc/documentation/stable-diffusion/en)|`g4f.Provider.NexraSD15`|✔|❌|❌|✔||
+|[nexra.aryahcr.cc/stable-diffusion](https://nexra.aryahcr.cc/documentation/stable-diffusion/en)|`g4f.Provider.NexraSD21`|✔|❌|❌|✔||
+|[nexra.aryahcr.cc/stable-diffusion](https://nexra.aryahcr.cc/documentation/stable-diffusion/en)|`g4f.Provider.NexraSDLora`|✔|❌|❌|✔||
+|[nexra.aryahcr.cc/stable-diffusion](https://nexra.aryahcr.cc/documentation/stable-diffusion/en)|`g4f.Provider.NexraSDTurbo`|✔|❌|❌|✔||
|[openrouter.ai](https://openrouter.ai)|`g4f.Provider.OpenRouter`|✔|❌|?|?||❌|
|[platform.openai.com](https://platform.openai.com/)|`g4f.Provider.Openai`|✔|❌|✔|||✔|
|[chatgpt.com](https://chatgpt.com/)|`g4f.Provider.OpenaiChat`|`gpt-4o, gpt-4o-mini, gpt-4`|❌|✔|||✔|
@@ -109,19 +106,17 @@
|--|--|--|-|
|gpt-3|OpenAI|1+ Providers|[platform.openai.com](https://platform.openai.com/docs/models/gpt-base)|
|gpt-3.5-turbo|OpenAI|5+ Providers|[platform.openai.com](https://platform.openai.com/docs/models/gpt-3-5-turbo)|
-|gpt-4|OpenAI|32+ Providers|[platform.openai.com](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4)|
-|gpt-4-turbo|OpenAI|3+ Providers|[platform.openai.com](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4)|
-|gpt-4o|OpenAI|7+ Providers|[platform.openai.com](https://platform.openai.com/docs/models/gpt-4o)|
-|gpt-4o-mini|OpenAI|14+ Providers|[platform.openai.com](https://platform.openai.com/docs/models/gpt-4o-mini)|
+|gpt-4|OpenAI|8+ Providers|[platform.openai.com](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4)|
+|gpt-4-turbo|OpenAI|2+ Providers|[platform.openai.com](https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4)|
+|gpt-4o|OpenAI|6+ Providers|[platform.openai.com](https://platform.openai.com/docs/models/gpt-4o)|
+|gpt-4o-mini|OpenAI|13+ Providers|[platform.openai.com](https://platform.openai.com/docs/models/gpt-4o-mini)|
|o1|OpenAI|1+ Providers|[openai.com](https://openai.com/index/introducing-openai-o1-preview/)|
|o1-mini|OpenAI|1+ Providers|[openai.com](https://openai.com/index/openai-o1-mini-advancing-cost-efficient-reasoning/)|
|llama-2-7b|Meta Llama|1+ Providers|[huggingface.co](https://huggingface.co/meta-llama/Llama-2-7b)|
|llama-2-13b|Meta Llama|1+ Providers|[llama.com](https://www.llama.com/llama2/)|
-|llama-3|Meta Llama|7+ Providers|[ai.meta.com](https://ai.meta.com/blog/meta-llama-3/)|
|llama-3-8b|Meta Llama|4+ Providers|[ai.meta.com](https://ai.meta.com/blog/meta-llama-3/)|
|llama-3-70b|Meta Llama|4+ Providers|[ai.meta.com](https://ai.meta.com/blog/meta-llama-3/)|
-|llama-3.1|Meta Llama|21+ Providers|[ai.meta.com](https://ai.meta.com/blog/meta-llama-3-1/)|
-|llama-3.1-8b|Meta Llama|6+ Providers|[ai.meta.com](https://ai.meta.com/blog/meta-llama-3-1/)|
+|llama-3.1-8b|Meta Llama|7+ Providers|[ai.meta.com](https://ai.meta.com/blog/meta-llama-3-1/)|
|llama-3.1-70b|Meta Llama|11+ Providers|[ai.meta.com](https://ai.meta.com/blog/meta-llama-3-1/)|
|llama-3.1-405b|Meta Llama|5+ Providers|[ai.meta.com](https://ai.meta.com/blog/meta-llama-3-1/)|
|llama-3.2-1b|Meta Llama|1+ Providers|[huggingface.co](https://huggingface.co/meta-llama/Llama-3.2-1B)|
@@ -137,22 +132,19 @@
|mixtral-8x7b-dpo|NousResearch|1+ Providers|[huggingface.co](https://huggingface.co/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO)|
|yi-34b|NousResearch|1+ Providers|[huggingface.co](https://huggingface.co/NousResearch/Nous-Hermes-2-Yi-34B)|
|hermes-3|NousResearch|1+ Providers|[huggingface.co](https://huggingface.co/NousResearch/Hermes-3-Llama-3.1-8B)|
-|gemini|Google DeepMind|13+ Providers|[deepmind.google](http://deepmind.google/technologies/gemini/)|
+|gemini|Google DeepMind|1+ Providers|[deepmind.google](http://deepmind.google/technologies/gemini/)|
|gemini-flash|Google DeepMind|3+ Providers|[deepmind.google](https://deepmind.google/technologies/gemini/flash/)|
-|gemini-pro|Google DeepMind|9+ Providers|[deepmind.google](https://deepmind.google/technologies/gemini/pro/)|
+|gemini-pro|Google DeepMind|8+ Providers|[deepmind.google](https://deepmind.google/technologies/gemini/pro/)|
|gemma-2b|Google|5+ Providers|[huggingface.co](https://huggingface.co/google/gemma-2b)|
|gemma-2b-9b|Google|1+ Providers|[huggingface.co](https://huggingface.co/google/gemma-2-9b)|
|gemma-2b-27b|Google|2+ Providers|[huggingface.co](https://huggingface.co/google/gemma-2-27b)|
|gemma-7b|Google|1+ Providers|[huggingface.co](https://huggingface.co/google/gemma-7b)|
|gemma-2|Google|2+ Providers|[huggingface.co](https://huggingface.co/blog/gemma2)|
|gemma_2_27b|Google|1+ Providers|[huggingface.co](https://huggingface.co/blog/gemma2)|
-|claude-2|Anthropic|2+ Providers|[anthropic.com](https://www.anthropic.com/news/claude-2)|
|claude-2.1|Anthropic|1+ Providers|[anthropic.com](https://www.anthropic.com/news/claude-2)|
-|claude-3|Anthropic|7+ Providers|[anthropic.com](https://www.anthropic.com/news/claude-3-family)|
|claude-3-haiku|Anthropic|3+ Providers|[anthropic.com](https://www.anthropic.com/news/claude-3-haiku)|
|claude-3-sonnet|Anthropic|2+ Providers|[anthropic.com](https://www.anthropic.com/news/claude-3-family)|
|claude-3-opus|Anthropic|2+ Providers|[anthropic.com](https://www.anthropic.com/news/claude-3-family)|
-|claude-3.5|Anthropic|5+ Providers|[anthropic.com](https://www.anthropic.com/news/claude-3-5-sonnet)|
|claude-3.5-sonnet|Anthropic|4+ Providers|[anthropic.com](https://www.anthropic.com/news/claude-3-5-sonnet)|
|blackboxai|Blackbox AI|2+ Providers|[docs.blackbox.chat](https://docs.blackbox.chat/blackbox-ai-1)|
|blackboxai-pro|Blackbox AI|1+ Providers|[docs.blackbox.chat](https://docs.blackbox.chat/blackbox-ai-1)|
@@ -163,7 +155,7 @@
|dbrx-instruct|Databricks|1+ Providers|[huggingface.co](https://huggingface.co/databricks/dbrx-instruct)|
|command-r-plus|CohereForAI|1+ Providers|[docs.cohere.com](https://docs.cohere.com/docs/command-r-plus)|
|sparkdesk-v1.1|iFlytek|1+ Providers|[xfyun.cn](https://www.xfyun.cn/doc/spark/Guide.html)|
-|qwen|Qwen|10+ Providers|[huggingface.co](https://huggingface.co/Qwen)|
+|qwen|Qwen|1+ Providers|[huggingface.co](https://huggingface.co/Qwen)|
|qwen-1.5-0.5b|Qwen|1+ Providers|[huggingface.co](https://huggingface.co/Qwen/Qwen1.5-0.5B)|
|qwen-1.5-7b|Qwen|2+ Providers|[huggingface.co](https://huggingface.co/Qwen/Qwen1.5-7B)|
|qwen-1.5-14b|Qwen|3+ Providers|[huggingface.co](https://huggingface.co/Qwen/Qwen1.5-14B)|
@@ -201,12 +193,7 @@
### ImageModel
|Model|Base Provider|Provider|Website|
|--|--|--|-|
-|sdxl|Stability AI|1+ Providers|[huggingface.co](https://huggingface.co/docs/diffusers/en/using-diffusers/sdxl)|
-|sdxl-lora|Stability AI|1+ Providers|[huggingface.co](https://huggingface.co/blog/lora)|
-|sdxl-turbo|Stability AI|1+ Providers|[huggingface.co](https://huggingface.co/stabilityai/sdxl-turbo)|
-|sd|Stability AI|3+ Providers|[huggingface.co](https://huggingface.co/stabilityai/)|
-|sd-1.5|Stability AI|1+ Providers|[github.com](https://github.com/Stability-AI/StableDiffusion)|
-|sd-2.1|Stability AI|1+ Providers|[huggingface.co](https://huggingface.co/stabilityai/stable-diffusion-2-1)|
+|sdxl|Stability AI|3+ Providers|[huggingface.co](https://huggingface.co/docs/diffusers/en/using-diffusers/sdxl)|
|sd-3|Stability AI|1+ Providers|[huggingface.co](https://huggingface.co/docs/diffusers/main/en/api/pipelines/stable_diffusion/stable_diffusion_3)|
|playground-v2.5|Playground AI|1+ Providers|[huggingface.co](https://huggingface.co/playgroundai/playground-v2.5-1024px-aesthetic)|
|flux|Black Forest Labs|2+ Providers|[github.com/black-forest-labs/flux](https://github.com/black-forest-labs/flux)|
@@ -218,11 +205,9 @@
|flux-pixel|Flux AI|1+ Providers|[]()|
|flux-4o|Flux AI|1+ Providers|[]()|
|flux-schnell|Black Forest Labs|1+ Providers|[huggingface.co](https://huggingface.co/black-forest-labs/FLUX.1-schnell)|
-|dalle|OpenAI|4+ Providers|[openai.com](https://openai.com/index/dall-e/)|
+|dalle|OpenAI|1+ Providers|[openai.com](https://openai.com/index/dall-e/)|
|dalle-2|OpenAI|1+ Providers|[openai.com](https://openai.com/index/dall-e-2/)|
|dalle-3|OpenAI|2+ Providers|[openai.com](https://openai.com/index/dall-e-3/)|
|dalle-mini||1+ Providers|[huggingface.co](https://huggingface.co/dalle-mini/dalle-mini)|
-|animagine-xl|Cagliostro Research Lab|1+ Providers|[huggingface.co](https://huggingface.co/cagliostrolab/animagine-xl-3.1)|
-|midjourney|Midjourney|1+ Providers|[docs.midjourney.com](https://docs.midjourney.com/docs/model-versions)|
|emi||1+ Providers|[]()|
|any-dark||1+ Providers|[]()|
@@ -37,7 +37,7 @@ class Airforce(AsyncGeneratorProvider, ProviderModelMixin):
        'gpt-3.5-turbo',
        'gpt-3.5-turbo-0125',
        'gpt-3.5-turbo-1106',
-        'llama-3-70b-chat',
+        default_model,
        'llama-3-70b-chat-turbo',
        'llama-3-8b-chat',
        'llama-3-8b-chat-turbo',
@@ -1,65 +0,0 @@
-from __future__ import annotations
-
-import random
-from ..requests import StreamSession
-
-from ..typing import AsyncResult, Messages
-from .base_provider import AsyncGeneratorProvider, format_prompt
-
-
-class Binjie(AsyncGeneratorProvider):
-    url = "https://chat18.aichatos8.com"
-    working = True
-    supports_gpt_4 = True
-    supports_stream = True
-    supports_system_message = True
-    supports_message_history = True
-
-    @staticmethod
-    async def create_async_generator(
-        model: str,
-        messages: Messages,
-        proxy: str = None,
-        timeout: int = 120,
-        **kwargs,
-    ) -> AsyncResult:
-        async with StreamSession(
-            headers=_create_header(), proxies={"https": proxy}, timeout=timeout
-        ) as session:
-            payload = _create_payload(messages, **kwargs)
-            async with session.post("https://api.binjie.fun/api/generateStream", json=payload) as response:
-                response.raise_for_status()
-                async for chunk in response.iter_content():
-                    if chunk:
-                        chunk = chunk.decode()
-                        if "sorry, 您的ip已由于触发防滥用检测而被封禁" in chunk:
-                            raise RuntimeError("IP address is blocked by abuse detection.")
-                        yield chunk
-
-
-def _create_header():
-    return {
-        "accept"       : "application/json, text/plain, */*",
-        "content-type" : "application/json",
-        "origin"       : "https://chat18.aichatos8.com",
-        "referer"      : "https://chat18.aichatos8.com/"
-    }
-
-
-def _create_payload(
-    messages: Messages,
-    system_message: str = "",
-    user_id: int = None,
-    **kwargs
-):
-    if not user_id:
-        user_id = random.randint(1690000544336, 2093025544336)
-    return {
-        "prompt": format_prompt(messages),
-        "network": True,
-        "system": system_message,
-        "withoutContext": False,
-        "stream": True,
-        "userId": f"#/chat/{user_id}"
-    }
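For reference, the request body `_create_payload` built for the deleted Binjie endpoint can be reproduced standalone. The `format_prompt` stand-in below is an assumption about that helper's output shape, not the real implementation; the Chinese string checked in the generator above means, roughly, "sorry, your IP has been banned by abuse detection".

import random

def format_prompt(messages):
    # Stand-in for g4f's helper; the exact formatting is assumed.
    return "\n".join(f"{m['role'].capitalize()}: {m['content']}" for m in messages)

messages = [{"role": "user", "content": "Hello"}]
payload = {
    "prompt": format_prompt(messages),
    "network": True,
    "system": "",
    "withoutContext": False,
    "stream": True,
    "userId": f"#/chat/{random.randint(1690000544336, 2093025544336)}",
}
print(payload["prompt"])  # -> "User: Hello"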
@@ -80,7 +80,6 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
        "gpt-4o": "gpt-4o",
        "gemini-pro": "gemini-pro",
        'claude-sonnet-3.5': "claude-sonnet-3.5",
        'blackboxai-pro': "blackboxai-pro",
    }

    model_prefixes = {
@@ -107,7 +106,6 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):

    model_referers = {
        "blackboxai": f"{url}/?model=blackboxai",
        "blackboxai-pro": f"{url}/?model=blackboxai-pro",
        "gpt-4o": f"{url}/?model=gpt-4o",
        "gemini-pro": f"{url}/?model=gemini-pro",
        "claude-sonnet-3.5": f"{url}/?model=claude-sonnet-3.5"
@@ -17,9 +17,17 @@ class ChatifyAI(AsyncGeneratorProvider, ProviderModelMixin):

    default_model = 'llama-3.1'
    models = [default_model]
    model_aliases = {
        "llama-3.1-8b": "llama-3.1",
    }

    @classmethod
    def get_model(cls, model: str) -> str:
        if model in cls.models:
            return model
        elif model in cls.model_aliases:
            return cls.model_aliases.get(model, cls.default_model)
        else:
            return cls.default_model

    @classmethod
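The new `get_model` resolves exact model names first, then aliases, then falls back to the default. A quick demonstration against the class shown above, assuming it is re-exported from `g4f.Provider` as the other hunks imply:

from g4f.Provider import ChatifyAI  # assumed import location

assert ChatifyAI.get_model("llama-3.1") == "llama-3.1"      # exact match
assert ChatifyAI.get_model("llama-3.1-8b") == "llama-3.1"   # alias lookup
assert ChatifyAI.get_model("anything-else") == "llama-3.1"  # default fallback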
@@ -1,132 +0,0 @@
-from __future__ import annotations
-import base64
-import re
-from aiohttp import ClientSession, ClientResponseError
-from ..typing import AsyncResult, Messages
-from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from .helper import format_prompt
-
-class LiteIcoding(AsyncGeneratorProvider, ProviderModelMixin):
-    url = "https://lite.icoding.ink"
-    api_endpoint = "/api/v1/gpt/message"
-    working = True
-    supports_gpt_4 = True
-    default_model = "gpt-4o"
-    models = [
-        'gpt-4o',
-        'gpt-4-turbo',
-        'claude-3',
-        'claude-3.5',
-        'gemini-1.5',
-    ]
-
-    model_aliases = {
-        "gpt-4o-mini": "gpt-4o",
-        "gemini-pro": "gemini-1.5",
-    }
-
-    bearer_tokens = [
-        "NWQ2OWNkMjcxYjE0NDIyNmFjMTE5OWIzYzg0OWE1NjY=",
-    ]
-    current_token_index = 0
-
-    @classmethod
-    def decode_token(cls, encoded_token: str) -> str:
-        return base64.b64decode(encoded_token).decode('utf-8')
-
-    @classmethod
-    def get_next_bearer_token(cls):
-        encoded_token = cls.bearer_tokens[cls.current_token_index]
-        cls.current_token_index = (cls.current_token_index + 1) % len(cls.bearer_tokens)
-        return cls.decode_token(encoded_token)
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        proxy: str = None,
-        **kwargs
-    ) -> AsyncResult:
-        bearer_token = cls.get_next_bearer_token()
-        headers = {
-            "Accept": "*/*",
-            "Accept-Language": "en-US,en;q=0.9",
-            "Authorization": f"Bearer {bearer_token}",
-            "Connection": "keep-alive",
-            "Content-Type": "application/json;charset=utf-8",
-            "DNT": "1",
-            "Origin": cls.url,
-            "Referer": f"{cls.url}/",
-            "Sec-Fetch-Dest": "empty",
-            "Sec-Fetch-Mode": "cors",
-            "Sec-Fetch-Site": "same-origin",
-            "User-Agent": (
-                "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) "
-                "Chrome/126.0.0.0 Safari/537.36"
-            ),
-            "sec-ch-ua": '"Not/A)Brand";v="8", "Chromium";v="126"',
-            "sec-ch-ua-mobile": "?0",
-            "sec-ch-ua-platform": '"Linux"',
-        }
-
-        data = {
-            "model": model,
-            "chatId": "-1",
-            "messages": [
-                {
-                    "role": msg["role"],
-                    "content": msg["content"],
-                    "time": msg.get("time", ""),
-                    "attachments": msg.get("attachments", []),
-                }
-                for msg in messages
-            ],
-            "plugins": [],
-            "systemPrompt": "",
-            "temperature": 0.5,
-        }
-
-        async with ClientSession(headers=headers) as session:
-            try:
-                async with session.post(
-                    f"{cls.url}{cls.api_endpoint}", json=data, proxy=proxy
-                ) as response:
-                    response.raise_for_status()
-                    buffer = ""
-                    full_response = ""
-
-                    def decode_content(data):
-                        bytes_array = bytes([int(b, 16) ^ 255 for b in data.split()])
-                        return bytes_array.decode('utf-8')
-
-                    async for chunk in response.content.iter_any():
-                        if chunk:
-                            buffer += chunk.decode()
-                            while "\n\n" in buffer:
-                                part, buffer = buffer.split("\n\n", 1)
-                                if part.startswith("data: "):
-                                    content = part[6:].strip()
-                                    if content and content != "[DONE]":
-                                        content = content.strip('"')
-                                        decoded_content = decode_content(content)
-                                        full_response += decoded_content
-                    full_response = (
-                        full_response.replace('""', '')
-                        .replace('" "', ' ')
-                        .replace("\\n\\n", "\n\n")
-                        .replace("\\n", "\n")
-                        .replace('\\"', '"')
-                        .strip()
-                    )
-                    filtered_response = re.sub(r'\n---\n.*', '', full_response, flags=re.DOTALL)
-                    cleaned_response = filtered_response.strip().strip('"')
-                    yield cleaned_response
-
-            except ClientResponseError as e:
-                raise RuntimeError(
-                    f"ClientResponseError {e.status}: {e.message}, url={e.request_info.url}, data={data}"
-                ) from e
-
-            except Exception as e:
-                raise RuntimeError(f"Unexpected error: {str(e)}") from e
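The interesting detail in this deleted provider is `decode_content`: the stream arrives as space-separated hex bytes, each XORed with 255. A self-contained demo of that scheme (the sample string is synthetic, not captured traffic):

def decode_content(data: str) -> str:
    # Each token is a hex byte XORed with 255 on the wire.
    return bytes(int(b, 16) ^ 255 for b in data.split()).decode("utf-8")

def encode_content(text: str) -> str:
    # Inverse transform, included only to generate demo input.
    return " ".join(f"{b ^ 255:x}" for b in text.encode("utf-8"))

sample = encode_content("hello")         # '97 9a 93 93 90'
assert decode_content(sample) == "hello"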
@@ -22,7 +22,6 @@ from .Airforce import Airforce
from .Aura import Aura
from .Bing import Bing
from .BingCreateImages import BingCreateImages
-from .Binjie import Binjie
from .Blackbox import Blackbox
from .ChatGot import ChatGot
from .ChatGpt import ChatGpt
@@ -50,7 +49,6 @@ from .HuggingChat import HuggingChat
from .HuggingFace import HuggingFace
from .Koala import Koala
from .Liaobots import Liaobots
-from .LiteIcoding import LiteIcoding
from .Local import Local
from .MagickPen import MagickPen
from .MetaAI import MetaAI
@@ -1,66 +0,0 @@
-from __future__ import annotations
-
-from aiohttp import ClientSession
-import json
-
-from ...typing import AsyncResult, Messages
-from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from ...image import ImageResponse
-
-
-class NexraAnimagineXL(AsyncGeneratorProvider, ProviderModelMixin):
-    label = "Nexra Animagine XL"
-    url = "https://nexra.aryahcr.cc/documentation/midjourney/en"
-    api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
-    working = True
-
-    default_model = 'animagine-xl'
-    models = [default_model]
-
-    @classmethod
-    def get_model(cls, model: str) -> str:
-        return cls.default_model
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        proxy: str = None,
-        response: str = "url",  # base64 or url
-        **kwargs
-    ) -> AsyncResult:
-        # Retrieve the correct model to use
-        model = cls.get_model(model)
-
-        # Format the prompt from the messages
-        prompt = messages[0]['content']
-
-        headers = {
-            "Content-Type": "application/json"
-        }
-        payload = {
-            "prompt": prompt,
-            "model": model,
-            "response": response
-        }
-
-        async with ClientSession(headers=headers) as session:
-            async with session.post(cls.api_endpoint, json=payload, proxy=proxy) as response:
-                response.raise_for_status()
-                text_data = await response.text()
-
-                try:
-                    # Parse the JSON response
-                    json_start = text_data.find('{')
-                    json_data = text_data[json_start:]
-                    data = json.loads(json_data)
-
-                    # Check if the response contains images
-                    if 'images' in data and len(data['images']) > 0:
-                        image_url = data['images'][0]
-                        yield ImageResponse(image_url, prompt)
-                    else:
-                        yield ImageResponse("No images found in the response.", prompt)
-                except json.JSONDecodeError:
-                    yield ImageResponse("Failed to parse JSON. Response might not be in JSON format.", prompt)
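A usage sketch for this deleted image provider, valid only before this commit; it assumes the `g4f.Provider` re-export shown in the import hunks and a reachable Nexra endpoint:

import asyncio
from g4f.Provider import NexraAnimagineXL  # removed by this commit

async def main():
    async for item in NexraAnimagineXL.create_async_generator(
        model="animagine-xl",
        messages=[{"role": "user", "content": "a watercolor fox"}],
    ):
        print(item)  # ImageResponse wrapping the returned image URL

asyncio.run(main())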
@@ -13,7 +13,7 @@ class NexraBing(AsyncGeneratorProvider, ProviderModelMixin):
    label = "Nexra Bing"
    url = "https://nexra.aryahcr.cc/documentation/bing/en"
    api_endpoint = "https://nexra.aryahcr.cc/api/chat/complements"
-    working = True
+    working = False
    supports_gpt_4 = False
    supports_stream = False
@@ -11,7 +11,7 @@ class NexraGeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
    label = "Nexra Gemini PRO"
    url = "https://nexra.aryahcr.cc/documentation/gemini-pro/en"
    api_endpoint = "https://nexra.aryahcr.cc/api/chat/complements"
-    working = True
+    working = False
    supports_stream = True

    default_model = 'gemini-pro'
@@ -17,9 +17,17 @@ class NexraLLaMA31(AsyncGeneratorProvider, ProviderModelMixin):

    default_model = 'llama-3.1'
    models = [default_model]
    model_aliases = {
        "llama-3.1-8b": "llama-3.1",
    }

    @classmethod
    def get_model(cls, model: str) -> str:
        if model in cls.models:
            return model
        elif model in cls.model_aliases:
            return cls.model_aliases.get(model, cls.default_model)
        else:
            return cls.default_model

    @classmethod
@@ -12,7 +12,7 @@ class NexraMidjourney(AsyncGeneratorProvider, ProviderModelMixin):
    label = "Nexra Midjourney"
    url = "https://nexra.aryahcr.cc/documentation/midjourney/en"
    api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
-    working = True
+    working = False

    default_model = 'midjourney'
    models = [default_model]
@@ -12,7 +12,7 @@ class NexraProdiaAI(AsyncGeneratorProvider, ProviderModelMixin):
    label = "Nexra Prodia AI"
    url = "https://nexra.aryahcr.cc/documentation/prodia/en"
    api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
-    working = True
+    working = False

    default_model = 'absolutereality_v181.safetensors [3d9d4d2b]'
    models = [
@@ -12,7 +12,7 @@ class NexraSD15(AsyncGeneratorProvider, ProviderModelMixin):
    label = "Nexra Stable Diffusion 1.5"
    url = "https://nexra.aryahcr.cc/documentation/stable-diffusion/en"
    api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
-    working = True
+    working = False

    default_model = 'stablediffusion-1.5'
    models = [default_model]
@@ -12,7 +12,7 @@ class NexraSD21(AsyncGeneratorProvider, ProviderModelMixin):
    label = "Nexra Stable Diffusion 2.1"
    url = "https://nexra.aryahcr.cc/documentation/stable-diffusion/en"
    api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
-    working = True
+    working = False

    default_model = 'stablediffusion-2.1'
    models = [default_model]
@@ -12,7 +12,7 @@ class NexraSDLora(AsyncGeneratorProvider, ProviderModelMixin):
    label = "Nexra Stable Diffusion Lora"
    url = "https://nexra.aryahcr.cc/documentation/stable-diffusion/en"
    api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
-    working = True
+    working = False

    default_model = 'sdxl-lora'
    models = [default_model]
@@ -12,7 +12,7 @@ class NexraSDTurbo(AsyncGeneratorProvider, ProviderModelMixin):
    label = "Nexra Stable Diffusion Turbo"
    url = "https://nexra.aryahcr.cc/documentation/stable-diffusion/en"
    api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
-    working = True
+    working = False

    default_model = 'sdxl-turbo'
    models = [default_model]
@@ -1,4 +1,3 @@
-from .NexraAnimagineXL import NexraAnimagineXL
from .NexraBing import NexraBing
from .NexraBlackbox import NexraBlackbox
from .NexraChatGPT import NexraChatGPT
g4f/models.py (236 changes)
@@ -9,7 +9,6 @@ from .Provider import (
    Allyfy,
    AmigoChat,
    Bing,
-    Binjie,
    Blackbox,
    ChatGpt,
    Chatgpt4Online,
@@ -35,11 +34,8 @@ from .Provider import (
    HuggingFace,
    Koala,
    Liaobots,
-    LiteIcoding,
    MagickPen,
    MetaAI,
-    NexraAnimagineXL,
-    NexraBing,
    NexraBlackbox,
    NexraChatGPT,
    NexraChatGPT4o,
@@ -50,15 +46,8 @@ from .Provider import (
    NexraDalleMini,
    NexraEmi,
    NexraFluxPro,
-    NexraGeminiPro,
    NexraLLaMA31,
-    NexraMidjourney,
-    NexraProdiaAI,
    NexraQwen,
-    NexraSD15,
-    NexraSD21,
-    NexraSDLora,
-    NexraSDTurbo,
    OpenaiChat,
    PerplexityLabs,
    Pi,
@@ -68,7 +57,6 @@ from .Provider import (
    ReplicateHome,
    TeachAnything,
    Upstage,
    You,
)
@@ -102,11 +90,9 @@ default = Model(
        ReplicateHome,
        Upstage,
        Blackbox,
-        Binjie,
        Free2GPT,
        MagickPen,
        DeepInfraChat,
-        LiteIcoding,
        Airforce,
        ChatHub,
        ChatGptEs,
@@ -133,58 +119,45 @@ gpt_3 = Model(
gpt_35_turbo = Model(
    name = 'gpt-3.5-turbo',
    base_provider = 'OpenAI',
-    best_provider = IterListProvider([
-        Allyfy, NexraChatGPT, Airforce, DarkAI, Liaobots,
-    ])
+    best_provider = IterListProvider([Allyfy, NexraChatGPT, Airforce, DarkAI, Liaobots])
)

# gpt-4
gpt_4o = Model(
    name = 'gpt-4o',
    base_provider = 'OpenAI',
-    best_provider = IterListProvider([
-        NexraChatGPT4o, ChatGptEs, AmigoChat, DarkAI, Liaobots, Airforce,
-        OpenaiChat
-    ])
+    best_provider = IterListProvider([NexraChatGPT4o, ChatGptEs, AmigoChat, DarkAI, Liaobots, Airforce, OpenaiChat])
)

gpt_4o_mini = Model(
    name = 'gpt-4o-mini',
    base_provider = 'OpenAI',
-    best_provider = IterListProvider([
-        DDG, ChatGptEs, You, FreeNetfly, Pizzagpt, LiteIcoding, MagickPen, AmigoChat, Liaobots, Airforce, ChatgptFree, Koala,
-        OpenaiChat, ChatGpt
-    ])
+    best_provider = IterListProvider([DDG, ChatGptEs, FreeNetfly, Pizzagpt, MagickPen, AmigoChat, Liaobots, Airforce, ChatgptFree, Koala, OpenaiChat, ChatGpt])
)

gpt_4_turbo = Model(
    name = 'gpt-4-turbo',
    base_provider = 'OpenAI',
-    best_provider = IterListProvider([
-        Liaobots, Airforce, Bing
-    ])
+    best_provider = IterListProvider([Liaobots, Airforce, Bing])
)

gpt_4 = Model(
    name = 'gpt-4',
    base_provider = 'OpenAI',
-    best_provider = IterListProvider([
-        NexraBing, NexraChatGPT, NexraChatGptV2, NexraChatGptWeb, Binjie, Airforce, Chatgpt4Online, Bing, OpenaiChat,
-        gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider
-    ])
+    best_provider = IterListProvider([NexraChatGPT, NexraChatGptV2, NexraChatGptWeb, Airforce, Chatgpt4Online, Bing, OpenaiChat])
)

# o1
o1 = Model(
    name = 'o1',
    base_provider = 'OpenAI',
-    best_provider = IterListProvider([AmigoChat])
+    best_provider = AmigoChat
)

o1_mini = Model(
    name = 'o1-mini',
    base_provider = 'OpenAI',
-    best_provider = IterListProvider([AmigoChat])
+    best_provider = AmigoChat
)
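Several hunks in this file collapse `IterListProvider([X])` to a bare `X`; the wrapper only adds value when there is more than one provider to fall back through. A simplified stand-in showing that behavior (the real class lives in g4f and differs in detail):

class IterListProvider:
    # Simplified stand-in for g4f's fallback wrapper.
    def __init__(self, providers):
        self.providers = providers

    def create(self, prompt):
        for provider in self.providers:
            try:
                return provider.create(prompt)  # first success wins
            except Exception:
                continue                        # otherwise try the next one
        raise RuntimeError("all providers failed")

With a single element the loop is equivalent to calling that provider directly, which is why the one-element wrappers could be dropped.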
@@ -229,17 +202,11 @@ llama_3_70b = Model(
    best_provider = IterListProvider([ReplicateHome, Airforce, DeepInfra, Replicate])
)

-llama_3 = Model(
-    name = "llama-3",
-    base_provider = "Meta Llama",
-    best_provider = IterListProvider([llama_3_8b.best_provider, llama_3_70b.best_provider])
-)
-
# llama 3.1
llama_3_1_8b = Model(
    name = "llama-3.1-8b",
    base_provider = "Meta Llama",
-    best_provider = IterListProvider([Blackbox, DeepInfraChat, ChatHub, Cloudflare, Airforce, PerplexityLabs])
+    best_provider = IterListProvider([Blackbox, DeepInfraChat, ChatHub, Cloudflare, NexraLLaMA31, Airforce, PerplexityLabs])
)

llama_3_1_70b = Model(
@@ -254,23 +221,17 @@ llama_3_1_405b = Model(
    best_provider = IterListProvider([DeepInfraChat, Blackbox, AmigoChat, DarkAI, Airforce])
)

-llama_3_1 = Model(
-    name = "llama-3.1",
-    base_provider = "Meta Llama",
-    best_provider = IterListProvider([NexraLLaMA31, ChatifyAI, llama_3_1_8b.best_provider, llama_3_1_70b.best_provider, llama_3_1_405b.best_provider,])
-)
-
# llama 3.2
llama_3_2_1b = Model(
    name = "llama-3.2-1b",
    base_provider = "Meta Llama",
-    best_provider = IterListProvider([Cloudflare])
+    best_provider = Cloudflare
)

llama_3_2_3b = Model(
    name = "llama-3.2-3b",
    base_provider = "Meta Llama",
-    best_provider = IterListProvider([Cloudflare])
+    best_provider = Cloudflare
)

llama_3_2_11b = Model(
@@ -285,22 +246,18 @@ llama_3_2_90b = Model(
    best_provider = IterListProvider([AmigoChat, Airforce])
)

-llama_3_2 = Model(
-    name = "llama-3.2",
-    base_provider = "Meta Llama",
-    best_provider = IterListProvider([llama_3_2_1b.best_provider, llama_3_2_3b.best_provider, llama_3_2_11b.best_provider, llama_3_2_90b.best_provider])
-)
-
# llamaguard
llamaguard_7b = Model(
    name = "llamaguard-7b",
    base_provider = "Meta Llama",
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
)

llamaguard_2_8b = Model(
    name = "llamaguard-2-8b",
    base_provider = "Meta Llama",
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
)
@@ -334,13 +291,13 @@ mistral_nemo = Model(
mixtral_8x7b_dpo = Model(
    name = "mixtral-8x7b-dpo",
    base_provider = "NousResearch",
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
)

yi_34b = Model(
    name = "yi-34b",
    base_provider = "NousResearch",
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
)

hermes_3 = Model(
@@ -374,7 +331,7 @@ phi_3_5_mini = Model(
gemini_pro = Model(
    name = 'gemini-pro',
    base_provider = 'Google DeepMind',
-    best_provider = IterListProvider([GeminiPro, LiteIcoding, Blackbox, AIChatFree, GPROChat, NexraGeminiPro, AmigoChat, Liaobots, Airforce])
+    best_provider = IterListProvider([GeminiPro, Blackbox, AIChatFree, GPROChat, AmigoChat, Liaobots, Airforce])
)

gemini_flash = Model(
@@ -386,7 +343,7 @@ gemini_flash = Model(
gemini = Model(
    name = 'gemini',
    base_provider = 'Google DeepMind',
-    best_provider = IterListProvider([Gemini, gemini_flash.best_provider, gemini_pro.best_provider])
+    best_provider = Gemini
)

# gemma
@@ -405,16 +362,13 @@ gemma_2b_27b = Model(
gemma_2b = Model(
    name = 'gemma-2b',
    base_provider = 'Google',
-    best_provider = IterListProvider([
-        ReplicateHome, Airforce,
-        gemma_2b_9b.best_provider, gemma_2b_27b.best_provider,
-    ])
+    best_provider = IterListProvider([ReplicateHome, Airforce])
)

gemma_7b = Model(
    name = 'gemma-7b',
    base_provider = 'Google',
-    best_provider = IterListProvider([Cloudflare])
+    best_provider = Cloudflare
)

# gemma 2
@@ -427,10 +381,7 @@ gemma_2_27b = Model(
gemma_2 = Model(
    name = 'gemma-2',
    base_provider = 'Google',
-    best_provider = IterListProvider([
-        ChatHub,
-        gemma_2_27b.best_provider,
-    ])
+    best_provider = ChatHub
)
@@ -441,15 +392,6 @@ claude_2_1 = Model(
    best_provider = Liaobots
)

-claude_2 = Model(
-    name = 'claude-2',
-    base_provider = 'Anthropic',
-    best_provider = IterListProvider([
-        You,
-        claude_2_1.best_provider,
-    ])
-)
-
# claude 3
claude_3_opus = Model(
    name = 'claude-3-opus',
@@ -469,14 +411,6 @@ claude_3_haiku = Model(
    best_provider = IterListProvider([DDG, Airforce, Liaobots])
)

-claude_3 = Model(
-    name = 'claude-3',
-    base_provider = 'Anthropic',
-    best_provider = IterListProvider([
-        claude_3_opus.best_provider, claude_3_sonnet.best_provider, claude_3_haiku.best_provider
-    ])
-)
-
# claude 3.5
claude_3_5_sonnet = Model(
    name = 'claude-3.5-sonnet',
@@ -484,16 +418,6 @@ claude_3_5_sonnet = Model(
    best_provider = IterListProvider([Blackbox, Airforce, AmigoChat, Liaobots])
)

-claude_3_5 = Model(
-    name = 'claude-3.5',
-    base_provider = 'Anthropic',
-    best_provider = IterListProvider([
-        LiteIcoding,
-        claude_3_5_sonnet.best_provider
-    ])
-)
-

### Reka AI ###
reka_core = Model(
@@ -513,7 +437,7 @@ blackboxai = Model(
blackboxai_pro = Model(
    name = 'blackboxai-pro',
    base_provider = 'Blackbox AI',
-    best_provider = IterListProvider([Blackbox])
+    best_provider = Blackbox
)
@@ -537,7 +461,7 @@ command_r_plus = Model(
sparkdesk_v1_1 = Model(
    name = 'sparkdesk-v1.1',
    base_provider = 'iFlytek',
-    best_provider = IterListProvider([FreeChatgpt])
+    best_provider = FreeChatgpt
)
@@ -589,7 +513,7 @@ qwen_2_72b = Model(
qwen = Model(
    name = 'qwen',
    base_provider = 'Qwen',
-    best_provider = IterListProvider([NexraQwen, qwen_1_5_0_5b.best_provider, qwen_1_5_14b.best_provider, qwen_1_5_72b.best_provider, qwen_1_5_110b.best_provider, qwen_1_5_1_8b.best_provider, qwen_2_72b.best_provider])
+    best_provider = NexraQwen
)
@@ -747,7 +671,7 @@ sonar_chat = Model(
mythomax_l2_13b = Model(
    name = 'mythomax-l2-13b',
    base_provider = 'Gryphe',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
)
@@ -755,7 +679,7 @@ mythomax_l2_13b = Model(
cosmosrp = Model(
    name = 'cosmosrp',
    base_provider = 'Pawan',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce
)
@@ -763,7 +687,7 @@ cosmosrp = Model(
german_7b = Model(
    name = 'german-7b',
    base_provider = 'TheBloke',
-    best_provider = IterListProvider([Cloudflare])
+    best_provider = Cloudflare
)
@@ -771,7 +695,7 @@ german_7b = Model(
tinyllama_1_1b = Model(
    name = 'tinyllama-1.1b',
    base_provider = 'Tinyllama',
-    best_provider = IterListProvider([Cloudflare])
+    best_provider = Cloudflare
)
@@ -779,7 +703,7 @@ tinyllama_1_1b = Model(
cybertron_7b = Model(
    name = 'cybertron-7b',
    base_provider = 'Fblgit',
-    best_provider = IterListProvider([Cloudflare])
+    best_provider = Cloudflare
)
@@ -789,64 +713,25 @@ cybertron_7b = Model(
#############

### Stability AI ###
-sdxl_lora = Model(
-    name = 'sdxl-lora',
-    base_provider = 'Stability AI',
-    best_provider = IterListProvider([NexraSDLora])
-
-)
-
-sdxl_turbo = Model(
-    name = 'sdxl-turbo',
-    base_provider = 'Stability AI',
-    best_provider = IterListProvider([NexraSDTurbo])
-
-)
-
sdxl = Model(
    name = 'sdxl',
    base_provider = 'Stability AI',
-    best_provider = IterListProvider([
-        ReplicateHome, NexraSD21, DeepInfraImage,
-        sdxl_lora.best_provider, sdxl_turbo.best_provider,
-    ])
-
-)
-
-sd_1_5 = Model(
-    name = 'sd-1.5',
-    base_provider = 'Stability AI',
-    best_provider = IterListProvider([NexraSD15])
-
-)
-
-sd_2_1 = Model(
-    name = 'sd-2.1',
-    base_provider = 'Stability AI',
-    best_provider = IterListProvider([NexraSD21])
+    best_provider = IterListProvider([ReplicateHome, DeepInfraImage])

)

sd_3 = Model(
    name = 'sd-3',
    base_provider = 'Stability AI',
-    best_provider = IterListProvider([ReplicateHome])
+    best_provider = ReplicateHome

)

-sd = Model(
-    name = 'sd',
-    base_provider = 'Stability AI',
-    best_provider = IterListProvider([sd_1_5.best_provider, sd_2_1.best_provider, sd_3.best_provider])
-
-)
-
### Playground ###
playground_v2_5 = Model(
    name = 'playground-v2.5',
    base_provider = 'Playground AI',
-    best_provider = IterListProvider([ReplicateHome])
+    best_provider = ReplicateHome

)
@@ -876,42 +761,42 @@ flux_realism = Model(
flux_anime = Model(
    name = 'flux-anime',
    base_provider = 'Flux AI',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce

)

flux_3d = Model(
    name = 'flux-3d',
    base_provider = 'Flux AI',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce

)

flux_disney = Model(
    name = 'flux-disney',
    base_provider = 'Flux AI',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce

)

flux_pixel = Model(
    name = 'flux-pixel',
    base_provider = 'Flux AI',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce

)

flux_4o = Model(
    name = 'flux-4o',
    base_provider = 'Flux AI',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce

)

flux_schnell = Model(
    name = 'flux-schnell',
    base_provider = 'Flux AI',
-    best_provider = IterListProvider([ReplicateHome])
+    best_provider = ReplicateHome

)
@@ -920,59 +805,43 @@ flux_schnell = Model(
dalle_2 = Model(
    name = 'dalle-2',
    base_provider = 'OpenAI',
-    best_provider = IterListProvider([NexraDallE2])
+    best_provider = NexraDallE2

)
dalle_3 = Model(
    name = 'dalle-3',
    base_provider = 'OpenAI',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce

)

dalle = Model(
    name = 'dalle',
    base_provider = 'OpenAI',
-    best_provider = IterListProvider([NexraDallE, dalle_2.best_provider, dalle_3.best_provider])
+    best_provider = NexraDallE

)

dalle_mini = Model(
    name = 'dalle-mini',
    base_provider = 'OpenAI',
-    best_provider = IterListProvider([NexraDalleMini])
+    best_provider = NexraDalleMini

)

-
-### Cagliostro Research Lab ###
-animagine_xl = Model(
-    name = 'animagine-xl',
-    base_provider = 'Cagliostro Research Lab',
-    best_provider = IterListProvider([NexraAnimagineXL])
-
-)
-
-### Midjourney ###
-midjourney = Model(
-    name = 'midjourney',
-    base_provider = 'Midjourney',
-    best_provider = IterListProvider([NexraMidjourney])
-
-)
-
### Other ###
emi = Model(
    name = 'emi',
    base_provider = '',
-    best_provider = IterListProvider([NexraEmi])
+    best_provider = NexraEmi

)

any_dark = Model(
    name = 'any-dark',
    base_provider = '',
-    best_provider = IterListProvider([Airforce])
+    best_provider = Airforce

)
@@ -1015,18 +884,15 @@ class ModelUtils:
        'llama-2-13b': llama_2_13b,

        # llama-3
-        'llama-3': llama_3,
        'llama-3-8b': llama_3_8b,
        'llama-3-70b': llama_3_70b,

        # llama-3.1
-        'llama-3.1': llama_3_1,
        'llama-3.1-8b': llama_3_1_8b,
        'llama-3.1-70b': llama_3_1_70b,
        'llama-3.1-405b': llama_3_1_405b,

        # llama-3.2
-        'llama-3.2': llama_3_2,
        'llama-3.2-1b': llama_3_2_1b,
        'llama-3.2-3b': llama_3_2_3b,
        'llama-3.2-11b': llama_3_2_11b,
@@ -1074,17 +940,14 @@ class ModelUtils:


        ### Anthropic ###
-        'claude-2': claude_2,
        'claude-2.1': claude_2_1,

        # claude 3
-        'claude-3': claude_3,
        'claude-3-opus': claude_3_opus,
        'claude-3-sonnet': claude_3_sonnet,
        'claude-3-haiku': claude_3_haiku,

        # claude 3.5
-        'claude-3.5': claude_3_5,
        'claude-3.5-sonnet': claude_3_5_sonnet,
@@ -1213,11 +1076,6 @@ class ModelUtils:

        ### Stability AI ###
        'sdxl': sdxl,
-        'sdxl-lora': sdxl_lora,
-        'sdxl-turbo': sdxl_turbo,
-        'sd': sd,
-        'sd-1.5': sd_1_5,
-        'sd-2.1': sd_2_1,
        'sd-3': sd_3,
@@ -1244,14 +1102,6 @@ class ModelUtils:
        'dalle-mini': dalle_mini,

-
-        ### Cagliostro Research Lab ###
-        'animagine-xl': animagine_xl,
-
-
-        ### Midjourney ###
-        'midjourney': midjourney,
-
        ### Other ###
        'emi': emi,
        'any-dark': any_dark,
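The `ModelUtils.convert` mapping edited above is what resolves a model-name string to a `Model` object at call time, so end-user code is unaffected by this commit as long as it requests models that still have providers. A usage sketch through g4f's classic entry point:

import g4f

# 'gpt-4o-mini' is resolved via ModelUtils.convert to the Model shown above.
response = g4f.ChatCompletion.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "ping"}],
)
print(response)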