Commit f05d069896 (parent ef642f5baf)

Add DeepSeek Provider
Add "Get API key" URLs to the GUI

13 changed files with 69 additions and 64 deletions
@@ -6,7 +6,6 @@ from ..typing import AsyncResult, Messages
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
 from .helper import format_prompt


 class GizAI(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://app.giz.ai/assistant"
     api_endpoint = "https://app.giz.ai/api/data/users/inferenceServer.infer"
@@ -18,6 +18,7 @@ class Mhystical(OpenaiAPI):
     label = "Mhystical"
     url = "https://mhystical.cc"
     api_endpoint = "https://api.mhystical.cc/v1/completions"
+    login_url = "https://mhystical.cc/dashboard"
     working = True
     needs_auth = False
     supports_stream = False # Set to False, as streaming is not specified in ChatifyAI
@@ -37,11 +38,12 @@ class Mhystical(OpenaiAPI):
         model: str,
         messages: Messages,
         stream: bool = False,
+        api_key: str = "mhystical",
         **kwargs
     ) -> AsyncResult:
         model = cls.get_model(model)
         headers = {
-            "x-api-key": "mhystical",
+            "x-api-key": api_key,
             "Content-Type": "application/json",
             "accept": "*/*",
             "cache-control": "no-cache",
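The hunk above turns Mhystical's hard-coded x-api-key header into an api_key parameter. A minimal sketch (not part of the diff) of how a caller could now supply its own key through the provider's create_async_generator entry point shown in the hunk; the model name and key value are illustrative assumptions:

    import asyncio
    from g4f.Provider import Mhystical

    async def main():
        # api_key is forwarded into the "x-api-key" header by the change above;
        # leaving it out falls back to the previous default value "mhystical".
        async for chunk in Mhystical.create_async_generator(
            model="gpt-4",                                     # illustrative model name
            messages=[{"role": "user", "content": "Hello"}],
            api_key="my-personal-key",                         # illustrative key
        ):
            print(chunk, end="")

    asyncio.run(main())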
@@ -7,12 +7,12 @@ from urllib.parse import quote
 from typing import Optional
 from aiohttp import ClientSession

+from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
 from ..requests.raise_for_status import raise_for_status
 from ..typing import AsyncResult, Messages
 from ..image import ImageResponse
-from .needs_auth.OpenaiAPI import OpenaiAPI

-class PollinationsAI(OpenaiAPI):
+class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
     label = "Pollinations AI"
     url = "https://pollinations.ai"
@@ -21,21 +21,21 @@ class PollinationsAI(OpenaiAPI):
     supports_stream = True
     supports_system_message = True
     supports_message_history = True

     # API endpoints base
     api_base = "https://text.pollinations.ai/openai"

     # API endpoints
     text_api_endpoint = "https://text.pollinations.ai"
     image_api_endpoint = "https://image.pollinations.ai"

     # Models configuration
     default_model = "openai"
     default_image_model = "flux"

     image_models = []
     models = []

     additional_models_image = ["midjourney", "dall-e-3"]
     additional_models_text = ["sur", "sur-mistral", "claude"]
     model_aliases = {
@@ -100,7 +100,7 @@ class PollinationsAI(OpenaiAPI):
         **kwargs
     ) -> AsyncResult:
         model = cls.get_model(model)

         # Check if models
         # Image generation
         if model in cls.image_models:
@@ -151,7 +151,6 @@ class PollinationsAI(OpenaiAPI):
         if seed is None:
             seed = random.randint(0, 10000)


         headers = {
             'Accept': '*/*',
             'Accept-Language': 'en-US,en;q=0.9',
@@ -177,7 +176,7 @@ class PollinationsAI(OpenaiAPI):

         async with session.head(url, proxy=proxy) as response:
             if response.status == 200:
-                image_response = ImageResponse(images=url, alt=messages[-1]["content"])
+                image_response = ImageResponse(images=url, alt=messages[-1]["content"] if prompt is None else prompt)
                 yield image_response

     @classmethod
@@ -195,7 +194,7 @@ class PollinationsAI(OpenaiAPI):
     ) -> AsyncResult:
         if api_key is None:
             api_key = "dummy" # Default value if api_key is not provided

         headers = {
             "accept": "*/*",
             "accept-language": "en-US,en;q=0.9",
@@ -215,7 +214,7 @@ class PollinationsAI(OpenaiAPI):
             "jsonMode": False,
             "stream": stream
         }

         async with session.post(cls.text_api_endpoint, json=data, proxy=proxy) as response:
             response.raise_for_status()
             async for chunk in response.content:
@@ -7,6 +7,7 @@ from .OpenaiAPI import OpenaiAPI
 class DeepInfra(OpenaiAPI):
     label = "DeepInfra"
     url = "https://deepinfra.com"
+    login_url = "https://deepinfra.com/dash/api_keys"
     working = True
     api_base = "https://api.deepinfra.com/v1/openai",
     needs_auth = True
g4f/Provider/needs_auth/DeepSeek.py (new file, 14 lines)

@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+from .OpenaiAPI import OpenaiAPI
+
+class DeepSeek(OpenaiAPI):
+    label = "DeepSeek"
+    url = "https://platform.deepseek.com"
+    login_url = "https://platform.deepseek.com/api_keys"
+    working = True
+    api_base = "https://api.deepseek.com"
+    needs_auth = True
+    supports_stream = True
+    supports_message_history = True
+    default_model = "deepseek-chat"
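The new provider declares only metadata and inherits its OpenAI-compatible request logic from OpenaiAPI. A rough usage sketch (not part of the diff), assuming g4f's high-level client interface; the key is a placeholder, and needs_auth = True means a real key from the login_url above is required:

    from g4f.client import Client
    from g4f.Provider.needs_auth import DeepSeek

    # DeepSeek points the inherited OpenaiAPI request flow at
    # api_base = "https://api.deepseek.com".
    client = Client(provider=DeepSeek, api_key="sk-...")       # placeholder key
    response = client.chat.completions.create(
        model="deepseek-chat",                                 # the provider's default_model
        messages=[{"role": "user", "content": "Hello"}],
    )
    print(response.choices[0].message.content)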
@@ -5,6 +5,7 @@ from .OpenaiAPI import OpenaiAPI
 class GlhfChat(OpenaiAPI):
     label = "GlhfChat"
     url = "https://glhf.chat"
+    login_url = "https://glhf.chat/users/settings/api"
     api_base = "https://glhf.chat/api/openai/v1"
     working = True
     model_aliases = {
@@ -8,7 +8,7 @@ from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin, RaiseErr
 from ...typing import Union, Optional, AsyncResult, Messages, ImagesType
 from ...requests import StreamSession, raise_for_status
 from ...providers.response import FinishReason, ToolCalls, Usage
-from ...errors import MissingAuthError, ResponseError
+from ...errors import MissingAuthError
 from ...image import to_data_uri
 from ... import debug
@@ -5,6 +5,7 @@ from .Cerebras import Cerebras
 from .CopilotAccount import CopilotAccount
 from .DeepInfra import DeepInfra
 from .DeepInfraImage import DeepInfraImage
+from .DeepSeek import DeepSeek
 from .Gemini import Gemini
 from .GeminiPro import GeminiPro
 from .GithubCopilot import GithubCopilot
@@ -154,54 +154,6 @@
                 <label for="BingCreateImages-api_key" class="label" title="">Microsoft Designer in Bing:</label>
                 <textarea id="BingCreateImages-api_key" name="BingCreateImages[api_key]" placeholder="&quot;_U&quot; cookie"></textarea>
             </div>
-            <div class="field box hidden">
-                <label for="Cerebras-api_key" class="label" title="">Cerebras Inference:</label>
-                <textarea id="Cerebras-api_key" name="Cerebras[api_key]" placeholder="api_key"></textarea>
-            </div>
-            <div class="field box hidden">
-                <label for="Anthropic-api_key" class="label" title="">Anthropic API:</label>
-                <textarea id="Anthropic-api_key" name="Anthropic[api_key]" placeholder="api_key"></textarea>
-            </div>
-            <div class="field box hidden">
-                <label for="DeepInfra-api_key" class="label" title="">DeepInfra:</label>
-                <textarea id="DeepInfra-api_key" name="DeepInfra[api_key]" class="DeepInfraImage-api_key" placeholder="api_key"></textarea>
-            </div>
-            <div class="field box hidden">
-                <label for="GeminiPro-api_key" class="label" title="">Gemini API:</label>
-                <textarea id="GeminiPro-api_key" name="GeminiPro[api_key]" placeholder="api_key"></textarea>
-            </div>
-            <div class="field box hidden">
-                <label for="Groq-api_key" class="label" title="">Groq:</label>
-                <textarea id="Groq-api_key" name="Groq[api_key]" placeholder="api_key"></textarea>
-            </div>
-            <div class="field box hidden">
-                <label for="GlhfChat-api_key" class="label" title="">GlhfChat:</label>
-                <textarea id="GlhfChat-api_key" name="GlhfChat[api_key]" placeholder="api_key"></textarea>
-            </div>
-            <div class="field box hidden">
-                <label for="HuggingFace-api_key" class="label" title="">HuggingFace:</label>
-                <textarea id="HuggingFace-api_key" name="HuggingFace[api_key]" class="HuggingFaceAPI-api_key" placeholder="api_key"></textarea>
-            </div>
-            <div class="field box hidden">
-                <label for="OpenaiAPI-api_key" class="label" title="">OpenAI API:</label>
-                <textarea id="OpenaiAPI-api_key" name="OpenaiAPI[api_key]" placeholder="api_key"></textarea>
-            </div>
-            <div class="field box hidden">
-                <label for="OpenRouter-api_key" class="label" title="">OpenRouter:</label>
-                <textarea id="OpenRouter-api_key" name="OpenRouter[api_key]" placeholder="api_key"></textarea>
-            </div>
-            <div class="field box hidden">
-                <label for="PerplexityApi-api_key" class="label" title="">Perplexity API:</label>
-                <textarea id="PerplexityApi-api_key" name="PerplexityApi[api_key]" placeholder="api_key"></textarea>
-            </div>
-            <div class="field box hidden">
-                <label for="Replicate-api_key" class="label" title="">Replicate:</label>
-                <textarea id="Replicate-api_key" name="Replicate[api_key]" class="ReplicateImage-api_key" placeholder="api_key"></textarea>
-            </div>
-            <div class="field box hidden">
-                <label for="xAI-api_key" class="label" title="">xAI:</label>
-                <textarea id="xAI-api_key" name="xAI[api_key]" placeholder="api_key"></textarea>
-            </div>
         </div>
         <div class="bottom_buttons">
             <button onclick="delete_conversations()">
@@ -1312,6 +1312,10 @@ form textarea {
     padding: 6px;
 }

+.field a {
+    text-wrap: nowrap;
+}
+
 form .field .fa-xmark {
     line-height: 20px;
     cursor: pointer;
@@ -1710,6 +1710,7 @@ async function on_api() {
    });
    providers = await api("providers")
    providers.sort((a, b) => a.label.localeCompare(b.label));
+    let login_urls = {};
    providers.forEach((provider) => {
        let option = document.createElement("option");
        option.value = provider.name;
@@ -1723,6 +1724,36 @@ async function on_api() {
        option.dataset.parent = provider.parent;
        providerSelect.appendChild(option);

+        if (provider.login_url) {
+            if (!login_urls[provider.name]) {
+                login_urls[provider.name] = [provider.label, provider.login_url, []];
+            } else {
+                login_urls[provider.name][0] = provider.label;
+                login_urls[provider.name][1] = provider.login_url;
+            }
+        } else if (provider.parent) {
+            if (!login_urls[provider.parent]) {
+                login_urls[provider.parent] = [provider.label, provider.login_url, [provider.name]];
+            } else {
+                login_urls[provider.parent][2].push(provider.name);
+            }
+        }
+    });
+    for (let [name, [label, login_url, childs]] of Object.entries(login_urls)) {
+        if (!login_url) {
+            continue;
+        }
+        option = document.createElement("div");
+        option.classList.add("field", "box", "hidden");
+        childs = childs.map((child)=>`${child}-api_key`).join(" ");
+        option.innerHTML = `
+            <label for="${name}-api_key" class="label" title="">${label}:</label>
+            <textarea id="${name}-api_key" name="${name}[api_key]" class="${childs}" placeholder="api_key"></textarea>
+            <a href="${login_url}" target="_blank" title="Login to ${label}">Get API key</a>
+        `;
+        settings.querySelector(".paper").appendChild(option);
+    }
+    providers.forEach((provider) => {
        if (!provider.parent) {
            option = document.createElement("div");
            option.classList.add("field");
@@ -65,6 +65,7 @@ class Api:
             "image": getattr(provider, "image_models", None) is not None,
             "vision": getattr(provider, "default_vision_model", None) is not None,
             "auth": provider.needs_auth,
+            "login_url": getattr(provider, "login_url", None),
         } for provider in __providers__ if provider.working]

    @staticmethod
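With login_url exposed here, each provider entry returned to the GUI carries the link that the new "Get API key" anchor is built from. A hypothetical sketch (not part of the diff) of what the DeepSeek entry could look like, limited to the keys visible in the hunk above; the image/vision values are illustrative, while auth and login_url follow from the class added in this commit:

    {
        "image": False,        # illustrative: no image_models defined
        "vision": False,       # illustrative: no default_vision_model defined
        "auth": True,          # DeepSeek.needs_auth
        "login_url": "https://platform.deepseek.com/api_keys",
    }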
@@ -168,7 +168,7 @@ llama_3_1_8b = Model(
 llama_3_1_70b = Model(
     name = "llama-3.1-70b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([DDG, DeepInfraChat, Blackbox, BlackboxCreateAgent, TeachAnything, PollinationsAI, DarkAI, Airforce, RubiksAI, PerplexityLabs])
+    best_provider = IterListProvider([DDG, DeepInfraChat, Blackbox, BlackboxCreateAgent, TeachAnything, DarkAI, Airforce, RubiksAI, PerplexityLabs])
 )

 # llama 3.2