Disable GptOss provider by moving to not_working directory

Author: copilot-swe-agent[bot] 2025-08-23 09:10:15 +00:00
Co-authored-by: hlohaus <983577+hlohaus@users.noreply.github.com>
Commit: 24d2a29090 (parent 91d630cc4b)
5 changed files with 10 additions and 12 deletions
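
For context on the mechanism: g4f providers carry a class-level working flag, and modules parked in the not_working package are flipped to working = False so that automatic provider selection skips them while the code stays in the tree. Below is a minimal sketch of that flag-based filtering; it assumes nothing about the real selection code, and ProviderStub and pick_first_working are illustrative names only.

    # Illustrative only; not the actual g4f selection logic.
    from __future__ import annotations
    from dataclasses import dataclass

    @dataclass
    class ProviderStub:
        name: str
        working: bool

    def pick_first_working(candidates: list[ProviderStub]) -> ProviderStub | None:
        # Skip anything flagged as not working; return the first usable provider.
        for provider in candidates:
            if provider.working:
                return provider
        return None

    print(pick_first_working([ProviderStub("GptOss", False), ProviderStub("Together", True)]).name)
    # -> Together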

@@ -45,7 +45,7 @@ from .Copilot import Copilot
 from .DeepInfraChat import DeepInfraChat
 from .EasyChat import EasyChat
 from .GLM import GLM
-from .GptOss import GptOss
 from .ImageLabs import ImageLabs
 from .Kimi import Kimi
 from .LambdaChat import LambdaChat
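
With this re-export gone, the class can no longer be imported from the top-level provider package; after the move it is only exposed from the not_working subpackage (third hunk below). A quick illustration, assuming the standard g4f package layout:

    # Assumes the standard g4f package layout; illustrative only.
    try:
        from g4f.Provider import GptOss               # removed by this hunk
    except ImportError:
        from g4f.Provider.not_working import GptOss   # new home after the move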

@ -1,18 +1,18 @@
from __future__ import annotations
from ..typing import AsyncResult, Messages, MediaListType
from ..providers.response import JsonConversation, Reasoning, TitleGeneration
from ..requests import StreamSession, raise_for_status
from ..config import DEFAULT_MODEL
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import get_last_user_message
from ...typing import AsyncResult, Messages, MediaListType
from ...providers.response import JsonConversation, Reasoning, TitleGeneration
from ...requests import StreamSession, raise_for_status
from ...config import DEFAULT_MODEL
from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..helper import get_last_user_message
class GptOss(AsyncGeneratorProvider, ProviderModelMixin):
label = "gpt-oss (playground)"
url = "https://gpt-oss.com"
api_endpoint = "https://api.gpt-oss.com/chatkit"
working = True
working = False
active_by_default = True
default_model = "gpt-oss-120b"
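
The import rewrites track the extra package level: the module moves one directory deeper (Provider/ into Provider/not_working/), so each relative import gains a leading dot to keep pointing at the same target. Roughly, assuming the standard g4f layout:

    # Old location g4f/Provider/GptOss.py:
    #   from ..typing import AsyncResult     # resolves to g4f.typing
    #   from .base_provider import ...       # resolves to g4f.Provider.base_provider
    # New location g4f/Provider/not_working/GptOss.py:
    #   from ...typing import AsyncResult    # still resolves to g4f.typing
    #   from ..base_provider import ...      # still resolves to g4f.Provider.base_provider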

@@ -35,4 +35,5 @@ from .TypeGPT import TypeGPT
 from .Upstage import Upstage
 from .Vercel import Vercel
 from .Websim import Websim
+from .GptOss import GptOss
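
Re-exporting GptOss here keeps the class importable for anyone who selects it explicitly, even though working = False takes it out of automatic selection. A small usage sketch built only from attributes visible in the hunks above; the import path assumes the standard g4f layout:

    from g4f.Provider.not_working import GptOss

    print(GptOss.working)        # False after this commit
    print(GptOss.default_model)  # gpt-oss-120b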

@@ -12,7 +12,6 @@ from .Provider import (
     Copilot,
     DeepInfraChat,
     Free2GPT,
-    GptOss,
     HuggingSpace,
     Grok,
     DeepseekAI_JanusPro7b,
@@ -293,7 +292,7 @@ gpt_oss_120b = Model(
     name = 'gpt-oss-120b',
     long_name = 'openai/gpt-oss-120b',
     base_provider = 'OpenAI',
-    best_provider = IterListProvider([GptOss, Together, DeepInfraChat, HuggingFace, OpenRouter, Groq])
+    best_provider = IterListProvider([Together, DeepInfraChat, HuggingFace, OpenRouter, Groq])
 )
 # dall-e
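
IterListProvider tries its providers in order and returns the first successful result, so removing GptOss from the list is what actually stops it from being chosen for gpt-oss-120b. A simplified sketch of that fallback pattern, not the project's real implementation:

    # Simplified fallback sketch; not the actual IterListProvider code.
    class FallbackList:
        def __init__(self, providers):
            self.providers = providers

        def create(self, *args, **kwargs):
            last_error = None
            for provider in self.providers:
                try:
                    return provider.create(*args, **kwargs)   # first success wins
                except Exception as error:
                    last_error = error                        # remember and try the next one
            raise RuntimeError("all providers failed") from last_error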

@@ -172,7 +172,6 @@ model_map = {
         ]
     },
     "gpt-oss-120b": {
-        "GptOss": "gpt-oss-120b",
         "Together": "openai/gpt-oss-120b",
         "DeepInfraChat": "openai/gpt-oss-120b",
         "HuggingFace": "openai/gpt-oss-120b",
@@ -1641,7 +1640,6 @@ model_map = {
         "ApiAirforce": "gpt-oss-20b",
         "DeepInfra": "openai/gpt-oss-20b",
         "FenayAI": "gpt-oss-20b",
-        "GptOss": "gpt-oss-20b",
         "Groq": "openai/gpt-oss-20b",
         "HuggingFaceAPI": "openai/gpt-oss-20b",
         "Nvidia": "openai/gpt-oss-20b",