Mirror of https://github.com/xtekky/gpt4free.git (synced 2025-12-06 02:30:41 -08:00)
Fix unittests

commit 082c152248 (parent 3775c1e06d)
6 changed files with 31 additions and 19 deletions
@@ -31,7 +31,7 @@ from g4f import debug
 debug.logging = True
 
 # Constants
-DEFAULT_MODEL = "claude-3.7-sonnet"
+DEFAULT_MODEL = "gpt-4o"
 FALLBACK_MODELS = []
 MAX_DIFF_SIZE = None  # Set to None to disable truncation, or a number for character limit
 MAX_RETRIES = 3
@@ -13,6 +13,7 @@ class TestProviderHasModel(unittest.TestCase):
         for model, providers in __models__.values():
             for provider in providers:
                 if issubclass(provider, ProviderModelMixin):
+                    provider.get_models()  # Update models
                     if model.name in provider.model_aliases:
                         model_name = provider.model_aliases[model.name]
                     else:
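The added `provider.get_models()` call matters because many providers fill `models` and `model_aliases` lazily, on the first fetch of the remote model list; reading `model_aliases` before that fetch sees an empty table and the test fails. A minimal sketch of the pattern, with a hypothetical provider:

    class LazyProvider:
        # Hypothetical: aliases become known only after the remote
        # model list has been fetched at least once.
        models = []
        model_aliases = {}

        @classmethod
        def get_models(cls):
            if not cls.models:  # fetch once, then cache
                cls.models = ["qwen3-235b-a22b"]  # stand-in for an HTTP call
                cls.model_aliases = {"qwen3-235b": "qwen3-235b-a22b"}
            return cls.models

    assert LazyProvider.model_aliases == {}   # empty before the fetch
    LazyProvider.get_models()
    assert "qwen3-235b" in LazyProvider.model_aliases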
@@ -303,7 +303,7 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
         # xAI
         "grok-3-reason": "grok-3-reason",
         "o3-mini": "o3-mini-2025-01-31",
-        "qwen-3-235b": "qwen3-235b-a22b",
+        "qwen3-235b": "qwen3-235b-a22b",
     }
 
     _auth_code = None
@@ -7,6 +7,7 @@ import uuid
 from ...typing import AsyncResult, Messages
 from ...providers.response import Reasoning, JsonConversation
 from ...requests.raise_for_status import raise_for_status
+from ...errors import ModelNotSupportedError
 from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
 from ..helper import get_last_user_message
 from ... import debug
@@ -32,17 +33,25 @@ class Qwen_Qwen_3(AsyncGeneratorProvider, ProviderModelMixin):
         "qwen3-1.7b",
         "qwen3-0.6b",
     }
+    model_aliases = {
+        "qwen3-235b": default_model,
+        "qwen3-30b": "qwen3-30b-a3b"
+    }
 
     @classmethod
     async def create_async_generator(
         cls,
         model: str,
         messages: Messages,
         proxy: str = None,
         conversation: JsonConversation = None,
         thinking_budget: int = 38,
         **kwargs
     ) -> AsyncResult:
+        try:
+            model = cls.get_model(model)
+        except ModelNotSupportedError:
+            pass
         if conversation is None:
             conversation = JsonConversation(session_hash=str(uuid.uuid4()).replace('-', ''))
 
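The new `try`/`except ModelNotSupportedError: pass` makes model resolution forgiving: a name that `get_model` cannot map is passed through to the backend unchanged rather than aborting the request. A sketch of that fallback, using a stand-in resolver:

    class ModelNotSupportedError(Exception):
        pass

    ALIASES = {"qwen3-235b": "qwen3-235b-a22b"}  # stand-in alias table

    def get_model(name):
        # Stand-in for cls.get_model(): resolve aliases, reject unknowns.
        if name in ALIASES:
            return ALIASES[name]
        raise ModelNotSupportedError(name)

    def resolve(name):
        try:
            return get_model(name)
        except ModelNotSupportedError:
            return name  # forward unresolved names as-is

    print(resolve("qwen3-235b"))    # qwen3-235b-a22b
    print(resolve("custom-model"))  # custom-model, left for the API to judge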
@@ -73,7 +82,7 @@ class Qwen_Qwen_3(AsyncGeneratorProvider, ProviderModelMixin):
 
         async with aiohttp.ClientSession() as session:
             # Send join request
-            async with session.post(cls.api_endpoint, headers=headers_join, json=payload_join) as response:
+            async with session.post(cls.api_endpoint, headers=headers_join, json=payload_join, proxy=proxy) as response:
                 await raise_for_status(response)
                 (await response.json())['event_id']
 
@@ -92,7 +101,7 @@ class Qwen_Qwen_3(AsyncGeneratorProvider, ProviderModelMixin):
             }
 
             # Send data stream request
-            async with session.get(url_data, headers=headers_data, params=params_data) as response:
+            async with session.get(url_data, headers=headers_data, params=params_data, proxy=proxy) as response:
                 is_thinking = False
                 async for line in response.content:
                     decoded_line = line.decode('utf-8')
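Both request changes exist because aiohttp applies proxies per request, not per `ClientSession`: there is no session-wide proxy argument, so every `post` and `get` must forward `proxy=proxy` explicitly or the setting is silently ignored. A minimal sketch (the proxy URL is a placeholder):

    import asyncio
    import aiohttp

    async def fetch(url, proxy=None):
        async with aiohttp.ClientSession() as session:
            # The proxy must be passed on each individual request.
            async with session.get(url, proxy=proxy) as response:
                return await response.text()

    asyncio.run(fetch("https://example.com"))  # direct
    # asyncio.run(fetch("https://example.com", proxy="http://127.0.0.1:8080"))  # via proxy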
@@ -55,11 +55,13 @@ class HuggingSpace(AsyncGeneratorProvider, ProviderModelMixin):
         models = []
         image_models = []
         vision_models = []
+        cls.model_aliases = {}
         for provider in cls.providers:
             models.extend(provider.get_models(**kwargs))
+            models.extend(provider.model_aliases.keys())
             image_models.extend(provider.image_models)
             vision_models.extend(provider.vision_models)
+            cls.model_aliases.update(provider.model_aliases)
         models = list(set(models))
         models.sort()
         cls.models = models
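With the added lines, `HuggingSpace.get_models` both advertises each sub-provider's alias names in the aggregated list and keeps a merged `cls.model_aliases` table, so names like the new `qwen3-*` spellings resolve at the wrapper level. A reduced sketch of the aggregation, with two hypothetical sub-providers:

    class A:
        models = ["qwen3-235b-a22b"]
        model_aliases = {"qwen3-235b": "qwen3-235b-a22b"}

    class B:
        models = ["qwen3-32b"]
        model_aliases = {}

    models, aliases = [], {}
    for provider in [A, B]:
        models.extend(provider.models)
        models.extend(provider.model_aliases.keys())  # advertise aliases too
        aliases.update(provider.model_aliases)        # one combined lookup table
    models = sorted(set(models))
    print(models)   # ['qwen3-235b', 'qwen3-235b-a22b', 'qwen3-32b']
    print(aliases)  # {'qwen3-235b': 'qwen3-235b-a22b'}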
@@ -631,45 +631,45 @@ qwen_2_5_vl_72b = Model(
     best_provider = Blackbox
 )
 
-# qwen-3
+# qwen3
 qwen_3_235b = Model(
-    name = 'qwen-3-235b',
+    name = 'qwen3-235b',
     base_provider = 'Qwen',
     best_provider = IterListProvider([DeepInfraChat, HuggingSpace, Liaobots])
 )
 
 qwen_3_32b = Model(
-    name = 'qwen-3-32b',
+    name = 'qwen3-32b',
     base_provider = 'Qwen',
     best_provider = IterListProvider([DeepInfraChat, HuggingSpace])
 )
 
 qwen_3_30b = Model(
-    name = 'qwen-3-30b',
+    name = 'qwen3-30b',
     base_provider = 'Qwen',
     best_provider = IterListProvider([DeepInfraChat, HuggingSpace])
 )
 
 qwen_3_14b = Model(
-    name = 'qwen-3-14b',
+    name = 'qwen3-14b',
     base_provider = 'Qwen',
     best_provider = IterListProvider([DeepInfraChat, HuggingSpace])
 )
 
 qwen_3_4b = Model(
-    name = 'qwen-3-4b',
+    name = 'qwen3-4b',
     base_provider = 'Qwen',
     best_provider = HuggingSpace
 )
 
 qwen_3_1_7b = Model(
-    name = 'qwen-3-1.7b',
+    name = 'qwen3-1.7b',
     base_provider = 'Qwen',
     best_provider = HuggingSpace
 )
 
 qwen_3_0_6b = Model(
-    name = 'qwen-3-0.6b',
+    name = 'qwen3-0.6b',
     base_provider = 'Qwen',
     best_provider = HuggingSpace
 )
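After the rename, clients request the `qwen3-*` spellings; the old `qwen-3-*` names no longer appear in the registry, while the Liaobots and HuggingSpace alias entries above keep the new names resolvable per provider. A usage sketch against g4f's OpenAI-style client (model name taken from this commit):

    from g4f.client import Client

    client = Client()
    response = client.chat.completions.create(
        model="qwen3-235b",  # renamed in this commit from "qwen-3-235b"
        messages=[{"role": "user", "content": "Hello"}],
    )
    print(response.choices[0].message.content)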