Mirror of https://github.com/xtekky/gpt4free.git
Fix code review issues in BAAI_Ling provider

Co-authored-by: hlohaus <983577+hlohaus@users.noreply.github.com>

parent c364425250
commit 04e300d7a6

1 changed file with 6 additions and 5 deletions
@@ -5,7 +5,7 @@ import json
 import uuid

 from ...typing import AsyncResult, Messages
-from ...providers.response import JsonConversation, Reasoning
+from ...providers.response import JsonConversation
 from ...requests.raise_for_status import raise_for_status
 from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
 from ..helper import format_prompt, get_last_user_message
@@ -22,11 +22,10 @@ class BAAI_Ling(AsyncGeneratorProvider, ProviderModelMixin):
     supports_message_history = False

     default_model = "ling-1t"
-    models = [default_model]
     model_aliases = {
-        "ling-1t": default_model,
         "ling": default_model,
     }
+    models = [default_model]

     @classmethod
     async def create_async_generator(
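The removed `"ling-1t": default_model` entry was a self-alias: the key already equals the default model name, so it adds nothing once `"ling-1t"` is in `models`. Below is a minimal illustrative sketch (not the actual g4f `ProviderModelMixin` code) of how such alias resolution typically works; `resolve_model` is a hypothetical helper added only for this example.

```python
# Illustrative sketch, not the provider's real resolution code.
default_model = "ling-1t"
models = [default_model]
model_aliases = {"ling": default_model}  # only genuine aliases need an entry

def resolve_model(name: str) -> str:
    """Map a requested model name to a supported one (hypothetical helper)."""
    if not name:
        return default_model            # empty request falls back to the default
    if name in models:
        return name                     # "ling-1t" resolves here without a self-alias
    return model_aliases.get(name, default_model)

assert resolve_model("ling-1t") == "ling-1t"
assert resolve_model("ling") == "ling-1t"
```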
@@ -37,10 +36,11 @@ class BAAI_Ling(AsyncGeneratorProvider, ProviderModelMixin):
         conversation: JsonConversation = None,
         **kwargs
     ) -> AsyncResult:
-        if conversation is None or not hasattr(conversation, 'session_hash'):
+        is_new_conversation = conversation is None or not hasattr(conversation, 'session_hash')
+        if is_new_conversation:
             conversation = JsonConversation(session_hash=str(uuid.uuid4()).replace('-', '')[:12])

-        prompt = format_prompt(messages) if conversation is None else get_last_user_message(messages)
+        prompt = format_prompt(messages) if is_new_conversation else get_last_user_message(messages)

         headers = {
             'accept': '*/*',
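The behavioural fix in this hunk: the old code re-tested `conversation is None` after `conversation` had already been replaced with a fresh `JsonConversation`, so the `format_prompt` branch could never run. Capturing the check in `is_new_conversation` before the reassignment restores it. A small self-contained sketch of the pattern, with simplified names rather than the provider code:

```python
# Sketch of the stale-check bug and the fix (names simplified for illustration).
import uuid

class Conversation:
    def __init__(self, session_hash: str):
        self.session_hash = session_hash

def old_prompt_choice(conversation=None):
    if conversation is None or not hasattr(conversation, 'session_hash'):
        conversation = Conversation(str(uuid.uuid4()).replace('-', '')[:12])
    # Bug: conversation was just reassigned above, so this check is always False
    # and the full-history prompt is never built for a brand-new conversation.
    return "full" if conversation is None else "last"

def new_prompt_choice(conversation=None):
    is_new_conversation = conversation is None or not hasattr(conversation, 'session_hash')
    if is_new_conversation:
        conversation = Conversation(str(uuid.uuid4()).replace('-', '')[:12])
    # The flag captured before the reassignment picks the intended branch.
    return "full" if is_new_conversation else "last"

assert old_prompt_choice(None) == "last"   # wrong branch for a new conversation
assert new_prompt_choice(None) == "full"   # fixed
```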
@@ -62,6 +62,7 @@ class BAAI_Ling(AsyncGeneratorProvider, ProviderModelMixin):
         async with aiohttp.ClientSession() as session:
             async with session.post(cls.api_endpoint, headers=headers, json=payload, proxy=proxy) as response:
                 await raise_for_status(response)
+                # Response body must be consumed for the request to complete
                 await response.json()

             data_url = f'{cls.url}/gradio_api/queue/data?session_hash={conversation.session_hash}'
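The added comment documents the Gradio queue handshake: the POST joins the queue and its JSON body has to be read before moving on, after which results are streamed from the queue/data endpoint for the same session_hash. Below is a hedged sketch of that typical flow; the event parsing and field names are assumptions, since the provider's handling of the stream is outside this hunk.

```python
# Sketch of the usual Gradio queue flow behind data_url (assumed event shape,
# not the provider's actual parsing code).
import json
import aiohttp

async def stream_queue_events(url, api_endpoint, headers, payload, session_hash, proxy=None):
    async with aiohttp.ClientSession() as session:
        # 1) Join the queue; the JSON body has to be consumed for the call to complete.
        async with session.post(api_endpoint, headers=headers, json=payload, proxy=proxy) as response:
            response.raise_for_status()
            await response.json()

        # 2) Stream server-sent events for this session until the job finishes.
        data_url = f"{url}/gradio_api/queue/data?session_hash={session_hash}"
        async with session.get(data_url, headers=headers, proxy=proxy) as response:
            response.raise_for_status()
            async for line in response.content:
                line = line.decode(errors="ignore").strip()
                if not line.startswith("data:"):
                    continue
                event = json.loads(line[len("data:"):])
                yield event  # e.g. process_generating / process_completed events
                if event.get("msg") == "process_completed":
                    break
```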