gpt4free/g4f/requests/aiohttp.py
hlohaus 67231e8c40 refactor: remove cache parameter and update conversation handling
- Removed the `cache` parameter from the `PollinationsAI` class in `PollinationsAI.py`.
- Added a `parent_message_id` parameter to the `chat_completion` method call in the `DeepSeekAPI` class in `DeepSeekAPI.py`.
- Updated the handling of `conversation` in `DeepSeekAPI.py` to yield the `conversation` object at the end of the method.
- Set `conversation.parent_id` to `chunk['message_id']` when present in the response in `DeepSeekAPI.py`.
- Adjusted the method signatures in `aiohttp.py` to remove unnecessary type hints for `ClientSession` and `None`.
2025-06-12 16:43:14 +02:00

from __future__ import annotations

import json
from aiohttp import ClientSession, ClientResponse, ClientTimeout, BaseConnector, FormData
from typing import AsyncIterator, Any, Optional

from .defaults import DEFAULT_HEADERS
from ..errors import MissingRequirementsError

class StreamResponse(ClientResponse):
    """ClientResponse subclass with helpers for line, chunk and SSE iteration."""

    async def iter_lines(self) -> AsyncIterator[bytes]:
        """Iterate over the response body line by line, stripping trailing CR/LF."""
        async for line in self.content:
            yield line.rstrip(b"\r\n")

    async def iter_content(self) -> AsyncIterator[bytes]:
        """Iterate over the response body in raw chunks as they arrive."""
        async for chunk in self.content.iter_any():
            yield chunk

    async def json(self, content_type: str = None) -> Any:
        return await super().json(content_type=content_type)

    async def sse(self) -> AsyncIterator[dict]:
        """Asynchronously iterate over the Server-Sent Events of the response."""
        async for line in self.content:
            if line.startswith(b"data: "):
                chunk = line[6:]
                if chunk.startswith(b"[DONE]"):
                    break
                try:
                    yield json.loads(chunk)
                except json.JSONDecodeError:
                    continue

class StreamSession:
    """Thin wrapper around ClientSession that returns StreamResponse objects."""

    def __init__(
        self,
        headers: dict = {},
        timeout: int = None,
        connector: BaseConnector = None,
        proxy: str = None,
        proxies: dict = {},
        impersonate = None,
        **kwargs
    ):
        if impersonate:
            # Merge browser-like default headers under any caller-supplied ones.
            headers = {
                **DEFAULT_HEADERS,
                **headers
            }
        connect = None
        if isinstance(timeout, tuple):
            # A (connect, total) tuple is split into separate timeout values.
            connect, timeout = timeout
        if timeout is not None:
            timeout = ClientTimeout(timeout, connect)
        if proxy is None:
            proxy = proxies.get("all", proxies.get("https"))
        self.inner = ClientSession(
            **kwargs,
            timeout=timeout,
            response_class=StreamResponse,
            connector=get_connector(connector, proxy),
            headers=headers
        )

    async def __aenter__(self) -> ClientSession:
        return self.inner

    async def __aexit__(self, *args, **kwargs) -> None:
        await self.inner.close()

def get_connector(connector: BaseConnector = None, proxy: str = None, rdns: bool = False) -> Optional[BaseConnector]:
    """Return a proxy connector built from the proxy URL if no connector was supplied."""
    if proxy and not connector:
        try:
            from aiohttp_socks import ProxyConnector
            if proxy.startswith("socks5h://"):
                # aiohttp_socks expects "socks5://" with rdns=True for remote DNS resolution.
                proxy = proxy.replace("socks5h://", "socks5://")
                rdns = True
            connector = ProxyConnector.from_url(proxy, rdns=rdns)
        except ImportError:
            raise MissingRequirementsError('Install "aiohttp_socks" package for proxy support')
    return connector
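
# Usage sketch (illustrative addition, not part of the original module): how
# StreamSession pairs with StreamResponse.sse() to consume a Server-Sent Events
# endpoint. The URL and JSON payload are placeholders, not a real API; the helper
# is defined here purely as an example and is never called.
async def _demo_sse() -> None:
    async with StreamSession(timeout=(10, 120), headers={"Accept": "text/event-stream"}) as session:
        async with session.post("https://example.com/v1/chat", json={"stream": True}) as response:
            async for event in response.sse():
                print(event)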