' if self.b64_json else None})"
-
-class ImagesResponse:
- def __init__(self, data: list[Image]):
- self.data = data
-
- def __repr__(self):
- return f"ImagesResponse(data={self.data})"
-
-class Images:
- def __init__(self, client: 'Client', provider: 'ImageProvider' = None):
- self.client: 'Client' = client
- self.provider: 'ImageProvider' = provider
- self.models: ImageModels = ImageModels(client)
-
- def generate(self, prompt: str, model: str = None, response_format: str = "url", **kwargs) -> ImagesResponse:
- """
- Synchronous generate method that runs the async_generate method in an event loop.
- """
- return asyncio.run(self.async_generate(prompt, model, response_format=response_format, **kwargs))
-
- async def async_generate(self, prompt: str, model: str = None, response_format: str = "url", **kwargs) -> ImagesResponse:
- provider = self.models.get(model, self.provider)
- if provider is None:
- raise ValueError(f"Unknown model: {model}")
-
- if isinstance(provider, IterListProvider):
- if provider.providers:
- provider = provider.providers[0]
- else:
- raise ValueError(f"IterListProvider for model {model} has no providers")
-
- if isinstance(provider, type) and issubclass(provider, AsyncGeneratorProvider):
- messages = [{"role": "user", "content": prompt}]
- async for response in provider.create_async_generator(model, messages, **kwargs):
- if isinstance(response, ImageResponse):
- return await self._process_image_response(response, response_format)
- elif isinstance(response, str):
- image_response = ImageResponse([response], prompt)
- return await self._process_image_response(image_response, response_format)
- elif hasattr(provider, 'create'):
- if asyncio.iscoroutinefunction(provider.create):
- response = await provider.create(prompt)
- else:
- response = provider.create(prompt)
-
- if isinstance(response, ImageResponse):
- return await self._process_image_response(response, response_format)
- elif isinstance(response, str):
- image_response = ImageResponse([response], prompt)
- return await self._process_image_response(image_response, response_format)
- else:
- raise ValueError(f"Provider {provider} does not support image generation")
-
- raise NoImageResponseError(f"Unexpected response type: {type(response)}")
-
- async def _process_image_response(self, response: ImageResponse, response_format: str) -> ImagesResponse:
- processed_images = []
-
- for image_data in response.get_list():
- if image_data.startswith('http://') or image_data.startswith('https://'):
- if response_format == "url":
- processed_images.append(Image(url=image_data))
- elif response_format == "b64_json":
- # Fetch the image data and convert it to base64
- image_content = await self._fetch_image(image_data)
- b64_json = base64.b64encode(image_content).decode('utf-8')
- processed_images.append(Image(b64_json=b64_json))
- else:
- # Assume image_data is base64 data or binary
- if response_format == "url":
- if image_data.startswith('data:image'):
- # Remove the data URL scheme and get the base64 data
- header, base64_data = image_data.split(',', 1)
- else:
- base64_data = image_data
- # Decode the base64 data
- image_data_bytes = base64.b64decode(base64_data)
- # Convert bytes to an image
- image = to_image(image_data_bytes)
- file_name = self._save_image(image)
- processed_images.append(Image(url=file_name))
- elif response_format == "b64_json":
- if isinstance(image_data, bytes):
- b64_json = base64.b64encode(image_data).decode('utf-8')
- else:
- b64_json = image_data # If already base64-encoded string
- processed_images.append(Image(b64_json=b64_json))
-
- return ImagesResponse(processed_images)
-
- async def _fetch_image(self, url: str) -> bytes:
- # Asynchronously fetch image data from the URL
- async with aiohttp.ClientSession() as session:
- async with session.get(url) as resp:
- if resp.status == 200:
- return await resp.read()
- else:
- raise Exception(f"Failed to fetch image from {url}, status code {resp.status}")
-
- def _save_image(self, image: 'PILImage') -> str:
- os.makedirs('generated_images', exist_ok=True)
- file_name = f"generated_images/image_{int(time.time())}_{random.randint(0, 10000)}.png"
- image.save(file_name)
- return file_name
-
- async def create_variation(self, image: Union[str, bytes], model: str = None, response_format: str = "url", **kwargs):
- # Existing implementation, adjust if you want to support b64_json here as well
- pass
diff --git a/g4f/client/helper.py b/g4f/client/helper.py
index c502d478..71bfd38a 100644
--- a/g4f/client/helper.py
+++ b/g4f/client/helper.py
@@ -1,7 +1,12 @@
from __future__ import annotations
import re
-from typing import Iterable, AsyncIterator
+import queue
+import threading
+import logging
+import asyncio
+
+from typing import AsyncIterator, Iterator, AsyncGenerator
def filter_json(text: str) -> str:
"""
@@ -42,6 +47,40 @@ def filter_none(**kwargs) -> dict:
if value is not None
}
-async def cast_iter_async(iter: Iterable) -> AsyncIterator:
- for chunk in iter:
- yield chunk
\ No newline at end of file
+async def safe_aclose(generator: AsyncGenerator) -> None:
+ try:
+ await generator.aclose()
+ except Exception as e:
+ logging.warning(f"Error while closing generator: {e}")
+
+# Helper function to convert an async generator to a synchronous iterator
+def to_sync_iter(async_gen: AsyncIterator) -> Iterator:
+ q = queue.Queue()
+ loop = asyncio.new_event_loop()
+ done = object()
+
+ def _run():
+ asyncio.set_event_loop(loop)
+
+ async def iterate():
+ try:
+ async for item in async_gen:
+ q.put(item)
+ finally:
+ q.put(done)
+
+ loop.run_until_complete(iterate())
+ loop.close()
+
+    threading.Thread(target=_run, daemon=True).start()
+
+ while True:
+ item = q.get()
+ if item is done:
+ break
+ yield item
+
+# Helper function to convert a synchronous iterator to an async iterator
+async def to_async_iterator(iterator: Iterator) -> AsyncIterator:
+ for item in iterator:
+ yield item
\ No newline at end of file
diff --git a/g4f/client/service.py b/g4f/client/service.py
index 5fdb150c..aa209b22 100644
--- a/g4f/client/service.py
+++ b/g4f/client/service.py
@@ -55,7 +55,6 @@ def get_model_and_provider(model : Union[Model, str],
provider = convert_to_provider(provider)
if isinstance(model, str):
-
if model in ModelUtils.convert:
model = ModelUtils.convert[model]
@@ -75,11 +74,11 @@ def get_model_and_provider(model : Union[Model, str],
if not ignore_working and not provider.working:
raise ProviderNotWorkingError(f'{provider.__name__} is not working')
- if not ignore_working and isinstance(provider, BaseRetryProvider):
- provider.providers = [p for p in provider.providers if p.working]
-
- if ignored and isinstance(provider, BaseRetryProvider):
- provider.providers = [p for p in provider.providers if p.__name__ not in ignored]
+ if isinstance(provider, BaseRetryProvider):
+ if not ignore_working:
+ provider.providers = [p for p in provider.providers if p.working]
+ if ignored:
+ provider.providers = [p for p in provider.providers if p.__name__ not in ignored]
if not ignore_stream and not provider.supports_stream and stream:
raise StreamNotSupportedError(f'{provider.__name__} does not support "stream" argument')
@@ -95,7 +94,7 @@ def get_model_and_provider(model : Union[Model, str],
return model, provider
-def get_last_provider(as_dict: bool = False) -> Union[ProviderType, dict[str, str]]:
+def get_last_provider(as_dict: bool = False) -> Union[ProviderType, dict[str, str], None]:
"""
Retrieves the last used provider.
@@ -108,11 +107,14 @@ def get_last_provider(as_dict: bool = False) -> Union[ProviderType, dict[str, st
last = debug.last_provider
if isinstance(last, BaseRetryProvider):
last = last.last_provider
- if last and as_dict:
- return {
- "name": last.__name__,
- "url": last.url,
- "model": debug.last_model,
- "label": last.label if hasattr(last, "label") else None
- }
+ if as_dict:
+ if last:
+ return {
+ "name": last.__name__,
+ "url": last.url,
+ "model": debug.last_model,
+                "label": getattr(last, "label", None)
+ }
+ else:
+ return {}
return last
\ No newline at end of file
diff --git a/g4f/client/stubs.py b/g4f/client/stubs.py
index 8cf2bcba..b38c9f6c 100644
--- a/g4f/client/stubs.py
+++ b/g4f/client/stubs.py
@@ -1,6 +1,7 @@
from __future__ import annotations
from typing import Union
+from time import time
class Model():
...
@@ -108,8 +109,18 @@ class Image(Model):
return self.__dict__
class ImagesResponse(Model):
- def __init__(self, data: list[Image], created: int = 0) -> None:
+ data: list[Image]
+ model: str
+ provider: str
+ created: int
+
+ def __init__(self, data: list[Image], created: int = None, model: str = None, provider: str = None) -> None:
self.data = data
+ if created is None:
+ created = int(time())
+ self.model = model
+        # Set unconditionally so `.provider` always exists (avoids AttributeError)
+        self.provider = provider
self.created = created
def to_json(self):
diff --git a/g4f/client/types.py b/g4f/client/types.py
index 100be432..4f252ba9 100644
--- a/g4f/client/types.py
+++ b/g4f/client/types.py
@@ -11,7 +11,17 @@ Proxies = Union[dict, str]
IterResponse = Iterator[Union[ChatCompletion, ChatCompletionChunk]]
AsyncIterResponse = AsyncIterator[Union[ChatCompletion, ChatCompletionChunk]]
-class ClientProxyMixin():
+class Client():
+ def __init__(
+ self,
+ api_key: str = None,
+ proxies: Proxies = None,
+ **kwargs
+ ) -> None:
+ self.api_key: str = api_key
+        self.proxies: Proxies = proxies
+ self.proxy: str = self.get_proxy()
+
def get_proxy(self) -> Union[str, None]:
if isinstance(self.proxies, str):
return self.proxies
@@ -20,14 +30,4 @@ class ClientProxyMixin():
elif "all" in self.proxies:
return self.proxies["all"]
elif "https" in self.proxies:
- return self.proxies["https"]
-
-class Client(ClientProxyMixin):
- def __init__(
- self,
- api_key: str = None,
- proxies: Proxies = None,
- **kwargs
- ) -> None:
- self.api_key: str = api_key
- self.proxies: Proxies = proxies
\ No newline at end of file
+ return self.proxies["https"]
\ No newline at end of file
diff --git a/g4f/gui/server/internet.py b/g4f/gui/server/internet.py
index 1e366e46..b41b5eae 100644
--- a/g4f/gui/server/internet.py
+++ b/g4f/gui/server/internet.py
@@ -96,7 +96,7 @@ async def fetch_and_scrape(session: ClientSession, url: str, max_words: int = No
async def search(query: str, n_results: int = 5, max_words: int = 2500, add_text: bool = True) -> SearchResults:
if not has_requirements:
- raise MissingRequirementsError('Install "duckduckgo-search" and "beautifulsoup4" package')
+ raise MissingRequirementsError('Install "duckduckgo-search" and "beautifulsoup4" package | pip install -U g4f[search]')
with DDGS() as ddgs:
results = []
for result in ddgs.text(
diff --git a/g4f/models.py b/g4f/models.py
index 0a7eed35..8825242f 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -2,8 +2,6 @@ from __future__ import annotations
from dataclasses import dataclass
-from .Provider.not_working import Ai4Chat
-
from .Provider import IterListProvider, ProviderType
from .Provider import (
AIChatFree,
@@ -19,12 +17,10 @@ from .Provider import (
DDG,
DeepInfraChat,
Free2GPT,
- FreeGpt,
FreeNetfly,
+ GigaChat,
Gemini,
GeminiPro,
- GizAI,
- GigaChat,
HuggingChat,
HuggingFace,
Liaobots,
@@ -42,7 +38,6 @@ from .Provider import (
Upstage,
)
-
@dataclass(unsafe_hash=True)
class Model:
"""
@@ -62,7 +57,6 @@ class Model:
"""Returns a list of all model names."""
return _all_models
-
### Default ###
default = Model(
name = "",
@@ -85,8 +79,6 @@ default = Model(
])
)
-
-
############
### Text ###
############
@@ -115,29 +107,15 @@ gpt_4o_mini = Model(
gpt_4_turbo = Model(
name = 'gpt-4-turbo',
base_provider = 'OpenAI',
- best_provider = IterListProvider([ChatGpt, Airforce, Liaobots, Bing])
+ best_provider = IterListProvider([Liaobots, Bing])
)
gpt_4 = Model(
name = 'gpt-4',
base_provider = 'OpenAI',
- best_provider = IterListProvider([Mhystical, Chatgpt4Online, ChatGpt, Bing, OpenaiChat, gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider])
+ best_provider = IterListProvider([Chatgpt4Online, Bing, OpenaiChat, DDG, Liaobots, Airforce])
)
-# o1
-o1 = Model(
- name = 'o1',
- base_provider = 'OpenAI',
- best_provider = None
-)
-
-o1_mini = Model(
- name = 'o1-mini',
- base_provider = 'OpenAI',
- best_provider = None
-)
-
-
### GigaChat ###
gigachat = Model(
name = 'GigaChat:latest',
@@ -145,7 +123,6 @@ gigachat = Model(
best_provider = GigaChat
)
-
### Meta ###
meta = Model(
name = "meta-ai",
@@ -157,13 +134,13 @@ meta = Model(
llama_2_7b = Model(
name = "llama-2-7b",
base_provider = "Meta Llama",
- best_provider = IterListProvider([Cloudflare, Airforce])
+ best_provider = Cloudflare
)
# llama 3
llama_3_8b = Model(
name = "llama-3-8b",
base_provider = "Meta Llama",
- best_provider = IterListProvider([Cloudflare])
+ best_provider = Cloudflare
)
# llama 3.1
@@ -198,13 +175,6 @@ llama_3_2_11b = Model(
best_provider = IterListProvider([HuggingChat, HuggingFace])
)
-### Mistral ###
-mistral_7b = Model(
- name = "mistral-7b",
- base_provider = "Mistral",
- best_provider = IterListProvider([Free2GPT])
-)
-
mixtral_8x7b = Model(
name = "mixtral-8x7b",
base_provider = "Mistral",
@@ -217,27 +187,12 @@ mistral_nemo = Model(
best_provider = IterListProvider([HuggingChat, HuggingFace])
)
-
-### NousResearch ###
-hermes_2_pro = Model(
- name = "hermes-2-pro",
- base_provider = "NousResearch",
- best_provider = Airforce
-)
-
-hermes_2_dpo = Model(
- name = "hermes-2-dpo",
- base_provider = "NousResearch",
- best_provider = Airforce
-)
-
hermes_3 = Model(
name = "hermes-3",
base_provider = "NousResearch",
best_provider = IterListProvider([HuggingChat, HuggingFace])
)
-
### Microsoft ###
phi_2 = Model(
name = "phi-2",
@@ -256,13 +211,13 @@ phi_3_5_mini = Model(
gemini_pro = Model(
name = 'gemini-pro',
base_provider = 'Google DeepMind',
- best_provider = IterListProvider([GeminiPro, Blackbox, AIChatFree, FreeGpt, Liaobots])
+ best_provider = IterListProvider([GeminiPro, Blackbox, AIChatFree, Liaobots])
)
gemini_flash = Model(
name = 'gemini-flash',
base_provider = 'Google DeepMind',
- best_provider = IterListProvider([Blackbox, GizAI, Liaobots])
+ best_provider = IterListProvider([Blackbox, Liaobots])
)
gemini = Model(
@@ -278,7 +233,6 @@ gemma_2b = Model(
best_provider = ReplicateHome
)
-
### Anthropic ###
claude_2_1 = Model(
name = 'claude-2.1',
@@ -290,13 +244,13 @@ claude_2_1 = Model(
claude_3_opus = Model(
name = 'claude-3-opus',
base_provider = 'Anthropic',
- best_provider = IterListProvider([Liaobots])
+ best_provider = Liaobots
)
claude_3_sonnet = Model(
name = 'claude-3-sonnet',
base_provider = 'Anthropic',
- best_provider = IterListProvider([Liaobots])
+ best_provider = Liaobots
)
claude_3_haiku = Model(
@@ -312,7 +266,6 @@ claude_3_5_sonnet = Model(
best_provider = IterListProvider([Blackbox, Liaobots])
)
-
### Reka AI ###
reka_core = Model(
name = 'reka-core',
@@ -320,7 +273,6 @@ reka_core = Model(
best_provider = Reka
)
-
### Blackbox AI ###
blackboxai = Model(
name = 'blackboxai',
@@ -341,7 +293,6 @@ command_r_plus = Model(
best_provider = HuggingChat
)
-
### Qwen ###
# qwen 1_5
qwen_1_5_7b = Model(
@@ -477,7 +428,6 @@ german_7b = Model(
best_provider = Airforce
)
-
### HuggingFaceH4 ###
zephyr_7b = Model(
name = 'zephyr-7b',
@@ -492,8 +442,6 @@ neural_7b = Model(
best_provider = Airforce
)
-
-
#############
### Image ###
#############
@@ -527,66 +475,55 @@ flux = Model(
name = 'flux',
base_provider = 'Flux AI',
best_provider = IterListProvider([Blackbox, AIUncensored, Airforce])
-
)
flux_pro = Model(
name = 'flux-pro',
base_provider = 'Flux AI',
- best_provider = IterListProvider([Airforce])
-
+ best_provider = Airforce
)
flux_realism = Model(
name = 'flux-realism',
base_provider = 'Flux AI',
- best_provider = IterListProvider([Airforce])
-
+ best_provider = Airforce
)
flux_anime = Model(
name = 'flux-anime',
base_provider = 'Flux AI',
best_provider = Airforce
-
)
flux_3d = Model(
name = 'flux-3d',
base_provider = 'Flux AI',
best_provider = Airforce
-
)
flux_disney = Model(
name = 'flux-disney',
base_provider = 'Flux AI',
best_provider = Airforce
-
)
flux_pixel = Model(
name = 'flux-pixel',
base_provider = 'Flux AI',
best_provider = Airforce
-
)
flux_4o = Model(
name = 'flux-4o',
base_provider = 'Flux AI',
best_provider = Airforce
-
)
-
-
### Other ###
any_dark = Model(
name = 'any-dark',
base_provider = '',
best_provider = Airforce
-
)
class ModelUtils:
@@ -596,13 +533,15 @@ class ModelUtils:
Attributes:
convert (dict[str, Model]): Dictionary mapping model string identifiers to Model instances.
"""
- convert: dict[str, Model] = {
-
+ convert: dict[str, Model] = {
############
### Text ###
############
-
+
### OpenAI ###
+ # gpt-3
+ 'gpt-3': gpt_35_turbo,
+
# gpt-3.5
'gpt-3.5-turbo': gpt_35_turbo,
@@ -612,11 +551,6 @@ class ModelUtils:
'gpt-4': gpt_4,
'gpt-4-turbo': gpt_4_turbo,
- # o1
- 'o1': o1,
- 'o1-mini': o1_mini,
-
-
### Meta ###
"meta-ai": meta,
@@ -636,32 +570,25 @@ class ModelUtils:
'llama-3.2-11b': llama_3_2_11b,
### Mistral ###
- 'mistral-7b': mistral_7b,
'mixtral-8x7b': mixtral_8x7b,
'mistral-nemo': mistral_nemo,
-
-
- ### NousResearch ###
- 'hermes-2-pro': hermes_2_pro,
- 'hermes-2-dpo': hermes_2_dpo,
- 'hermes-3': hermes_3,
-
+ ### NousResearch ###
+ 'hermes-3': hermes_3,
+
### Microsoft ###
'phi-2': phi_2,
'phi-3.5-mini': phi_3_5_mini,
-
### Google ###
# gemini
'gemini': gemini,
'gemini-pro': gemini_pro,
'gemini-flash': gemini_flash,
-
+
# gemma
'gemma-2b': gemma_2b,
-
### Anthropic ###
'claude-2.1': claude_2_1,
@@ -672,115 +599,64 @@ class ModelUtils:
# claude 3.5
'claude-3.5-sonnet': claude_3_5_sonnet,
-
-
+
### Reka AI ###
'reka-core': reka_core,
-
-
+
### Blackbox AI ###
'blackboxai': blackboxai,
'blackboxai-pro': blackboxai_pro,
-
-
+
### CohereForAI ###
'command-r+': command_r_plus,
-
### GigaChat ###
'gigachat': gigachat,
-
-
-
- ### Qwen ###
- # qwen 1.5
+
'qwen-1.5-7b': qwen_1_5_7b,
-
- # qwen 2
'qwen-2-72b': qwen_2_72b,
-
- # qwen 2.5
- 'qwen-2.5-coder-32b': qwen_2_5_coder_32b,
-
### Upstage ###
- 'solar-mini': solar_mini,
'solar-pro': solar_pro,
-
### Inflection ###
'pi': pi,
-
- ### DeepSeek ###
- 'deepseek-coder': deepseek_coder,
-
-
### Yorickvp ###
'llava-13b': llava_13b,
-
### WizardLM ###
'wizardlm-2-8x22b': wizardlm_2_8x22b,
-
-
+
### OpenChat ###
'openchat-3.5': openchat_3_5,
-
-
+
### x.ai ###
'grok-2': grok_2,
'grok-2-mini': grok_2_mini,
'grok-beta': grok_beta,
-
-
+
### Perplexity AI ###
'sonar-online': sonar_online,
'sonar-chat': sonar_chat,
-
-
+
### TheBloke ###
'german-7b': german_7b,
-
-
+
### Nvidia ###
'nemotron-70b': nemotron_70b,
-
- ### Teknium ###
- 'openhermes-2.5': openhermes_2_5,
-
-
- ### Liquid ###
- 'lfm-40b': lfm_40b,
-
-
- ### DiscoResearch ###
- 'german-7b': german_7b,
-
-
- ### HuggingFaceH4 ###
- 'zephyr-7b': zephyr_7b,
-
-
- ### Inferless ###
- 'neural-7b': neural_7b,
-
-
-
#############
### Image ###
#############
-
+
### Stability AI ###
'sdxl': sdxl,
'sd-3': sd_3,
-
-
+
### Playground ###
'playground-v2.5': playground_v2_5,
-
### Flux AI ###
'flux': flux,
'flux-pro': flux_pro,
@@ -791,9 +667,8 @@ class ModelUtils:
'flux-pixel': flux_pixel,
'flux-4o': flux_4o,
-
### Other ###
'any-dark': any_dark,
}
-_all_models = list(ModelUtils.convert.keys())
+_all_models = list(ModelUtils.convert.keys())
\ No newline at end of file
diff --git a/g4f/providers/base_provider.py b/g4f/providers/base_provider.py
index 5d48f2e0..128fb5a0 100644
--- a/g4f/providers/base_provider.py
+++ b/g4f/providers/base_provider.py
@@ -2,11 +2,13 @@ from __future__ import annotations
import sys
import asyncio
+
from asyncio import AbstractEventLoop
from concurrent.futures import ThreadPoolExecutor
from abc import abstractmethod
from inspect import signature, Parameter
from typing import Callable, Union
+
from ..typing import CreateResult, AsyncResult, Messages
from .types import BaseProvider, FinishReason
from ..errors import NestAsyncioError, ModelNotSupportedError
@@ -17,6 +19,17 @@ if sys.version_info < (3, 10):
else:
from types import NoneType
+try:
+ import nest_asyncio
+ has_nest_asyncio = True
+except ImportError:
+ has_nest_asyncio = False
+try:
+ import uvloop
+ has_uvloop = True
+except ImportError:
+ has_uvloop = False
+
# Set Windows event loop policy for better compatibility with asyncio and curl_cffi
if sys.platform == 'win32':
try:
@@ -31,18 +44,14 @@ def get_running_loop(check_nested: bool) -> Union[AbstractEventLoop, None]:
try:
loop = asyncio.get_running_loop()
# Do not patch uvloop loop because its incompatible.
- try:
- import uvloop
+ if has_uvloop:
if isinstance(loop, uvloop.Loop):
- return loop
- except (ImportError, ModuleNotFoundError):
- pass
- if check_nested and not hasattr(loop.__class__, "_nest_patched"):
- try:
- import nest_asyncio
+ return loop
+ if not hasattr(loop.__class__, "_nest_patched"):
+ if has_nest_asyncio:
nest_asyncio.apply(loop)
- except ImportError:
- raise NestAsyncioError('Install "nest_asyncio" package')
+ elif check_nested:
+ raise NestAsyncioError('Install "nest_asyncio" package | pip install -U nest_asyncio')
return loop
except RuntimeError:
pass
@@ -154,7 +163,7 @@ class AsyncProvider(AbstractProvider):
Returns:
CreateResult: The result of the completion creation.
"""
- get_running_loop(check_nested=True)
+ get_running_loop(check_nested=False)
yield asyncio.run(cls.create_async(model, messages, **kwargs))
@staticmethod
@@ -208,7 +217,7 @@ class AsyncGeneratorProvider(AsyncProvider):
Returns:
CreateResult: The result of the streaming completion creation.
"""
- loop = get_running_loop(check_nested=True)
+ loop = get_running_loop(check_nested=False)
new_loop = False
if loop is None:
loop = asyncio.new_event_loop()
@@ -222,7 +231,7 @@ class AsyncGeneratorProvider(AsyncProvider):
while True:
yield loop.run_until_complete(await_callback(gen.__anext__))
except StopAsyncIteration:
- ...
+ pass
finally:
if new_loop:
loop.close()
diff --git a/g4f/providers/types.py b/g4f/providers/types.py
index 69941a26..e7ca32ee 100644
--- a/g4f/providers/types.py
+++ b/g4f/providers/types.py
@@ -3,6 +3,7 @@ from __future__ import annotations
from abc import ABC, abstractmethod
from typing import Union, Dict, Type
from ..typing import Messages, CreateResult
+from .conversation import BaseConversation
class BaseProvider(ABC):
"""
diff --git a/g4f/requests/__init__.py b/g4f/requests/__init__.py
index 80fc44b3..a8c0e286 100644
--- a/g4f/requests/__init__.py
+++ b/g4f/requests/__init__.py
@@ -1,5 +1,8 @@
from __future__ import annotations
+from urllib.parse import urlparse
+from typing import Iterator
+from http.cookies import Morsel
try:
from curl_cffi.requests import Session, Response
from .curl_cffi import StreamResponse, StreamSession, FormData
@@ -14,11 +17,19 @@ try:
has_webview = True
except ImportError:
has_webview = False
+try:
+ import nodriver
+ from nodriver.cdp.network import CookieParam
+ has_nodriver = True
+except ImportError:
+ has_nodriver = False
+from .. import debug
from .raise_for_status import raise_for_status
from ..webdriver import WebDriver, WebDriverSession
from ..webdriver import bypass_cloudflare, get_driver_cookies
from ..errors import MissingRequirementsError
+from ..typing import Cookies
from .defaults import DEFAULT_HEADERS, WEBVIEW_HAEDERS
async def get_args_from_webview(url: str) -> dict:
@@ -105,4 +116,53 @@ def get_session_from_browser(url: str, webdriver: WebDriver = None, proxy: str =
proxies={"https": proxy, "http": proxy},
timeout=timeout,
impersonate="chrome"
- )
\ No newline at end of file
+ )
+def get_cookie_params_from_dict(cookies: Cookies, url: str = None, domain: str = None) -> list[CookieParam]:
+    return [CookieParam.from_json({
+ "name": key,
+ "value": value,
+ "url": url,
+ "domain": domain
+ }) for key, value in cookies.items()]
+
+async def get_args_from_nodriver(
+ url: str,
+ proxy: str = None,
+ timeout: int = 120,
+ cookies: Cookies = None
+) -> dict:
+ if not has_nodriver:
+ raise MissingRequirementsError('Install "nodriver" package | pip install -U nodriver')
+ if debug.logging:
+ print(f"Open nodriver with url: {url}")
+ browser = await nodriver.start(
+ browser_args=None if proxy is None else [f"--proxy-server={proxy}"],
+ )
+ domain = urlparse(url).netloc
+ if cookies is None:
+ cookies = {}
+ else:
+ await browser.cookies.set_all(get_cookie_params_from_dict(cookies, url=url, domain=domain))
+ page = await browser.get(url)
+ for c in await browser.cookies.get_all():
+ if c.domain.endswith(domain):
+ cookies[c.name] = c.value
+ user_agent = await page.evaluate("window.navigator.userAgent")
+ await page.wait_for("body:not(.no-js)", timeout=timeout)
+ await page.close()
+ browser.stop()
+ return {
+ "cookies": cookies,
+ "headers": {
+ **DEFAULT_HEADERS,
+ "user-agent": user_agent,
+ "referer": url,
+ },
+ "proxy": proxy
+ }
+
+def merge_cookies(cookies: Iterator[Morsel], response: Response) -> Cookies:
+ if cookies is None:
+ cookies = {}
+    cookies.update({cookie.name: cookie.value for cookie in response.cookies.jar})
+    return cookies
\ No newline at end of file
diff --git a/g4f/requests/raise_for_status.py b/g4f/requests/raise_for_status.py
index 0e91505e..1699d9a4 100644
--- a/g4f/requests/raise_for_status.py
+++ b/g4f/requests/raise_for_status.py
@@ -11,6 +11,8 @@ class CloudflareError(ResponseStatusError):
...
def is_cloudflare(text: str) -> bool:
+ if "Attention Required! | Cloudflare" in text:
+ return True
    return ('<div id="cf-please-wait">' in text
        or "<title>Just a moment...</title>" in text)
def is_openai(text: str) -> bool:
diff --git a/requirements-min.txt b/requirements-min.txt
index 483e4c7c..3923c556 100644
--- a/requirements-min.txt
+++ b/requirements-min.txt
@@ -2,6 +2,4 @@ requests
aiohttp
brotli
pycryptodome
-curl_cffi>=0.6.2
-nest_asyncio
-cloudscraper
+nest_asyncio
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 48bbede9..b35f9754 100644
--- a/setup.py
+++ b/setup.py
@@ -13,8 +13,7 @@ INSTALL_REQUIRE = [
"aiohttp",
"brotli",
"pycryptodome",
- "curl_cffi>=0.6.2",
- "cloudscraper" # Cloudflare
+ "nest_asyncio",
]
EXTRA_REQUIRE = {
@@ -22,18 +21,10 @@ EXTRA_REQUIRE = {
"curl_cffi>=0.6.2",
"certifi",
"browser_cookie3", # get_cookies
- "PyExecJS", # GptForLove, Vercel
"duckduckgo-search>=5.0" ,# internet.search
"beautifulsoup4", # internet.search and bing.create_images
"brotli", # openai, bing
- # webdriver
- #"undetected-chromedriver>=3.5.5",
- #"setuptools",
- #"selenium-wire"
- # webview
- "pywebview",
"platformdirs",
- "plyer",
"cryptography",
"aiohttp_socks", # proxy
"pillow", # image
@@ -41,7 +32,8 @@ EXTRA_REQUIRE = {
"werkzeug", "flask", # gui
"fastapi", # api
"uvicorn", "nest_asyncio", # api
- "pycryptodome" # openai
+ "pycryptodome", # openai
+ "nodriver",
],
"image": [
"pillow",
@@ -60,12 +52,9 @@ EXTRA_REQUIRE = {
"plyer",
"cryptography"
],
- "openai": [
- "pycryptodome"
- ],
"api": [
"loguru", "fastapi",
- "uvicorn", "nest_asyncio"
+ "uvicorn",
],
"gui": [
"werkzeug", "flask",