diff --git a/g4f/Provider/FreeRouter.py b/g4f/Provider/FreeRouter.py index e41b5de1..f7283895 100644 --- a/g4f/Provider/FreeRouter.py +++ b/g4f/Provider/FreeRouter.py @@ -6,4 +6,4 @@ class FreeRouter(OpenaiTemplate): label = "CablyAI FreeRouter" url = "https://freerouter.cablyai.com" api_base = "https://freerouter.cablyai.com/v1" - working = False \ No newline at end of file + working = True \ No newline at end of file diff --git a/g4f/Provider/hf_space/LMArenaProvider.py b/g4f/Provider/hf_space/LMArenaProvider.py index 653954ec..43dae0f7 100644 --- a/g4f/Provider/hf_space/LMArenaProvider.py +++ b/g4f/Provider/hf_space/LMArenaProvider.py @@ -229,7 +229,9 @@ class LMArenaProvider(AsyncGeneratorProvider, ProviderModelMixin, AuthFileMixin) if len(data) > 2: if isinstance(data[2], list): data[2] = data[2][-1] - content = data[2][text_position:].rstrip("▌") + content = data[2][text_position:] + if content.endswith("▌"): + content = content[:-1] if content: count += 1 yield count, content diff --git a/g4f/Provider/hf_space/__init__.py b/g4f/Provider/hf_space/__init__.py index 0b6d7a78..1eda91fe 100644 --- a/g4f/Provider/hf_space/__init__.py +++ b/g4f/Provider/hf_space/__init__.py @@ -88,7 +88,7 @@ class HuggingSpace(AsyncGeneratorProvider, ProviderModelMixin): for provider in cls.providers: if model in provider.get_models(): try: - async for chunk in provider.create_async_generator(model, messages, images=images, **kwargs): + async for chunk in provider.create_async_generator(model, messages, media=media, **kwargs): is_started = True yield chunk if is_started: diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py index 1c6dcfac..05eba5c3 100644 --- a/g4f/Provider/needs_auth/OpenaiChat.py +++ b/g4f/Provider/needs_auth/OpenaiChat.py @@ -447,7 +447,7 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin): link = sources.list[int(match.group(1))]["url"] return f"[[{int(match.group(1))+1}]]({link})" return f" 
[{int(match.group(1))+1}]" - buffer = re.sub(r'(?:cite\nturn0search|cite\nturn0news|turn0news)(\d+)', replacer, buffer) + buffer = re.sub(r'(?:cite\nturn[0-9]+search|cite\nturn[0-9]+news|turn[0-9]+news|turn[0-9]+search)(\d+)', replacer, buffer) else: continue yield buffer @@ -501,23 +501,27 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin): return if "v" in line: v = line.get("v") - if isinstance(v, str) and fields.is_recipient: + if isinstance(v, str) and fields.recipient == "all": if "p" not in line or line.get("p") == "/message/content/parts/0": yield Reasoning(token=v) if fields.is_thinking else v elif isinstance(v, list): for m in v: - if m.get("p") == "/message/content/parts/0" and fields.is_recipient: + if m.get("p") == "/message/content/parts/0" and fields.recipient == "all": yield m.get("v") elif m.get("p") == "/message/metadata/search_result_groups": for entry in [p.get("entries") for p in m.get("v")]: for link in entry: sources.add_source(link) + elif m.get("p") == "/message/metadata/content_references": + for entry in m.get("v"): + for link in entry.get("sources", []): + sources.add_source(link) elif m.get("p") and re.match(r"^/message/metadata/content_references/\d+$", m.get("p")): sources.add_source(m.get("v")) elif m.get("p") == "/message/metadata/finished_text": fields.is_thinking = False yield Reasoning(status=m.get("v")) - elif m.get("p") == "/message/metadata": + elif m.get("p") == "/message/metadata" and fields.recipient == "all": fields.finish_reason = m.get("v", {}).get("finish_details", {}).get("type") break elif isinstance(v, dict): @@ -525,8 +529,8 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin): fields.conversation_id = v.get("conversation_id") debug.log(f"OpenaiChat: New conversation: {fields.conversation_id}") m = v.get("message", {}) - fields.is_recipient = m.get("recipient", "all") == "all" - if fields.is_recipient: + fields.recipient = m.get("recipient", fields.recipient) + if fields.recipient == "all": c = 
m.get("content", {}) if c.get("content_type") == "text" and m.get("author", {}).get("role") == "tool" and "initial_text" in m.get("metadata", {}): fields.is_thinking = True @@ -598,7 +602,7 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin): raise NoValidHarFileError(f"Access token is not valid: {cls.request_config.access_token}") except NoValidHarFileError: if has_nodriver: - if cls._api_key is None: + if cls.request_config.access_token is None: yield RequestLogin(cls.label, os.environ.get("G4F_LOGIN_URL", "")) await cls.nodriver_auth(proxy) else: @@ -717,7 +721,7 @@ class Conversation(JsonConversation): self.conversation_id = conversation_id self.message_id = message_id self.finish_reason = finish_reason - self.is_recipient = False + self.recipient = "all" self.parent_message_id = message_id if parent_message_id is None else parent_message_id self.user_id = user_id self.is_thinking = is_thinking diff --git a/g4f/gui/client/static/js/chat.v1.js b/g4f/gui/client/static/js/chat.v1.js index bca9aeff..bae64d7c 100644 --- a/g4f/gui/client/static/js/chat.v1.js +++ b/g4f/gui/client/static/js/chat.v1.js @@ -469,7 +469,7 @@ const register_message_buttons = async () => { el.dataset.click = true; const message_el = get_message_el(el); el.addEventListener("click", async () => { - iframe.src = `/qrcode/${window.conversation_id}#${message_el.dataset.index}`; + iframe.src = window.conversation_id ? 
`/qrcode/${window.conversation_id}#${message_el.dataset.index}` : '/qrcode'; iframe_container.classList.remove("hidden"); }); }); diff --git a/g4f/requests/raise_for_status.py b/g4f/requests/raise_for_status.py index 5bad765b..c2160095 100644 --- a/g4f/requests/raise_for_status.py +++ b/g4f/requests/raise_for_status.py @@ -44,7 +44,7 @@ async def raise_for_status_async(response: Union[StreamResponse, ClientResponse] if response.status == 403 and is_cloudflare(message): raise CloudflareError(f"Response {response.status}: Cloudflare detected") elif response.status == 403 and is_openai(message): - raise ResponseStatusError(f"Response {response.status}: OpenAI Bot detected") + raise MissingAuthError(f"Response {response.status}: OpenAI Bot detected") elif response.status == 502: raise ResponseStatusError(f"Response {response.status}: Bad Gateway") elif response.status == 504: @@ -71,7 +71,7 @@ def raise_for_status(response: Union[Response, StreamResponse, ClientResponse, R if response.status_code == 403 and is_cloudflare(response.text): raise CloudflareError(f"Response {response.status_code}: Cloudflare detected") elif response.status_code == 403 and is_openai(response.text): - raise ResponseStatusError(f"Response {response.status_code}: OpenAI Bot detected") + raise MissingAuthError(f"Response {response.status_code}: OpenAI Bot detected") elif response.status_code == 502: raise ResponseStatusError(f"Response {response.status_code}: Bad Gateway") elif response.status_code == 504: