fix: update provider integrations, recipient logic, and auth handling

- In **FreeRouter.py**, change the `working` flag from `False` to `True`.
- In **LMArenaProvider.py**, replace the `.rstrip("▌")` call with an explicit check that, if the content ends with `▌`, slices off the final two characters.
- In **hf_space/__init__.py**, update the async generator call to pass the `media` parameter instead of `images`.
- In **OpenaiChat.py**:
  - Broaden the citation replacement regex from hardcoded `turn0` markers to `turn[0-9]+` (any turn number), and add a bare `turn[0-9]+search` alternative.
  - Gate text and metadata processing on `fields.recipient == "all"` instead of the `fields.is_recipient` boolean.
  - Add a new branch to process `/message/metadata/content_references` for adding source links.
  - Replace the `self.is_recipient` flag in the conversation initializer with a `self.recipient` attribute defaulting to `"all"`.
  - Change the auth check from using `cls._api_key` to checking `cls.request_config.access_token`.
- In **chat.v1.js**, build the QR code iframe URL from `window.conversation_id` when it is set, falling back to the plain `/qrcode` path otherwise.
- In **raise_for_status.py**, raise `MissingAuthError` instead of `ResponseStatusError` for 403 responses identified as OpenAI bot detection, so they surface as authentication failures.
hlohaus 2025-04-17 03:26:50 +02:00
parent 06546649db
commit 90ef870345
6 changed files with 20 additions and 14 deletions

**FreeRouter.py**

```diff
@@ -6,4 +6,4 @@ class FreeRouter(OpenaiTemplate):
     label = "CablyAI FreeRouter"
     url = "https://freerouter.cablyai.com"
     api_base = "https://freerouter.cablyai.com/v1"
-    working = False
+    working = True
```

**LMArenaProvider.py**

```diff
@@ -229,7 +229,9 @@ class LMArenaProvider(AsyncGeneratorProvider, ProviderModelMixin, AuthFileMixin)
                     if len(data) > 2:
                         if isinstance(data[2], list):
                             data[2] = data[2][-1]
-                        content = data[2][text_position:].rstrip("▌")
+                        content = data[2][text_position:]
+                        if content.endswith("▌"):
+                            content = content[:-2]
                         if content:
                             count += 1
                             yield count, content
```
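For context, `rstrip("▌")` strips every trailing cursor glyph but nothing else, while the new check removes the marker exactly once; the two-character slice implies the stream places one character (likely a space) before the glyph. A minimal sketch of the difference, using an assumed chunk value:

```python
# Minimal sketch; the sample chunk is an assumption, not captured from the
# LMArena stream.
chunk = "Hello world ▌"
print(chunk.rstrip("▌"))   # "Hello world " -- only the glyph is stripped
if chunk.endswith("▌"):
    print(chunk[:-2])      # "Hello world" -- the glyph and the character
                           # before it are dropped, matching the [:-2] slice
```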

**hf_space/__init__.py**

```diff
@@ -88,7 +88,7 @@ class HuggingSpace(AsyncGeneratorProvider, ProviderModelMixin):
         for provider in cls.providers:
             if model in provider.get_models():
                 try:
-                    async for chunk in provider.create_async_generator(model, messages, images=images, **kwargs):
+                    async for chunk in provider.create_async_generator(model, messages, media=media, **kwargs):
                         is_started = True
                         yield chunk
                     if is_started:
```
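Callers still passing the old `images=` keyword would break if the signature now only accepts `media`; a hedged compatibility shim (not part of the commit, helper name invented) could bridge the rename:

```python
# Hypothetical bridge for callers still using the old keyword; `media` and
# `images` come from the diff, the helper itself is illustrative only.
def normalize_media_kwargs(**kwargs):
    if "media" not in kwargs and "images" in kwargs:
        kwargs["media"] = kwargs.pop("images")
    return kwargs

print(normalize_media_kwargs(images=["photo.png"]))  # {'media': ['photo.png']}
```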

**OpenaiChat.py**

```diff
@@ -447,7 +447,7 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
                             link = sources.list[int(match.group(1))]["url"]
                             return f"[[{int(match.group(1))+1}]]({link})"
                         return f" [{int(match.group(1))+1}]"
-                    buffer = re.sub(r'(?:cite\nturn0search|cite\nturn0news|turn0news)(\d+)', replacer, buffer)
+                    buffer = re.sub(r'(?:cite\nturn[0-9]+search|cite\nturn[0-9]+news|turn[0-9]+news|turn[0-9]+search)(\d+)', replacer, buffer)
                 else:
                     continue
             yield buffer
```
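A quick demonstration of why the widened pattern matters: citation markers for any turn beyond the first were previously left in the buffer. The replacer lambda below stands in for the fallback branch of the real `replacer`:

```python
import re

old = r'(?:cite\nturn0search|cite\nturn0news|turn0news)(\d+)'
new = r'(?:cite\nturn[0-9]+search|cite\nturn[0-9]+news|turn[0-9]+news|turn[0-9]+search)(\d+)'
buffer = "see cite\nturn2search3"  # invented sample marker for turn 2

print(re.sub(old, lambda m: f" [{int(m.group(1)) + 1}]", buffer))  # unchanged: only turn0 matched
print(re.sub(new, lambda m: f" [{int(m.group(1)) + 1}]", buffer))  # "see  [4]"
```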
```diff
@@ -501,23 +501,27 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
                         return
                     if "v" in line:
                         v = line.get("v")
-                        if isinstance(v, str) and fields.is_recipient:
+                        if isinstance(v, str) and fields.recipient == "all":
                             if "p" not in line or line.get("p") == "/message/content/parts/0":
                                 yield Reasoning(token=v) if fields.is_thinking else v
                         elif isinstance(v, list):
                             for m in v:
-                                if m.get("p") == "/message/content/parts/0" and fields.is_recipient:
+                                if m.get("p") == "/message/content/parts/0" and fields.recipient == "all":
                                     yield m.get("v")
                                 elif m.get("p") == "/message/metadata/search_result_groups":
                                     for entry in [p.get("entries") for p in m.get("v")]:
                                         for link in entry:
                                             sources.add_source(link)
+                                elif m.get("p") == "/message/metadata/content_references":
+                                    for entry in m.get("v"):
+                                        for link in entry.get("sources", []):
+                                            sources.add_source(link)
                                 elif m.get("p") and re.match(r"^/message/metadata/content_references/\d+$", m.get("p")):
                                     sources.add_source(m.get("v"))
                                 elif m.get("p") == "/message/metadata/finished_text":
                                     fields.is_thinking = False
                                     yield Reasoning(status=m.get("v"))
-                                elif m.get("p") == "/message/metadata":
+                                elif m.get("p") == "/message/metadata" and fields.recipient == "all":
                                     fields.finish_reason = m.get("v", {}).get("finish_details", {}).get("type")
                                     break
                         elif isinstance(v, dict):
```
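The expected shape of a `content_references` delta can be inferred from the parsing code above: a list of entries, each carrying a `sources` list of link objects. A sketch with an invented payload (only the `p` path and the `sources` key are taken from the diff):

```python
# Invented payload; the link fields are illustrative, not documented.
collected = []
m = {
    "p": "/message/metadata/content_references",
    "v": [{"sources": [{"url": "https://example.com", "title": "Example"}]}],
}
if m.get("p") == "/message/metadata/content_references":
    for entry in m.get("v"):
        for link in entry.get("sources", []):
            collected.append(link)  # the real code calls sources.add_source(link)
print(collected)  # [{'url': 'https://example.com', 'title': 'Example'}]
```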
```diff
@@ -525,8 +529,8 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
                                 fields.conversation_id = v.get("conversation_id")
                                 debug.log(f"OpenaiChat: New conversation: {fields.conversation_id}")
                             m = v.get("message", {})
-                            fields.is_recipient = m.get("recipient", "all") == "all"
-                            if fields.is_recipient:
+                            fields.recipient = m.get("recipient", fields.recipient)
+                            if fields.recipient == "all":
                                 c = m.get("content", {})
                                 if c.get("content_type") == "text" and m.get("author", {}).get("role") == "tool" and "initial_text" in m.get("metadata", {}):
                                     fields.is_thinking = True
```
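Taken together, these two hunks replace a boolean latch with the raw recipient string: the message header sets `fields.recipient`, and deltas are only surfaced while it equals `"all"`. A self-contained walkthrough with an invented event stream:

```python
# Invented events; the gating mirrors the two hunks above.
events = [
    {"v": {"message": {"recipient": "browser"}}},            # tool-targeted message
    {"v": "tool output", "p": "/message/content/parts/0"},   # suppressed
    {"v": {"message": {"recipient": "all"}}},                # user-visible message
    {"v": "visible text", "p": "/message/content/parts/0"},  # yielded
]
recipient = "all"  # the default the Conversation hunk below establishes
for line in events:
    v = line.get("v")
    if isinstance(v, dict):
        recipient = v.get("message", {}).get("recipient", recipient)
    elif isinstance(v, str) and recipient == "all":
        print(v)  # prints only "visible text"
```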
```diff
@@ -598,7 +602,7 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
                 raise NoValidHarFileError(f"Access token is not valid: {cls.request_config.access_token}")
         except NoValidHarFileError:
             if has_nodriver:
-                if cls._api_key is None:
+                if cls.request_config.access_token is None:
                     yield RequestLogin(cls.label, os.environ.get("G4F_LOGIN_URL", ""))
                     await cls.nodriver_auth(proxy)
             else:
@@ -717,7 +721,7 @@ class Conversation(JsonConversation):
         self.conversation_id = conversation_id
         self.message_id = message_id
         self.finish_reason = finish_reason
-        self.is_recipient = False
+        self.recipient = "all"
         self.parent_message_id = message_id if parent_message_id is None else parent_message_id
         self.user_id = user_id
         self.is_thinking = is_thinking
```

**chat.v1.js**

```diff
@@ -469,7 +469,7 @@ const register_message_buttons = async () => {
             el.dataset.click = true;
             const message_el = get_message_el(el);
             el.addEventListener("click", async () => {
-                iframe.src = `/qrcode/${window.conversation_id}#${message_el.dataset.index}`;
+                iframe.src = window.conversation_id ? `/qrcode/${window.conversation_id}#${message_el.dataset.index}` : '/qrcode';
                 iframe_container.classList.remove("hidden");
             });
         });
```
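The same fallback restated in Python for clarity; the two route paths come from the diff, the helper is illustrative:

```python
# Prefer the conversation-scoped QR endpoint, else the bare /qrcode path.
def qrcode_src(conversation_id, message_index):
    if conversation_id:
        return f"/qrcode/{conversation_id}#{message_index}"
    return "/qrcode"

assert qrcode_src("abc123", 2) == "/qrcode/abc123#2"
assert qrcode_src(None, 2) == "/qrcode"
```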

**raise_for_status.py**

```diff
@@ -44,7 +44,7 @@ async def raise_for_status_async(response: Union[StreamResponse, ClientResponse]
     if response.status == 403 and is_cloudflare(message):
         raise CloudflareError(f"Response {response.status}: Cloudflare detected")
     elif response.status == 403 and is_openai(message):
-        raise ResponseStatusError(f"Response {response.status}: OpenAI Bot detected")
+        raise MissingAuthError(f"Response {response.status}: OpenAI Bot detected")
     elif response.status == 502:
         raise ResponseStatusError(f"Response {response.status}: Bad Gateway")
     elif response.status == 504:
@@ -71,7 +71,7 @@ def raise_for_status(response: Union[Response, StreamResponse, ClientResponse, R
     if response.status_code == 403 and is_cloudflare(response.text):
         raise CloudflareError(f"Response {response.status_code}: Cloudflare detected")
     elif response.status_code == 403 and is_openai(response.text):
-        raise ResponseStatusError(f"Response {response.status_code}: OpenAI Bot detected")
+        raise MissingAuthError(f"Response {response.status_code}: OpenAI Bot detected")
     elif response.status_code == 502:
         raise ResponseStatusError(f"Response {response.status_code}: Bad Gateway")
     elif response.status_code == 504:
```
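The practical effect of the new exception type is that a 403 bot-detection page now surfaces as an authentication problem rather than a generic status error, so callers can react by re-authenticating. A hedged sketch of such a caller; the retry wrapper is illustrative, only the exception class comes from the diff:

```python
# Import path assumed to match the package layout of g4f's errors module.
from g4f.errors import MissingAuthError

async def fetch_with_reauth(do_request, do_login):
    """Run a request once; re-login and retry on a bot-detection 403."""
    try:
        return await do_request()
    except MissingAuthError:
        await do_login()           # refresh cookies / access token
        return await do_request()  # retry once with fresh credentials
```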