fix: update provider integrations, recipient logic, and auth handling
- In **FreeRouter.py**, change the `working` flag from `False` to `True`.
- In **LMArenaProvider.py**, replace the `.rstrip("▌")` call with a manual check that, if the content ends with `▌`, slices off the final two characters.
- In **hf_space/__init__.py**, update the async generator call to pass the `media` parameter instead of `images`.
- In **OpenaiChat.py**:
- Modify the citation replacement regex to use `[0-9]+` (supporting any turn number) instead of a hardcoded `0`.
- Replace `fields.is_recipient` boolean checks with comparisons against `fields.recipient == "all"` for processing text and metadata (see the sketch after this list).
- Add a new branch to process `/message/metadata/content_references` for adding source links.
- Update the conversation initialization by replacing `self.is_recipient = False` with `self.recipient = "all"`.
- Change the auth check from using `cls._api_key` to checking `cls.request_config.access_token`.
- In **chat.v1.js**, adjust the QR code URL assignment to use `window.conversation_id` if available, else default to `/qrcode`.
- In **raise_for_status.py**, update error handling by replacing `ResponseStatusError` with `MissingAuthError` for 403 responses detected as OpenAI Bot.
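
A minimal sketch of the new recipient gating. `Fields` stands in for the provider's conversation state; the handler and sample payloads are invented for illustration and are not code from this commit:

```python
# Sketch only: `Fields` mimics the provider's conversation state.
class Fields:
    def __init__(self):
        self.recipient = "all"  # default, as in Conversation.__init__

def handle_message(fields: Fields, message: dict) -> None:
    # Keep the raw recipient instead of collapsing it to a boolean, so a
    # later delta can switch it back; fall back to the previous value.
    fields.recipient = message.get("recipient", fields.recipient)
    if fields.recipient == "all":
        print("visible:", message.get("content"))
    else:
        print("hidden tool traffic for:", fields.recipient)

fields = Fields()
handle_message(fields, {"recipient": "browser", "content": "search(...)"})
handle_message(fields, {"content": "still hidden"})            # recipient persists
handle_message(fields, {"recipient": "all", "content": "Hi"})  # visible again
```

Keeping the string also explains the persistence fix: a message without a `recipient` key inherits the previous value instead of silently resetting to a boolean default.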
Parent: 06546649db
Commit: 90ef870345
6 changed files with 20 additions and 14 deletions
**FreeRouter.py**

```diff
@@ -6,4 +6,4 @@ class FreeRouter(OpenaiTemplate):
     label = "CablyAI FreeRouter"
     url = "https://freerouter.cablyai.com"
     api_base = "https://freerouter.cablyai.com/v1"
-    working = False
+    working = True
```
**LMArenaProvider.py**

```diff
@@ -229,7 +229,9 @@ class LMArenaProvider(AsyncGeneratorProvider, ProviderModelMixin, AuthFileMixin)
     if len(data) > 2:
         if isinstance(data[2], list):
             data[2] = data[2][-1]
-        content = data[2][text_position:].rstrip("▌")
+        content = data[2][text_position:]
+        if content.endswith("▌"):
+            content = content[:-2]
         if content:
             count += 1
             yield count, content
```
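For reference, a tiny repro of the new trim (sample chunks invented). Unlike `rstrip("▌")`, which strips only trailing cursor glyphs, the slice removes the final two characters, which assumes the cursor token is two characters wide, e.g. a space plus the glyph:

```python
# Invented sample chunks; "▌" is the streaming-cursor glyph.
for chunk in ("Hello world ▌", "Hello world"):
    content = chunk
    if content.endswith("▌"):
        content = content[:-2]  # drops the glyph and the character before it
    print(repr(content))
# 'Hello world'
# 'Hello world'
```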
**hf_space/__init__.py**

```diff
@@ -88,7 +88,7 @@ class HuggingSpace(AsyncGeneratorProvider, ProviderModelMixin):
     for provider in cls.providers:
         if model in provider.get_models():
             try:
-                async for chunk in provider.create_async_generator(model, messages, images=images, **kwargs):
+                async for chunk in provider.create_async_generator(model, messages, media=media, **kwargs):
                     is_started = True
                     yield chunk
                 if is_started:
```
**OpenaiChat.py**

```diff
@@ -447,7 +447,7 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
             link = sources.list[int(match.group(1))]["url"]
             return f"[[{int(match.group(1))+1}]]({link})"
         return f" [{int(match.group(1))+1}]"
-    buffer = re.sub(r'(?:cite\nturn0search|cite\nturn0news|turn0news)(\d+)', replacer, buffer)
+    buffer = re.sub(r'(?:cite\nturn[0-9]+search|cite\nturn[0-9]+news|turn[0-9]+news|turn[0-9]+search)(\d+)', replacer, buffer)
 else:
     continue
 yield buffer
```
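A quick check of the widened pattern (buffer text invented, `replacer` stubbed): the old regex only matched `turn0…` citations, while the new one matches any turn number:

```python
import re

def replacer(match: re.Match) -> str:  # stub: the real code resolves a source URL
    return f"[{int(match.group(1)) + 1}]"

old = r'(?:cite\nturn0search|cite\nturn0news|turn0news)(\d+)'
new = r'(?:cite\nturn[0-9]+search|cite\nturn[0-9]+news|turn[0-9]+news|turn[0-9]+search)(\d+)'

buffer = "see cite\nturn2search3 for details"
print(re.sub(old, replacer, buffer) == buffer)  # True: turn2 never matched
print(re.sub(new, replacer, buffer))            # see [4] for details
```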
```diff
@@ -501,23 +501,27 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
             return
         if "v" in line:
             v = line.get("v")
-            if isinstance(v, str) and fields.is_recipient:
+            if isinstance(v, str) and fields.recipient == "all":
                 if "p" not in line or line.get("p") == "/message/content/parts/0":
                     yield Reasoning(token=v) if fields.is_thinking else v
             elif isinstance(v, list):
                 for m in v:
-                    if m.get("p") == "/message/content/parts/0" and fields.is_recipient:
+                    if m.get("p") == "/message/content/parts/0" and fields.recipient == "all":
                         yield m.get("v")
                     elif m.get("p") == "/message/metadata/search_result_groups":
                         for entry in [p.get("entries") for p in m.get("v")]:
                             for link in entry:
                                 sources.add_source(link)
+                    elif m.get("p") == "/message/metadata/content_references":
+                        for entry in m.get("v"):
+                            for link in entry.get("sources", []):
+                                sources.add_source(link)
                     elif m.get("p") and re.match(r"^/message/metadata/content_references/\d+$", m.get("p")):
                         sources.add_source(m.get("v"))
                     elif m.get("p") == "/message/metadata/finished_text":
                         fields.is_thinking = False
                         yield Reasoning(status=m.get("v"))
-                    elif m.get("p") == "/message/metadata":
+                    elif m.get("p") == "/message/metadata" and fields.recipient == "all":
                         fields.finish_reason = m.get("v", {}).get("finish_details", {}).get("type")
                         break
             elif isinstance(v, dict):
```
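The new branch handles a whole-list delta at `/message/metadata/content_references`, alongside the pre-existing per-index branch. A sketch with hypothetical payload shapes (the real wire format may carry more keys) and `add_source` stubbed:

```python
import re

def add_source(link: dict) -> None:  # stub for Sources.add_source
    print("source:", link.get("url"))

# Whole-list delta (new branch): every entry may carry a "sources" list.
m = {"p": "/message/metadata/content_references",
     "v": [{"sources": [{"url": "https://example.com/a"}]}]}
if m.get("p") == "/message/metadata/content_references":
    for entry in m.get("v"):
        for link in entry.get("sources", []):
            add_source(link)

# Per-index delta (pre-existing branch): one reference appended at an index.
m = {"p": "/message/metadata/content_references/0",
     "v": {"url": "https://example.com/b"}}
if m.get("p") and re.match(r"^/message/metadata/content_references/\d+$", m.get("p")):
    add_source(m.get("v"))
```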
```diff
@@ -525,8 +529,8 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
         fields.conversation_id = v.get("conversation_id")
         debug.log(f"OpenaiChat: New conversation: {fields.conversation_id}")
     m = v.get("message", {})
-    fields.is_recipient = m.get("recipient", "all") == "all"
-    if fields.is_recipient:
+    fields.recipient = m.get("recipient", fields.recipient)
+    if fields.recipient == "all":
         c = m.get("content", {})
         if c.get("content_type") == "text" and m.get("author", {}).get("role") == "tool" and "initial_text" in m.get("metadata", {}):
             fields.is_thinking = True
```
```diff
@@ -598,7 +602,7 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
                 raise NoValidHarFileError(f"Access token is not valid: {cls.request_config.access_token}")
         except NoValidHarFileError:
             if has_nodriver:
-                if cls._api_key is None:
+                if cls.request_config.access_token is None:
                     yield RequestLogin(cls.label, os.environ.get("G4F_LOGIN_URL", ""))
                     await cls.nodriver_auth(proxy)
             else:
```
```diff
@@ -717,7 +721,7 @@ class Conversation(JsonConversation):
         self.conversation_id = conversation_id
         self.message_id = message_id
         self.finish_reason = finish_reason
-        self.is_recipient = False
+        self.recipient = "all"
         self.parent_message_id = message_id if parent_message_id is None else parent_message_id
         self.user_id = user_id
         self.is_thinking = is_thinking
```
**chat.v1.js**

```diff
@@ -469,7 +469,7 @@ const register_message_buttons = async () => {
         el.dataset.click = true;
         const message_el = get_message_el(el);
         el.addEventListener("click", async () => {
-            iframe.src = `/qrcode/${window.conversation_id}#${message_el.dataset.index}`;
+            iframe.src = window.conversation_id ? `/qrcode/${window.conversation_id}#${message_el.dataset.index}` : '/qrcode';
            iframe_container.classList.remove("hidden");
        });
    });
```
**raise_for_status.py**

```diff
@@ -44,7 +44,7 @@ async def raise_for_status_async(response: Union[StreamResponse, ClientResponse]
     if response.status == 403 and is_cloudflare(message):
         raise CloudflareError(f"Response {response.status}: Cloudflare detected")
     elif response.status == 403 and is_openai(message):
-        raise ResponseStatusError(f"Response {response.status}: OpenAI Bot detected")
+        raise MissingAuthError(f"Response {response.status}: OpenAI Bot detected")
     elif response.status == 502:
         raise ResponseStatusError(f"Response {response.status}: Bad Gateway")
     elif response.status == 504:
```
```diff
@@ -71,7 +71,7 @@ def raise_for_status(response: Union[Response, StreamResponse, ClientResponse, R
     if response.status_code == 403 and is_cloudflare(response.text):
         raise CloudflareError(f"Response {response.status_code}: Cloudflare detected")
     elif response.status_code == 403 and is_openai(response.text):
-        raise ResponseStatusError(f"Response {response.status_code}: OpenAI Bot detected")
+        raise MissingAuthError(f"Response {response.status_code}: OpenAI Bot detected")
     elif response.status_code == 502:
         raise ResponseStatusError(f"Response {response.status_code}: Bad Gateway")
     elif response.status_code == 504:
```
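A reduced sketch of the reclassified 403. The exception hierarchy and the `is_openai` detector are assumptions stubbed for illustration; raising `MissingAuthError` presumably lets callers treat the bot-detection page as a credentials problem rather than a generic HTTP failure:

```python
class ResponseStatusError(Exception): ...
class MissingAuthError(ResponseStatusError): ...  # assumed hierarchy

def is_openai(body: str) -> bool:  # stub for the real detector
    return "openai" in body.lower()

def check(status: int, body: str) -> None:
    if status == 403 and is_openai(body):
        # Was ResponseStatusError before this commit.
        raise MissingAuthError(f"Response {status}: OpenAI Bot detected")
    if status >= 400:
        raise ResponseStatusError(f"Response {status}: {body[:50]}")
```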