mirror of
https://github.com/xtekky/gpt4free.git
synced 2025-12-06 02:30:41 -08:00
Hide request login in API
Add Grok provider; update Liaobots provider
This commit is contained in:
parent
70712708ea
commit
23814d3c8c
8 changed files with 216 additions and 40 deletions
|
|
@ -1,6 +1,7 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
import json
|
||||
from aiohttp import ClientSession, BaseConnector
|
||||
|
||||
from ..typing import AsyncResult, Messages
|
||||
|
|
@ -290,6 +291,9 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
|
|||
if b"<html coupert-item=" in chunk:
|
||||
raise RuntimeError("Invalid session")
|
||||
if chunk:
|
||||
if chunk.startswith(b"data: "):
|
||||
yield json.loads(chunk[6:]).get("content")
|
||||
else:
|
||||
yield chunk.decode(errors="ignore")
|
||||
except:
|
||||
async with session.post(
|
||||
|
|
@ -313,6 +317,9 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
|
|||
if b"<html coupert-item=" in chunk:
|
||||
raise RuntimeError("Invalid session")
|
||||
if chunk:
|
||||
if chunk.startswith(b"data: "):
|
||||
yield json.loads(chunk[6:]).get("content")
|
||||
else:
|
||||
yield chunk.decode(errors="ignore")
|
||||
|
||||
@classmethod
|
||||
|
|
|
|||
153
g4f/Provider/needs_auth/Grok.py
Normal file
153
g4f/Provider/needs_auth/Grok.py
Normal file
|
|
@ -0,0 +1,153 @@
|
|||
import os
|
||||
import json
|
||||
import random
|
||||
import re
|
||||
import base64
|
||||
import asyncio
|
||||
import time
|
||||
from urllib.parse import quote_plus, unquote_plus
|
||||
from pathlib import Path
|
||||
from aiohttp import ClientSession, BaseConnector
|
||||
from typing import Dict, Any, Optional, AsyncIterator, List
|
||||
|
||||
from ... import debug
|
||||
from ...typing import Messages, Cookies, ImagesType, AsyncResult
|
||||
from ...providers.response import JsonConversation, Reasoning, ImagePreview, ImageResponse, TitleGeneration
|
||||
from ...requests.raise_for_status import raise_for_status
|
||||
from ...requests.aiohttp import get_connector
|
||||
from ...requests import get_nodriver
|
||||
from ...errors import MissingAuthError
|
||||
from ...cookies import get_cookies_dir
|
||||
from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
|
||||
from ..helper import format_prompt, get_cookies, get_last_user_message
|
||||
|
||||
class Conversation(JsonConversation):
    """Holds the identifiers required to resume a Grok chat exchange."""

    def __init__(self, conversation_id: str, response_id: str, choice_id: str, model: str) -> None:
        # Independent assignments; order is irrelevant.
        self.model = model
        self.choice_id = choice_id
        self.response_id = response_id
        self.conversation_id = conversation_id
|
||||
|
||||
class Grok(AsyncGeneratorProvider, ProviderModelMixin):
    """Provider for grok.com's web chat API.

    Authenticates with browser cookies for the ``.grok.com`` domain and
    streams the newline-delimited JSON response of a new conversation,
    yielding text tokens, reasoning status updates, image previews/results
    and generated conversation titles.
    """
    label = "Grok AI"
    url = "https://grok.com"
    # Generated images are referenced relative to this host.
    assets_url = "https://assets.grok.com"
    conversation_url = "https://grok.com/rest/app-chat/conversations"

    needs_auth = True
    working = False

    default_model = "grok-3"
    models = [default_model, "grok-3-thinking", "grok-2"]

    # Cached cookies shared across calls once resolved (class-level state).
    _cookies: Cookies = None

    @classmethod
    async def _prepare_payload(cls, model: str, message: str) -> Dict[str, Any]:
        """Build the JSON body for the "new conversation" request.

        Only two upstream model names are used: "grok-latest" for the
        "grok-2" alias and "grok-3" for everything else; the "-thinking"
        suffix is mapped onto the ``isReasoning`` flag instead.
        """
        return {
            "temporary": False,
            "modelName": "grok-latest" if model == "grok-2" else "grok-3",
            "message": message,
            "fileAttachments": [],
            "imageAttachments": [],
            "disableSearch": False,
            "enableImageGeneration": True,
            "returnImageBytes": False,
            "returnRawGrokInXaiRequest": False,
            "enableImageStreaming": True,
            "imageGenerationCount": 2,
            "forceConcise": False,
            "toolOverrides": {},
            "enableSideBySide": True,
            "isPreset": False,
            "sendFinalMetadata": True,
            "customInstructions": "",
            "deepsearchPreset": "",
            # Reasoning mode is selected purely by the model-name suffix.
            "isReasoning": model.endswith("-thinking"),
        }

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        cookies: Cookies = None,
        connector: BaseConnector = None,
        images: ImagesType = None,
        return_conversation: bool = False,
        conversation: Optional[Conversation] = None,
        **kwargs
    ) -> AsyncResult:
        """Start a new Grok conversation and stream its response chunks.

        Raises:
            MissingAuthError: if no cookies are supplied, cached, or found
                on disk for the ``.grok.com`` domain.

        NOTE(review): ``images``, ``return_conversation`` and
        ``conversation`` are accepted but only ``conversation`` affects
        behavior (prompt formatting below); no Conversation object is ever
        yielded back — presumably reserved for future use. Confirm.
        """
        # Resolve cookies: explicit argument > class cache > cookie store.
        cls._cookies = cookies or cls._cookies or get_cookies(".grok.com", False, True)
        if not cls._cookies:
            raise MissingAuthError("Missing required cookies")

        # Fresh conversations send the full formatted history; follow-ups
        # send only the last user message.
        prompt = format_prompt(messages) if conversation is None else get_last_user_message(messages)
        base_connector = get_connector(connector, proxy)

        # Browser-like headers; grok.com rejects obviously non-browser clients.
        headers = {
            "accept": "*/*",
            "accept-language": "en-GB,en;q=0.9",
            "content-type": "application/json",
            "origin": "https://grok.com",
            "priority": "u=1, i",
            "referer": "https://grok.com/",
            "sec-ch-ua": '"Not/A)Brand";v="8", "Chromium";v="126", "Brave";v="126"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"macOS"',
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "same-origin",
            "sec-gpc": "1",
            "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36"
        }

        async with ClientSession(
            headers=headers,
            cookies=cls._cookies,
            connector=base_connector
        ) as session:
            payload = await cls._prepare_payload(model, prompt)
            response = await session.post(f"{cls.conversation_url}/new", json=payload)
            await raise_for_status(response)

            # None while not thinking; a start timestamp while thinking.
            thinking_duration = None
            # The endpoint streams newline-delimited JSON objects.
            async for line in response.content:
                if line:
                    try:
                        json_data = json.loads(line)
                        result = json_data.get("result", {})
                        response_data = result.get("response", {})
                        # Partial image generation updates arrive first.
                        image = response_data.get("streamingImageGenerationResponse", None)
                        if image is not None:
                            # NOTE(review): passes the raw `cookies` argument
                            # (may be None) rather than the resolved
                            # cls._cookies used for the session — confirm
                            # downstream image fetching still authenticates.
                            yield ImagePreview(f'{cls.assets_url}/{image["imageUrl"]}', "", {"cookies": cookies, "headers": headers})
                        token = response_data.get("token", "")
                        is_thinking = response_data.get("isThinking", False)
                        if token:
                            if is_thinking:
                                # First thinking token: record start time and
                                # announce the thinking phase once.
                                if thinking_duration is None:
                                    thinking_duration = time.time()
                                    yield Reasoning(status="🤔 Is thinking...")
                                yield Reasoning(token)
                            else:
                                # First normal token after thinking: report
                                # elapsed time, then reset the timer.
                                if thinking_duration is not None:
                                    thinking_duration = time.time() - thinking_duration
                                    status = f"Thought for {thinking_duration:.2f}s" if thinking_duration > 1 else "Finished"
                                    thinking_duration = None
                                    yield Reasoning(status=status)
                                yield token
                        # Final image URLs arrive on the closing modelResponse.
                        generated_images = response_data.get("modelResponse", {}).get("generatedImageUrls", None)
                        if generated_images:
                            yield ImageResponse([f'{cls.assets_url}/{image}' for image in generated_images], "", {"cookies": cookies, "headers": headers})
                        # Server-generated conversation title, if any.
                        title = response_data.get("title", {}).get("newTitle", "")
                        if title:
                            yield TitleGeneration(title)

                    except json.JSONDecodeError:
                        # Non-JSON keep-alive/fragment lines are skipped.
                        continue
|
||||
|
|
@ -104,6 +104,9 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
synthesize_content_type = "audio/aac"
|
||||
request_config = RequestConfig()
|
||||
|
||||
_api_key: str = None
|
||||
_headers: dict = None
|
||||
_cookies: Cookies = None
|
||||
_expires: int = None
|
||||
|
||||
@classmethod
|
||||
|
|
@ -111,9 +114,9 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
async for chunk in cls.login(proxy=proxy):
|
||||
yield chunk
|
||||
yield AuthResult(
|
||||
api_key=cls.request_config.access_token,
|
||||
cookies=cls.request_config.cookies or {},
|
||||
headers=cls.request_config.headers or cls.get_default_headers(),
|
||||
api_key=cls._api_key,
|
||||
cookies=cls._cookies or cls.request_config.cookies or {},
|
||||
headers=cls._headers or cls.request_config.headers or cls.get_default_headers(),
|
||||
expires=cls._expires,
|
||||
proof_token=cls.request_config.proof_token,
|
||||
turnstile_token=cls.request_config.turnstile_token
|
||||
|
|
@ -303,17 +306,17 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
) as session:
|
||||
image_requests = None
|
||||
if not cls.needs_auth:
|
||||
if not cls.request_config.headers:
|
||||
cls._create_request_args(cls.request_config.cookies)
|
||||
if cls._headers is None:
|
||||
cls._create_request_args(cls._cookies)
|
||||
async with session.get(cls.url, headers=INIT_HEADERS) as response:
|
||||
cls._update_request_args(auth_result, session)
|
||||
await raise_for_status(response)
|
||||
else:
|
||||
if not cls.request_config.headers and getattr(auth_result, "cookies", None):
|
||||
if cls._headers is None and getattr(auth_result, "cookies", None):
|
||||
cls._create_request_args(auth_result.cookies, auth_result.headers)
|
||||
if not cls._set_api_key(getattr(auth_result, "api_key", None)):
|
||||
raise MissingAuthError("Access token is not valid")
|
||||
async with session.get(cls.url, headers=cls.request_config.headers) as response:
|
||||
async with session.get(cls.url, headers=cls._headers) as response:
|
||||
cls._update_request_args(auth_result, session)
|
||||
await raise_for_status(response)
|
||||
try:
|
||||
|
|
@ -328,20 +331,20 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
conversation = copy(conversation)
|
||||
if getattr(auth_result, "cookies", {}).get("oai-did") != getattr(conversation, "user_id", None):
|
||||
conversation = Conversation(None, str(uuid.uuid4()))
|
||||
if cls.request_config.access_token is None:
|
||||
if cls._api_key is None:
|
||||
auto_continue = False
|
||||
conversation.finish_reason = None
|
||||
sources = Sources([])
|
||||
while conversation.finish_reason is None:
|
||||
async with session.post(
|
||||
f"{cls.url}/backend-anon/sentinel/chat-requirements"
|
||||
if cls.request_config.access_token is None else
|
||||
if cls._api_key is None else
|
||||
f"{cls.url}/backend-api/sentinel/chat-requirements",
|
||||
json={"p": None if not getattr(auth_result, "proof_token", None) else get_requirements_token(getattr(auth_result, "proof_token", None))},
|
||||
headers=cls.request_config.headers
|
||||
headers=cls._headers
|
||||
) as response:
|
||||
if response.status in (401, 403):
|
||||
auth_result.reset()
|
||||
raise MissingAuthError(f"Response status: {response.status}")
|
||||
else:
|
||||
cls._update_request_args(auth_result, session)
|
||||
await raise_for_status(response)
|
||||
|
|
@ -404,7 +407,7 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
messages = messages if conversation_id is None else [messages[-1]]
|
||||
data["messages"] = cls.create_messages(messages, image_requests, ["search"] if web_search else None)
|
||||
headers = {
|
||||
**cls.request_config.headers,
|
||||
**cls._headers,
|
||||
"accept": "text/event-stream",
|
||||
"content-type": "application/json",
|
||||
"openai-sentinel-chat-requirements-token": chat_token,
|
||||
|
|
@ -417,7 +420,7 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
headers['openai-sentinel-turnstile-token'] = auth_result.turnstile_token
|
||||
async with session.post(
|
||||
f"{cls.url}/backend-anon/conversation"
|
||||
if cls.request_config.access_token is None else
|
||||
if cls._api_key is None else
|
||||
f"{cls.url}/backend-api/conversation",
|
||||
json=data,
|
||||
headers=headers
|
||||
|
|
@ -547,7 +550,7 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
async with session.get(
|
||||
f"{cls.url}/backend-api/synthesize",
|
||||
params=params,
|
||||
headers=cls.request_config.headers
|
||||
headers=cls._headers
|
||||
) as response:
|
||||
await raise_for_status(response)
|
||||
async for chunk in response.iter_content():
|
||||
|
|
@ -557,25 +560,33 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
async def login(
|
||||
cls,
|
||||
proxy: str = None,
|
||||
api_key: str = None,
|
||||
proof_token: str = None,
|
||||
cookies: Cookies = None,
|
||||
headers: dict = None,
|
||||
**kwargs
|
||||
) -> AsyncIterator:
|
||||
if cls._expires is not None and (cls._expires - 60*10) < time.time():
|
||||
cls.request_config.headers = cls.request_config.access_token = None
|
||||
if cls.request_config.headers is None:
|
||||
cls.request_config.headers = {}
|
||||
if cls.request_config.access_token is not None:
|
||||
cls._headers = cls._api_key = None
|
||||
if cls._headers is None or headers is not None:
|
||||
cls._headers = {} if headers is None else headers
|
||||
if proof_token is not None:
|
||||
cls.request_config.proof_token = proof_token
|
||||
if cookies is not None:
|
||||
cls.request_config.cookies = cookies
|
||||
if api_key is not None:
|
||||
cls._create_request_args(cls.request_config.cookies, cls.request_config.headers)
|
||||
cls._set_api_key(cls.request_config.access_token)
|
||||
cls._set_api_key(api_key)
|
||||
else:
|
||||
try:
|
||||
cls.request_config = await get_request_config(cls.request_config, proxy)
|
||||
await get_request_config(cls.request_config, proxy)
|
||||
cls._create_request_args(cls.request_config.cookies, cls.request_config.headers)
|
||||
if cls.request_config.access_token is not None or cls.needs_auth:
|
||||
if not cls._set_api_key(cls.request_config.access_token):
|
||||
raise NoValidHarFileError(f"Access token is not valid: {cls.request_config.access_token}")
|
||||
except NoValidHarFileError:
|
||||
if has_nodriver:
|
||||
if cls.request_config.access_token is None:
|
||||
if cls._api_key is None:
|
||||
yield RequestLogin(cls.label, os.environ.get("G4F_LOGIN_URL", ""))
|
||||
await cls.nodriver_auth(proxy)
|
||||
else:
|
||||
|
|
@ -598,7 +609,7 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
if "OpenAI-Sentinel-Turnstile-Token" in event.request.headers:
|
||||
cls.request_config.turnstile_token = event.request.headers["OpenAI-Sentinel-Turnstile-Token"]
|
||||
if "Authorization" in event.request.headers:
|
||||
cls.request_config.access_token = event.request.headers["Authorization"].split()[-1]
|
||||
cls._api_key = event.request.headers["Authorization"].split()[-1]
|
||||
elif event.request.url == arkose_url:
|
||||
cls.request_config.arkose_request = arkReq(
|
||||
arkURL=event.request.url,
|
||||
|
|
@ -615,13 +626,13 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
await page.evaluate("document.getElementById('prompt-textarea').innerText = 'Hello'")
|
||||
await page.evaluate("document.querySelector('[data-testid=\"send-button\"]').click()")
|
||||
while True:
|
||||
if cls.request_config.access_token is not None or not cls.needs_auth:
|
||||
if cls._api_key is not None or not cls.needs_auth:
|
||||
break
|
||||
body = await page.evaluate("JSON.stringify(window.__remixContext)")
|
||||
if body:
|
||||
match = re.search(r'"accessToken":"(.*?)"', body)
|
||||
if match:
|
||||
cls.request_config.access_token = match.group(1)
|
||||
cls._api_key = match.group(1)
|
||||
break
|
||||
await asyncio.sleep(1)
|
||||
while True:
|
||||
|
|
@ -632,7 +643,7 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
cls.request_config.cookies = await page.send(get_cookies([cls.url]))
|
||||
await page.close()
|
||||
cls._create_request_args(cls.request_config.cookies, cls.request_config.headers, user_agent=user_agent)
|
||||
cls._set_api_key(cls.request_config.access_token)
|
||||
cls._set_api_key(cls._api_key)
|
||||
finally:
|
||||
stop_browser()
|
||||
|
||||
|
|
@ -645,10 +656,10 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
|
||||
@classmethod
|
||||
def _create_request_args(cls, cookies: Cookies = None, headers: dict = None, user_agent: str = None):
|
||||
cls.request_config.headers = cls.get_default_headers() if headers is None else headers
|
||||
cls._headers = cls.get_default_headers() if headers is None else headers
|
||||
if user_agent is not None:
|
||||
cls.request_config.headers["user-agent"] = user_agent
|
||||
cls.request_config.cookies = {} if cookies is None else cookies
|
||||
cls._headers["user-agent"] = user_agent
|
||||
cls._cookies = {} if cookies is None else cookies
|
||||
cls._update_cookie_header()
|
||||
|
||||
@classmethod
|
||||
|
|
@ -656,7 +667,7 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
if hasattr(auth_result, "cookies"):
|
||||
for c in session.cookie_jar if hasattr(session, "cookie_jar") else session.cookies.jar:
|
||||
auth_result.cookies[getattr(c, "key", getattr(c, "name", ""))] = c.value
|
||||
cls.request_config.cookies = auth_result.cookies
|
||||
cls._cookies = auth_result.cookies
|
||||
cls._update_cookie_header()
|
||||
|
||||
@classmethod
|
||||
|
|
@ -669,15 +680,15 @@ class OpenaiChat(AsyncAuthedProvider, ProviderModelMixin):
|
|||
if time.time() > cls._expires:
|
||||
debug.log(f"OpenaiChat: API key is expired")
|
||||
else:
|
||||
cls.request_config.access_token = api_key
|
||||
cls.request_config.headers["authorization"] = f"Bearer {api_key}"
|
||||
cls._api_key = api_key
|
||||
cls._headers["authorization"] = f"Bearer {api_key}"
|
||||
return True
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def _update_cookie_header(cls):
|
||||
if cls.request_config.cookies:
|
||||
cls.request_config.headers["cookie"] = format_cookies(cls.request_config.cookies)
|
||||
if cls._cookies:
|
||||
cls._headers["cookie"] = format_cookies(cls._cookies)
|
||||
|
||||
class Conversation(JsonConversation):
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ from .GeminiPro import GeminiPro
|
|||
from .GigaChat import GigaChat
|
||||
from .GithubCopilot import GithubCopilot
|
||||
from .GlhfChat import GlhfChat
|
||||
from .Grok import Grok
|
||||
from .Groq import Groq
|
||||
from .MetaAI import MetaAI
|
||||
from .MetaAIAccount import MetaAIAccount
|
||||
|
|
|
|||
|
|
@ -105,6 +105,8 @@
|
|||
height: 100%;
|
||||
position: absolute;
|
||||
z-index: -1;
|
||||
object-fit: contain;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.description, form p a {
|
||||
|
|
|
|||
|
|
@ -465,7 +465,7 @@ const handle_ask = async (do_ask_gpt = true) => {
|
|||
await ask_gpt(message_id);
|
||||
}
|
||||
} else {
|
||||
await lazy_scroll_to_bottom();
|
||||
await safe_load_conversation(window.conversation_id, true);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -2490,7 +2490,7 @@ async function api(ressource, args=null, files=null, message_id=null, scroll=tru
|
|||
} else if (ressource == "conversation") {
|
||||
let body = JSON.stringify(args);
|
||||
headers.accept = 'text/event-stream';
|
||||
if (files !== null) {
|
||||
if (files.length > 0) {
|
||||
const formData = new FormData();
|
||||
for (const file of files) {
|
||||
formData.append('files', file)
|
||||
|
|
|
|||
|
|
@ -189,6 +189,8 @@ class Api:
|
|||
logger.exception(chunk)
|
||||
debug.error(chunk)
|
||||
yield self._format_json('message', get_error_message(chunk), error=type(chunk).__name__)
|
||||
elif isinstance(chunk, RequestLogin):
|
||||
yield self._format_json("preview", chunk.to_string())
|
||||
elif isinstance(chunk, PreviewResponse):
|
||||
yield self._format_json("preview", chunk.to_string())
|
||||
elif isinstance(chunk, ImagePreview):
|
||||
|
|
@ -197,7 +199,7 @@ class Api:
|
|||
images = chunk
|
||||
if download_images or chunk.get("cookies"):
|
||||
chunk.alt = format_image_prompt(kwargs.get("messages"), chunk.alt)
|
||||
images = asyncio.run(copy_images(chunk.get_list(), chunk.get("cookies"), proxy=proxy, alt=chunk.alt))
|
||||
images = asyncio.run(copy_images(chunk.get_list(), chunk.get("cookies"), chunk.get("headers"), proxy=proxy, alt=chunk.alt))
|
||||
images = ImageResponse(images, chunk.alt)
|
||||
yield self._format_json("content", str(images), images=chunk.get_list(), alt=chunk.alt)
|
||||
elif isinstance(chunk, SynthesizeData):
|
||||
|
|
|
|||
|
|
@ -187,12 +187,12 @@ class SynthesizeData(HiddenResponse, JsonMixin):
|
|||
self.provider = provider
|
||||
self.data = data
|
||||
|
||||
class RequestLogin(ResponseType):
|
||||
class RequestLogin(HiddenResponse):
|
||||
def __init__(self, label: str, login_url: str) -> None:
|
||||
self.label = label
|
||||
self.login_url = login_url
|
||||
|
||||
def __str__(self) -> str:
|
||||
def to_string(self) -> str:
|
||||
return format_link(self.login_url, f"[Login to {self.label}]") + "\n\n"
|
||||
|
||||
class ImageResponse(ResponseType):
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue