No arkose token and .har files

Heiner Lohaus 2024-03-26 06:42:47 +01:00
parent 92358bd896
commit 95bab66dad
5 changed files with 48 additions and 47 deletions

View file

@@ -3,8 +3,9 @@ from __future__ import annotations
 from aiohttp import ClientSession
 from ...requests import raise_for_status
 from ...errors import RateLimitError
+from ...providers.conversation import BaseConversation
 
-class Conversation:
+class Conversation(BaseConversation):
     """
     Represents a conversation with specific attributes.
     """
@@ -32,7 +33,7 @@ async def create_conversation(session: ClientSession, headers: dict, tone: str)
    Returns:
        Conversation: An instance representing the created conversation.
    """
-    if tone == "copilot":
+    if tone == "Copilot":
        url = "https://copilot.microsoft.com/turing/conversation/create?bundleVersion=1.1634.3-nodesign2"
    else:
        url = "https://www.bing.com/turing/conversation/create?bundleVersion=1.1626.1"

View file

@@ -3,10 +3,10 @@ from __future__ import annotations
 import asyncio
 import uuid
 import json
-import os
 import base64
 import time
 from aiohttp import ClientWebSocketResponse
+from copy import copy
 
 try:
     import webview
@@ -22,13 +22,13 @@ except ImportError:
     pass
 
 from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from ..helper import get_cookies
 from ...webdriver import get_browser
 from ...typing import AsyncResult, Messages, Cookies, ImageType, Union, AsyncIterator
 from ...requests import get_args_from_browser, raise_for_status
 from ...requests.aiohttp import StreamSession
 from ...image import to_image, to_bytes, ImageResponse, ImageRequest
-from ...errors import MissingRequirementsError, MissingAuthError, ProviderNotWorkingError
+from ...errors import MissingAuthError
+from ...providers.conversation import BaseConversation
 from ..openai.har_file import getArkoseAndAccessToken
 from ... import debug
 
@@ -56,11 +56,6 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
         prompt: str = None,
         model: str = "",
         messages: Messages = [],
-        history_disabled: bool = False,
-        action: str = "next",
-        conversation_id: str = None,
-        parent_id: str = None,
-        image: ImageType = None,
         **kwargs
     ) -> Response:
         """
@@ -89,12 +84,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
         generator = cls.create_async_generator(
             model,
             messages,
-            history_disabled=history_disabled,
-            action=action,
-            conversation_id=conversation_id,
-            parent_id=parent_id,
-            image=image,
-            response_fields=True,
+            return_conversation=True,
             **kwargs
         )
         return Response(
@@ -209,7 +199,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
         } for message in messages]
 
         # Check if there is an image response
-        if image_request:
+        if image_request is not None:
             # Change content in last user message
             messages[-1]["content"] = {
                 "content_type": "multimodal_text",
@@ -308,10 +298,11 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
         history_disabled: bool = True,
         action: str = "next",
         conversation_id: str = None,
+        conversation: Conversation = None,
         parent_id: str = None,
         image: ImageType = None,
         image_name: str = None,
-        response_fields: bool = False,
+        return_conversation: bool = False,
         **kwargs
     ) -> AsyncResult:
         """
@@ -330,7 +321,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
             conversation_id (str): ID of the conversation.
             parent_id (str): ID of the parent message.
             image (ImageType): Image to include in the conversation.
-            response_fields (bool): Flag to include response fields in the output.
+            return_conversation (bool): Flag to include response fields in the output.
             **kwargs: Additional keyword arguments.
 
         Yields:
@@ -387,6 +378,8 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                 arkose_token, api_key, cookies = await getArkoseAndAccessToken(proxy)
                 cls._create_request_args(cookies)
                 cls._set_api_key(api_key)
+                if arkose_token is None:
+                    raise MissingAuthError("No arkose token found in .har file")
 
             try:
                 image_request = await cls.upload_image(session, cls._headers, image, image_name) if image else None
@@ -396,7 +389,8 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                     print(f"{e.__class__.__name__}: {e}")
 
             model = cls.get_model(model).replace("gpt-3.5-turbo", "text-davinci-002-render-sha")
-            fields = ResponseFields()
+            fields = Conversation() if conversation is None else copy(conversation)
+            fields.finish_reason = None
             while fields.finish_reason is None:
                 conversation_id = conversation_id if fields.conversation_id is None else fields.conversation_id
                 parent_id = parent_id if fields.message_id is None else fields.message_id
@@ -422,8 +416,6 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                 }
                 if need_arkose:
                     headers["OpenAI-Sentinel-Arkose-Token"] = arkose_token
-                    headers["OpenAI-Sentinel-Chat-Requirements-Token"] = chat_token
-
                 async with session.post(
                     f"{cls.url}/backend-api/conversation",
                     json=data,
@@ -432,15 +424,15 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                     cls._update_request_args(session)
                     await raise_for_status(response)
                     async for chunk in cls.iter_messages_chunk(response.iter_lines(), session, fields):
-                        if response_fields:
-                            response_fields = False
+                        if return_conversation:
+                            return_conversation = False
                             yield fields
                         yield chunk
                 if not auto_continue:
                     break
                 action = "continue"
                 await asyncio.sleep(5)
-        if history_disabled and auto_continue:
+        if history_disabled and auto_continue and not return_conversation:
             await cls.delete_conversation(session, cls._headers, fields.conversation_id)
 
     @staticmethod
@@ -458,7 +450,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
         cls,
         messages: AsyncIterator,
         session: StreamSession,
-        fields: ResponseFields
+        fields: Conversation
    ) -> AsyncIterator:
        last_message: int = 0
        async for message in messages:
@@ -487,7 +479,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                break
 
     @classmethod
-    async def iter_messages_line(cls, session: StreamSession, line: bytes, fields: ResponseFields) -> AsyncIterator:
+    async def iter_messages_line(cls, session: StreamSession, line: bytes, fields: Conversation) -> AsyncIterator:
        if not line.startswith(b"data: "):
            return
        elif line.startswith(b"data: [DONE]"):
@@ -618,7 +610,7 @@ this.fetch = async (url, options) => {
     @classmethod
     def _update_request_args(cls, session: StreamSession):
         for c in session.cookie_jar if hasattr(session, "cookie_jar") else session.cookies.jar:
-            cls._cookies[c.name if hasattr(c, "name") else c.key] = c.value
+            cls._cookies[c.key if hasattr(c, "key") else c.name] = c.value
         cls._update_cookie_header()
 
     @classmethod
@@ -631,7 +623,7 @@ this.fetch = async (url, options) => {
     def _update_cookie_header(cls):
         cls._headers["Cookie"] = cls._format_cookies(cls._cookies)
 
-class ResponseFields:
+class Conversation(BaseConversation):
     """
     Class to encapsulate response fields.
     """
@@ -664,7 +656,7 @@ class Response():
        self._generator = None
        chunks = []
        async for chunk in self._generator:
-            if isinstance(chunk, ResponseFields):
+            if isinstance(chunk, Conversation):
                self._fields = chunk
            else:
                yield chunk
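The renamed return_conversation flag makes create_async_generator yield its Conversation object before the text chunks, and the new conversation parameter lets a later call resume that state. A hedged usage sketch, assuming a working login (access token or .har file) and the public g4f.Provider.OpenaiChat entry point:

import asyncio
from g4f.Provider import OpenaiChat
from g4f.providers.conversation import BaseConversation

async def main():
    conversation = None
    async for chunk in OpenaiChat.create_async_generator(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Hello"}],
        return_conversation=True,  # yield the Conversation before the content
    ):
        if isinstance(chunk, BaseConversation):
            conversation = chunk   # carries conversation_id / message_id
        else:
            print(chunk, end="")

    # Follow-up request that reuses the stored conversation state.
    async for chunk in OpenaiChat.create_async_generator(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Please continue."}],
        conversation=conversation,
    ):
        if not isinstance(chunk, BaseConversation):
            print(chunk, end="")

asyncio.run(main())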

View file

@@ -11,11 +11,6 @@ from copy import deepcopy
 from .crypt import decrypt, encrypt
 from ...requests import StreamSession
 
-arkPreURL = "https://tcr9i.chat.openai.com/fc/gt2/public_key/35536E1E-65B4-4D96-9D97-6ADB7EFF8147"
-sessionUrl = "https://chat.openai.com/api/auth/session"
-chatArk = None
-accessToken = None
-
 class arkReq:
     def __init__(self, arkURL, arkBx, arkHeader, arkBody, arkCookies, userAgent):
         self.arkURL = arkURL
@@ -25,15 +20,24 @@ class arkReq:
         self.arkCookies = arkCookies
         self.userAgent = userAgent
 
+arkPreURL = "https://tcr9i.chat.openai.com/fc/gt2/public_key/35536E1E-65B4-4D96-9D97-6ADB7EFF8147"
+sessionUrl = "https://chat.openai.com/api/auth/session"
+chatArk: arkReq = None
+accessToken: str = None
+cookies: dict = None
+
 def readHAR():
     dirPath = "./"
     harPath = []
     chatArks = []
     accessToken = None
+    cookies = {}
     for root, dirs, files in os.walk(dirPath):
         for file in files:
             if file.endswith(".har"):
                 harPath.append(os.path.join(root, file))
+        if harPath:
+            break
     if not harPath:
         raise RuntimeError("No .har file found")
     for path in harPath:
@@ -48,11 +52,12 @@ def readHAR():
                     chatArks.append(parseHAREntry(v))
                 elif v['request']['url'] == sessionUrl:
                     accessToken = json.loads(v["response"]["content"]["text"]).get("accessToken")
-    if not chatArks:
-        RuntimeError("No arkose requests found in .har files")
+                    cookies = {c['name']: c['value'] for c in v['request']['cookies']}
     if not accessToken:
         RuntimeError("No accessToken found in .har files")
-    return chatArks.pop(), accessToken
+    if not chatArks:
+        return None, accessToken, cookies
+    return chatArks.pop(), accessToken, cookies
 
 def parseHAREntry(entry) -> arkReq:
     tmpArk = arkReq(
@@ -60,7 +65,7 @@ def parseHAREntry(entry) -> arkReq:
         arkBx="",
         arkHeader={h['name'].lower(): h['value'] for h in entry['request']['headers'] if h['name'].lower() not in ['content-length', 'cookie'] and not h['name'].startswith(':')},
         arkBody={p['name']: unquote(p['value']) for p in entry['request']['postData']['params'] if p['name'] not in ['rnd']},
-        arkCookies=[{'name': c['name'], 'value': c['value'], 'expires': c['expires']} for c in entry['request']['cookies']],
+        arkCookies={c['name']: c['value'] for c in entry['request']['cookies']},
         userAgent=""
     )
     tmpArk.userAgent = tmpArk.arkHeader.get('user-agent', '')
@@ -81,7 +86,6 @@ def genArkReq(chatArk: arkReq) -> arkReq:
     tmpArk.arkBody['bda'] = base64.b64encode(bda.encode()).decode()
     tmpArk.arkBody['rnd'] = str(random.random())
     tmpArk.arkHeader['x-ark-esync-value'] = bw
-    tmpArk.arkCookies = {cookie['name']: cookie['value'] for cookie in tmpArk.arkCookies}
     return tmpArk
 
 async def sendRequest(tmpArk: arkReq, proxy: str = None):
@@ -117,8 +121,10 @@ def getN() -> str:
     return base64.b64encode(timestamp.encode()).decode()
 
 async def getArkoseAndAccessToken(proxy: str):
-    global chatArk, accessToken
+    global chatArk, accessToken, cookies
     if chatArk is None or accessToken is None:
-        chatArk, accessToken = readHAR()
+        chatArk, accessToken, cookies = readHAR()
+    if chatArk is None:
+        return None, accessToken, cookies
     newReq = genArkReq(chatArk)
-    return await sendRequest(newReq, proxy), accessToken, newReq.arkCookies
+    return await sendRequest(newReq, proxy), accessToken, cookies
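The HAR reader now returns the session cookies as a plain name/value dict alongside the arkose request and access token, and getArkoseAndAccessToken returns None for the token instead of failing when no arkose request was captured. A standalone sketch of the cookie/token extraction, assuming the standard HAR 1.2 layout (log.entries); the function name and file path are illustrative:

import json

sessionUrl = "https://chat.openai.com/api/auth/session"

def read_session_from_har(path: str = "chat.openai.com.har"):
    # path is illustrative; the real readHAR() walks the working directory
    # and picks up every *.har file it finds.
    with open(path, "rb") as file:
        har = json.loads(file.read())
    accessToken, cookies = None, {}
    for entry in har["log"]["entries"]:
        if entry["request"]["url"] == sessionUrl:
            # Token comes from the response body, cookies from the request.
            accessToken = json.loads(entry["response"]["content"]["text"]).get("accessToken")
            cookies = {c["name"]: c["value"] for c in entry["request"]["cookies"]}
    return accessToken, cookies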

View file

@@ -39,9 +39,9 @@ from g4f.errors import VersionNotFoundError
 from g4f.Provider import ProviderType, __providers__, __map__
 from g4f.providers.base_provider import ProviderModelMixin
 from g4f.Provider.bing.create_images import patch_provider
-from g4f.Provider.Bing import Conversation
+from g4f.providers.conversation import BaseConversation
 
-conversations: dict[str, Conversation] = {}
+conversations: dict[str, BaseConversation] = {}
 
 class Api():
@@ -230,14 +230,14 @@ class Api():
                if first:
                    first = False
                    yield self._format_json("provider", get_last_provider(True))
-                if isinstance(chunk, Conversation):
+                if isinstance(chunk, BaseConversation):
                    conversations[conversation_id] = chunk
                    yield self._format_json("conversation", conversation_id)
                elif isinstance(chunk, Exception):
                    logging.exception(chunk)
                    yield self._format_json("message", get_error_message(chunk))
                else:
-                    yield self._format_json("content", chunk)
+                    yield self._format_json("content", str(chunk))
        except Exception as e:
            logging.exception(e)
            yield self._format_json('error', get_error_message(e))
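Two details of the GUI change above: conversation objects are cached per conversation_id so later requests can resume them, and chunks are serialized through str(...) because providers may yield objects (image responses, for example) whose string form is the displayable content. A simplified illustration; the class and the JSON shape here are stand-ins, not the GUI's exact wire format:

from g4f.providers.conversation import BaseConversation

conversations: dict[str, BaseConversation] = {}

class DemoImageResponse:
    # Stand-in for a provider chunk that is an object, not a plain string.
    def __init__(self, url: str, alt: str):
        self.url, self.alt = url, alt
    def __str__(self) -> str:
        return f"[![{self.alt}]({self.url})]({self.url})"

def format_chunk(conversation_id: str, chunk) -> dict:
    if isinstance(chunk, BaseConversation):
        conversations[conversation_id] = chunk
        return {"type": "conversation", "conversation": conversation_id}
    # Always send the string form so non-string chunks serialize cleanly.
    return {"type": "content", "content": str(chunk)}

print(format_chunk("abc123", "plain text"))
print(format_chunk("abc123", DemoImageResponse("https://example.com/img.png", "an image")))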

View file

@@ -0,0 +1,2 @@
+class BaseConversation:
+    ...
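The new module only defines a marker base class; providers subclass it so shared code (the GUI API above, the OpenaiChat Response helper) can recognize conversation state without importing every provider. A minimal illustration with a hypothetical provider class:

class BaseConversation:
    ...

class DemoConversation(BaseConversation):
    # Hypothetical provider-specific state needed to resume a chat.
    def __init__(self, conversation_id: str = None, message_id: str = None):
        self.conversation_id = conversation_id
        self.message_id = message_id

def is_resumable(chunk) -> bool:
    # Shared code only checks the marker type, never the concrete class.
    return isinstance(chunk, BaseConversation)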