Improve providers with tests

Heiner Lohaus 2023-08-22 23:27:34 +02:00
parent 847843d120
commit 98d3304108
7 changed files with 40 additions and 40 deletions

View file

@@ -50,6 +50,7 @@ class DfeHub(BaseProvider):
"https://chat.dfehub.com/api/openai/v1/chat/completions",
headers=headers,
json=json_data,
timeout=3
)
for chunk in response.iter_lines():
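The new `timeout=3` gives the request a hard deadline; a rough sketch (not part of the commit, payload is a placeholder) of how a stalled provider then fails fast instead of hanging a test run:

    # Sketch: `requests` raises Timeout once the limit is exceeded, so the
    # caller can treat a slow provider as broken instead of waiting forever.
    import requests

    try:
        response = requests.post(
            "https://chat.dfehub.com/api/openai/v1/chat/completions",
            json={"messages": []},  # placeholder payload, not the provider's real one
            timeout=3,              # seconds
        )
    except requests.exceptions.Timeout:
        response = None             # counts as a failure in the test run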

View file

@@ -6,7 +6,7 @@ from ..typing import Any, CreateResult
class FastGpt(ABC):
url: str = 'https://chat9.fastgpt.me/'
working = True
working = False
needs_auth = False
supports_stream = True
supports_gpt_35_turbo = True

View file

@@ -11,6 +11,7 @@ class H2o(BaseProvider):
url = "https://gpt-gm.h2o.ai"
working = True
supports_stream = True
model = "h2oai/h2ogpt-gm-oasst1-en-2048-falcon-40b-v1"
@staticmethod
def create_completion(
@@ -47,8 +48,9 @@ class H2o(BaseProvider):
"https://gpt-gm.h2o.ai/conversation",
headers=headers,
json=data,
)
conversation_id = response.json()["conversationId"]
).json()
if "conversationId" not in response:
return
data = {
"inputs": conversation,
@@ -71,7 +73,7 @@ class H2o(BaseProvider):
}
response = session.post(
f"https://gpt-gm.h2o.ai/conversation/{conversation_id}",
f"https://gpt-gm.h2o.ai/conversation/{response['conversationId']}",
headers=headers,
json=data,
)
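Spelled out contiguously, the two H2o hunks amount to the following control flow (paraphrased from the diff; `session`, `headers` and `data` are built by the surrounding provider code):

    # Open a conversation and bail out early if the API did not return an id.
    response = session.post(
        "https://gpt-gm.h2o.ai/conversation",
        headers=headers,
        json=data,
    ).json()
    if "conversationId" not in response:
        return  # nothing to stream

    # (in the provider, `data` is rebuilt with the prompt between these two calls)
    response = session.post(
        f"https://gpt-gm.h2o.ai/conversation/{response['conversationId']}",
        headers=headers,
        json=data,
    )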

View file

@@ -8,7 +8,7 @@ class V50(BaseProvider):
supports_gpt_35_turbo = True
supports_stream = False
needs_auth = False
working = True
working = False
@staticmethod
def create_completion(
@@ -46,7 +46,8 @@ class V50(BaseProvider):
}
response = requests.post("https://p5.v50.ltd/api/chat-process",
json=payload, headers=headers, proxies=kwargs['proxy'] if 'proxy' in kwargs else {})
yield response.text
if "https://fk1.v50.ltd" not in response.text:
yield response.text
@classmethod
@property
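A minimal sketch of what the new guard does; the assumption here is that the upstream API echoes its own site URL in the body when it rejects a request, so such a response is dropped rather than yielded as a completion:

    text = response.text
    if "https://fk1.v50.ltd" not in text:
        yield text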

View file

@@ -21,11 +21,6 @@ class Wewordle(BaseProvider):
stream: bool,
**kwargs: Any,
) -> CreateResult:
base = ""
for message in messages:
base += "%s: %s\n" % (message["role"], message["content"])
base += "assistant:"
# randomize user id and app id
_user_id = "".join(
random.choices(f"{string.ascii_lowercase}{string.digits}", k=16)
@@ -45,7 +40,7 @@ class Wewordle(BaseProvider):
}
data: dict[str, Any] = {
"user": _user_id,
"messages": [{"role": "user", "content": base}],
"messages": messages,
"subscriber": {
"originalPurchaseDate": None,
"originalApplicationVersion": None,

View file

@@ -1,5 +1,6 @@
import re
import urllib.parse
import json
from curl_cffi import requests
@@ -28,7 +29,11 @@ class You(BaseProvider):
impersonate="chrome107",
)
response.raise_for_status()
yield _parse_output(response.text)
start = 'data: {"youChatToken": '
for line in response.content.splitlines():
line = line.decode('utf-8')
if line.startswith(start):
yield json.loads(line[len(start): -1])
def _create_url_param(messages: list[dict[str, str]]):
@@ -50,10 +55,4 @@ def _create_header():
return {
"accept": "text/event-stream",
"referer": "https://you.com/search?fromSearchBar=true&tbm=youchat",
}
def _parse_output(output: str) -> str:
regex = r"^data:\s{\"youChatToken\": \"(.*)\"}$"
tokens = [token for token in re.findall(regex, output, re.MULTILINE)]
return "".join(tokens)
}
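The regex-based `_parse_output` is replaced by a line-by-line parse of the event stream; a sketch of how one such line is handled (the sample line is invented, the real ones come from you.com):

    import json

    start = 'data: {"youChatToken": '
    line = 'data: {"youChatToken": "Hello"}'
    if line.startswith(start):
        token = json.loads(line[len(start):-1])  # strip the prefix and the trailing "}"
        print(token)  # -> Hello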

View file

@@ -3,50 +3,51 @@ from pathlib import Path
sys.path.append(str(Path(__file__).parent.parent))
from g4f import BaseProvider, models, provider
from g4f import BaseProvider, models, Provider
logging = False
def main():
providers = get_providers()
results: list[list[str | bool]] = []
failed_providers = []
for _provider in providers:
print("start", _provider.__name__)
actual_working = judge(_provider)
expected_working = _provider.working
match = actual_working == expected_working
if _provider.needs_auth:
continue
print("Provider:", _provider.__name__)
result = judge(_provider)
print("Result:", result)
if _provider.working and not result:
failed_providers.append([_provider, result])
results.append([_provider.__name__, expected_working, actual_working, match])
print("failed provider list")
for result in results:
if not result[3]:
print(result)
print("Failed providers:")
for _provider, result in failed_providers:
print(f"{_provider.__name__}: {result}")
def get_providers() -> list[type[BaseProvider]]:
provider_names = dir(provider)
provider_names = dir(Provider)
ignore_names = [
"base_provider",
"BaseProvider",
"BaseProvider"
]
provider_names = [
provider_name
for provider_name in provider_names
if not provider_name.startswith("__") and provider_name not in ignore_names
]
return [getattr(provider, provider_name) for provider_name in provider_names]
return [getattr(Provider, provider_name) for provider_name in provider_names]
def create_response(_provider: type[BaseProvider]) -> str:
model = (
models.gpt_35_turbo.name
if _provider is not provider.H2o
else models.falcon_7b.name
if _provider.supports_gpt_35_turbo
else _provider.model
)
response = _provider.create_completion(
model=model,
messages=[{"role": "user", "content": "Hello world!, plz yourself"}],
messages=[{"role": "user", "content": "Hello world!"}],
stream=False,
)
return "".join(response)
@@ -59,9 +60,10 @@ def judge(_provider: type[BaseProvider]) -> bool:
try:
response = create_response(_provider)
assert type(response) is str
return len(response) > 1
return response
except Exception as e:
print(e)
if logging:
print(e)
return False
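For the loop above, `judge` now returns the provider's response text on success and `False` on any exception, so the truthiness check `if _provider.working and not result` is what flags a regression; a tiny sketch of that convention (values are hypothetical):

    result = "Hello! How can I help you today?"   # provider answered
    assert bool(result)                            # counts as working

    result = False                                 # create_response raised
    assert not result                              # provider is reported as failed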