Mirror of https://github.com/xtekky/gpt4free.git, synced 2025-12-06 02:30:41 -08:00
Fix GeminiPro auth for normal user
This commit is contained in:
parent
12464bfac0
commit
b80ca3b7c9
1 changed file with 10 additions and 8 deletions
@@ -34,17 +34,18 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
         if not api_key:
             raise MissingAuthError('Missing "api_key"')
 
+        headers = params = None
+        if api_base:
+            headers = {"Authorization": f"Bearer {api_key}"}
+        else:
+            params = {"key": api_key}
+
         if not api_base:
             api_base = f"https://generativelanguage.googleapis.com/v1beta"
 
         method = "streamGenerateContent" if stream else "generateContent"
         url = f"{api_base.rstrip('/')}/models/{model}:{method}"
-        headers = None
-        if api_base:
-            headers = {f"Authorization": "Bearer {api_key}"}
-        else:
-            url += f"?key={api_key}"
-
+
         async with ClientSession(headers=headers) as session:
             contents = [
                 {
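Before this hunk, api_base was defaulted before it was tested, so the else branch that appends the key as a query parameter was unreachable for a normal user, and the Bearer header put the f-prefix on the wrong string, sending the literal text {api_key}. Below is a minimal sketch of the corrected selection logic; build_auth() is a hypothetical helper written for illustration, not code from the provider, and the proxy URL is an example value.

# Minimal sketch of the auth selection introduced above; build_auth() is a
# hypothetical helper, not part of the provider.
def build_auth(api_key: str, api_base: str = None):
    headers = params = None
    if api_base:
        # Custom endpoint (e.g. a proxy): authenticate with a Bearer header.
        headers = {"Authorization": f"Bearer {api_key}"}
    else:
        # Official endpoint: the key is sent as a query parameter instead.
        params = {"key": api_key}
    if not api_base:
        api_base = "https://generativelanguage.googleapis.com/v1beta"
    return api_base, headers, params

# Default endpoint -> ('.../v1beta', None, {'key': 'MY_KEY'})
print(build_auth("MY_KEY"))
# Custom endpoint -> ('https://my-proxy.example/v1beta', {'Authorization': 'Bearer MY_KEY'}, None)
print(build_auth("MY_KEY", "https://my-proxy.example/v1beta"))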
@@ -71,10 +72,11 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
                     "topK": kwargs.get("top_k"),
                 }
             }
-            async with session.post(url, json=data, proxy=proxy) as response:
+            async with session.post(url, params=params, json=data, proxy=proxy) as response:
                 if not response.ok:
                     data = await response.json()
-                    raise RuntimeError(data[0]["error"]["message"])
+                    data = data[0] if isinstance(data, list) else data
+                    raise RuntimeError(data["error"]["message"])
                 if stream:
                     lines = []
                     async for chunk in response.content:
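With the second hunk, the key travels via params= on the POST itself instead of being string-appended to the URL, and the error body is normalized because the endpoint may answer with either a dict or a one-element list of dicts. The following is a self-contained sketch of that request path under stated assumptions: it uses only the aiohttp calls seen in the diff, the non-streaming generateContent method, a response shape assumed from the public Gemini REST API, and an illustrative model name.

# Self-contained sketch of the new request/error path; the response shape is
# assumed from the public Gemini REST API and the model name is illustrative.
import asyncio
from aiohttp import ClientSession

async def generate(api_key: str, prompt: str, model: str = "gemini-pro") -> str:
    api_base = "https://generativelanguage.googleapis.com/v1beta"
    url = f"{api_base}/models/{model}:generateContent"
    params = {"key": api_key}  # the key is now passed as a query parameter
    data = {"contents": [{"role": "user", "parts": [{"text": prompt}]}]}
    async with ClientSession() as session:
        async with session.post(url, params=params, json=data) as response:
            payload = await response.json()
            if not response.ok:
                # Error bodies arrive as a dict or a one-element list of dicts;
                # normalize before reading the message, as the diff does.
                payload = payload[0] if isinstance(payload, list) else payload
                raise RuntimeError(payload["error"]["message"])
            return payload["candidates"][0]["content"]["parts"][0]["text"]

# Example usage (requires a valid key):
# print(asyncio.run(generate("MY_KEY", "Hello")))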