mirror of
https://github.com/xtekky/gpt4free.git
synced 2025-12-06 02:30:41 -08:00
Custom api_base for GeminiPro
This commit is contained in:
parent
140a1736b9
commit
307c8f53e7
1 changed file with 16 additions and 10 deletions
|
|
@ -13,6 +13,7 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
|
|||
url = "https://ai.google.dev"
|
||||
working = True
|
||||
supports_message_history = True
|
||||
needs_auth = True
|
||||
default_model = "gemini-pro"
|
||||
models = ["gemini-pro", "gemini-pro-vision"]
|
||||
|
||||
|
|
@ -24,19 +25,24 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
|
|||
stream: bool = False,
|
||||
proxy: str = None,
|
||||
api_key: str = None,
|
||||
api_base: str = None,
|
||||
image: ImageType = None,
|
||||
**kwargs
|
||||
) -> AsyncResult:
|
||||
model = "gemini-pro-vision" if not model and image else model
|
||||
model = cls.get_model(model)
|
||||
if not api_key:
|
||||
raise MissingAuthError('Missing "api_key" for auth')
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
async with ClientSession(headers=headers) as session:
|
||||
|
||||
if not api_key and not api_base:
|
||||
raise MissingAuthError('Missing "api_key" or "api_base"')
|
||||
if not api_base:
|
||||
api_base = f"https://generativelanguage.googleapis.com/v1beta"
|
||||
|
||||
method = "streamGenerateContent" if stream else "generateContent"
|
||||
url = f"https://generativelanguage.googleapis.com/v1beta/models/{model}:{method}"
|
||||
url = f"{api_base.rstrip('/')}/models/{model}:{method}"
|
||||
if api_key:
|
||||
url += f"?key={api_key}"
|
||||
|
||||
async with ClientSession() as session:
|
||||
contents = [
|
||||
{
|
||||
"role": "model" if message["role"] == "assistant" else message["role"],
|
||||
|
|
@ -62,7 +68,7 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
|
|||
"topK": kwargs.get("top_k"),
|
||||
}
|
||||
}
|
||||
async with session.post(url, params={"key": api_key}, json=data, proxy=proxy) as response:
|
||||
async with session.post(url, json=data, proxy=proxy) as response:
|
||||
if not response.ok:
|
||||
data = await response.json()
|
||||
raise RuntimeError(data[0]["error"]["message"])
|
||||
|
|
@ -78,7 +84,7 @@ class GeminiPro(AsyncGeneratorProvider, ProviderModelMixin):
|
|||
yield data["candidates"][0]["content"]["parts"][0]["text"]
|
||||
except:
|
||||
data = data.decode() if isinstance(data, bytes) else data
|
||||
raise RuntimeError(f"Read text failed. data: {data}")
|
||||
raise RuntimeError(f"Read chunk failed. data: {data}")
|
||||
lines = []
|
||||
else:
|
||||
lines.append(chunk)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue