Merge branch 'main' into fix-arena

This commit is contained in:
Ammar 2026-04-03 23:51:28 +02:00 committed by GitHub
commit 935fc142f5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 103 additions and 121 deletions

View file

@@ -988,14 +988,14 @@ class AntigravityProvider:
parts.append({"text": content})
for tool_call in msg["tool_calls"]:
if tool_call.get("type") == "function":
parts.append(
{
"functionCall": {
"name": tool_call["function"]["name"],
"args": json.loads(tool_call["function"]["arguments"]),
}
}
)
func_call = {
"name": tool_call["function"]["name"],
"args": json.loads(tool_call["function"]["arguments"]),
}
# Restore thought_signature required by Gemini thinking models
if "thought_signature" in tool_call:
func_call["thoughtSignature"] = tool_call["thought_signature"]
parts.append({"functionCall": func_call})
# Handle string content
elif isinstance(content, str):
@@ -1276,14 +1276,18 @@ class AntigravityProvider:
# Convert Gemini tool calls to OpenAI format
openai_tool_calls = []
for i, tc in enumerate(tool_calls):
openai_tool_calls.append({
tool_call_obj = {
"id": f"call_{i}_{tc.get('name', 'unknown')}",
"type": "function",
"function": {
"name": tc.get("name"),
"arguments": json.dumps(tc.get("args", {}))
}
})
}
# Preserve thought_signature for thinking models (Gemini 2.5+)
if "thoughtSignature" in tc:
tool_call_obj["thought_signature"] = tc["thoughtSignature"]
openai_tool_calls.append(tool_call_obj)
yield ToolCalls(openai_tool_calls)
if usage_metadata:

View file

@@ -594,14 +594,14 @@ class GeminiCLIProvider():
parts.append({"text": msg["content"]})
for tool_call in msg["tool_calls"]:
if tool_call.get("type") == "function":
parts.append(
{
"functionCall": {
"name": tool_call["function"]["name"],
"args": json.loads(tool_call["function"]["arguments"]),
}
}
)
func_call = {
"name": tool_call["function"]["name"],
"args": json.loads(tool_call["function"]["arguments"]),
}
# Restore thought_signature required by Gemini thinking models
if "thought_signature" in tool_call:
func_call["thoughtSignature"] = tool_call["thought_signature"]
parts.append({"functionCall": func_call})
# Handle string content
elif isinstance(msg["content"], str):
@@ -844,14 +844,18 @@ class GeminiCLIProvider():
# Convert Gemini tool calls to OpenAI format
openai_tool_calls = []
for i, tc in enumerate(tool_calls):
openai_tool_calls.append({
tool_call_obj = {
"id": f"call_{i}_{tc.get('name', 'unknown')}",
"type": "function",
"function": {
"name": tc.get("name"),
"arguments": json.dumps(tc.get("args", {}))
}
})
}
# Preserve thought_signature for thinking models (Gemini 2.5+)
if "thoughtSignature" in tc:
tool_call_obj["thought_signature"] = tc["thoughtSignature"]
openai_tool_calls.append(tool_call_obj)
yield ToolCalls(openai_tool_calls)
if usage_metadata:
yield Usage(**usage_metadata)

File diff suppressed because one or more lines are too long

View file

@@ -28,13 +28,13 @@ from .. import debug
SAFE_PARAMETERS = [
"model", "messages", "stream", "timeout",
"proxy", "media", "response_format",
"media", "response_format",
"prompt", "negative_prompt", "tools", "conversation",
"history_disabled",
"temperature", "top_k", "top_p",
"frequency_penalty", "presence_penalty",
"max_tokens", "stop",
"api_key", "base_url", "seed", "width", "height",
"api_key", "seed", "width", "height",
"max_retries", "web_search", "cache",
"guidance_scale", "num_inference_steps", "randomize_seed",
"safe", "enhance", "private", "aspect_ratio", "n", "transparent"