Address review comments: remove hardcoded path and rename reasoning_content to reasoning

Co-authored-by: hlohaus <983577+hlohaus@users.noreply.github.com>
copilot-swe-agent[bot] 2025-08-21 14:04:07 +00:00
parent 71f540add8
commit 69ee861a07
2 changed files with 6 additions and 7 deletions

@@ -6,7 +6,6 @@ Create a comprehensive test for reasoning field standardization
 import sys
 import unittest
 import json
-sys.path.append('/home/runner/work/gpt4free/gpt4free')
 from g4f.providers.response import Reasoning
 from g4f.client.stubs import ChatCompletionDelta, ChatCompletionChunk

@@ -67,7 +67,7 @@ def iter_response(
     stop: Optional[list[str]] = None
 ) -> ChatCompletionResponseType:
     content = ""
-    reasoning_content = []
+    reasoning = []
     finish_reason = None
     tool_calls = None
     usage = None
@@ -97,7 +97,7 @@ def iter_response(
             provider = chunk
             continue
         elif isinstance(chunk, Reasoning):
-            reasoning_content.append(chunk)
+            reasoning.append(chunk)
         elif isinstance(chunk, HiddenResponse):
             continue
         elif isinstance(chunk, Exception):
@@ -145,7 +145,7 @@ def iter_response(
         content, finish_reason, completion_id, int(time.time()), usage=usage,
         **filter_none(tool_calls=[ToolCallModel.model_construct(**tool_call) for tool_call in tool_calls]) if tool_calls is not None else {},
         conversation=None if conversation is None else conversation.get_dict(),
-        reasoning=reasoning_content if reasoning_content else None
+        reasoning=reasoning if reasoning else None
     )
     if provider is not None:
         chat_completion.provider = provider.name
@@ -172,7 +172,7 @@ async def async_iter_response(
     stop: Optional[list[str]] = None
 ) -> AsyncChatCompletionResponseType:
     content = ""
-    reasoning_content = []
+    reasoning = []
     finish_reason = None
     completion_id = ''.join(random.choices(string.ascii_letters + string.digits, k=28))
     idx = 0
@@ -200,7 +200,7 @@ async def async_iter_response(
             provider = chunk
             continue
         elif isinstance(chunk, Reasoning) and not stream:
-            reasoning_content.append(chunk)
+            reasoning.append(chunk)
         elif isinstance(chunk, HiddenResponse):
             continue
         elif isinstance(chunk, Exception):
@@ -250,7 +250,7 @@ async def async_iter_response(
             tool_calls=[ToolCallModel.model_construct(**tool_call) for tool_call in tool_calls]
         ) if tool_calls is not None else {},
         conversation=conversation,
-        reasoning=reasoning_content if reasoning_content else None
+        reasoning=reasoning if reasoning else None
     )
     if provider is not None:
         chat_completion.provider = provider.name
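
For context, a minimal sketch of how the renamed field would surface to callers in a non-streaming call. This is an illustration only, assuming the g4f Client API; the model name and prompt are placeholders, and getattr is used in case a provider emits no Reasoning chunks (the field is then None).

    from g4f.client import Client

    client = Client()

    # Non-streaming call: Reasoning chunks collected by iter_response are now
    # attached to the completion object as `reasoning` (was `reasoning_content`).
    response = client.chat.completions.create(
        model="deepseek-r1",  # placeholder: any reasoning-capable model
        messages=[{"role": "user", "content": "Briefly explain your answer."}],
        stream=False,
    )

    print(response.choices[0].message.content)
    # Aggregated reasoning chunks, or None if the provider produced none.
    print(getattr(response, "reasoning", None))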