mirror of
https://github.com/xtekky/gpt4free.git
synced 2025-12-06 02:30:41 -08:00
Arm2 (#2414)
* Fix arm v7 build / improve api * Update stubs.py * Fix unit tests
This commit is contained in:
parent
4744d0b77d
commit
804a80bc7c
12 changed files with 248 additions and 219 deletions
|
|
@ -1,130 +1,150 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import Union
|
||||
from typing import Optional, List, Dict
|
||||
from time import time
|
||||
|
||||
class Model():
|
||||
...
|
||||
from .helper import filter_none
|
||||
|
||||
class ChatCompletion(Model):
|
||||
def __init__(
|
||||
self,
|
||||
try:
|
||||
from pydantic import BaseModel, Field
|
||||
except ImportError:
|
||||
class BaseModel():
|
||||
@classmethod
|
||||
def model_construct(cls, **data):
|
||||
new = cls()
|
||||
for key, value in data.items():
|
||||
setattr(new, key, value)
|
||||
return new
|
||||
class Field():
|
||||
def __init__(self, **config):
|
||||
pass
|
||||
|
||||
class ChatCompletionChunk(BaseModel):
    """One streamed chunk of a chat completion (OpenAI-compatible shape)."""
    id: str
    object: str
    created: int
    model: str
    provider: Optional[str]
    choices: List[ChatCompletionDeltaChoice]

    @classmethod
    def model_construct(
        cls,
        content: str,
        finish_reason: str,
        completion_id: Optional[str] = None,
        created: Optional[int] = None
    ):
        """Build a chunk carrying *content* as a single delta choice.

        completion_id: suffix for the ``chatcmpl-...`` id; id is None when absent.
        created: unix timestamp supplied by the caller (may be None).
        """
        return super().model_construct(
            id=f"chatcmpl-{completion_id}" if completion_id else None,
            # BUGFIX: was "chat.completion.cunk" — streamed chunks must report
            # "chat.completion.chunk" to match the OpenAI streaming API.
            object="chat.completion.chunk",
            created=created,
            model=None,
            provider=None,
            choices=[ChatCompletionDeltaChoice.model_construct(
                ChatCompletionDelta.model_construct(content),
                finish_reason
            )]
        )
|
||||
class ChatCompletionMessage(BaseModel):
    """A complete (non-streamed) chat message."""
    role: str
    content: str

    @classmethod
    def model_construct(cls, content: str):
        """Wrap *content* as an assistant-role message."""
        return super().model_construct(content=content, role="assistant")
|
||||
|
||||
class ChatCompletionChoice(BaseModel):
    """A single completion choice; these stubs always emit index 0."""
    index: int
    message: ChatCompletionMessage
    finish_reason: str

    @classmethod
    def model_construct(cls, message: ChatCompletionMessage, finish_reason: str):
        """Wrap *message* as the sole choice of a completion."""
        return super().model_construct(
            index=0,
            message=message,
            finish_reason=finish_reason,
        )
|
||||
|
||||
class ChatCompletion(BaseModel):
    """A complete (non-streamed) chat completion response (OpenAI-compatible)."""
    id: str
    object: str
    created: int
    model: str
    provider: Optional[str]
    choices: List[ChatCompletionChoice]
    usage: Dict[str, int] = Field(examples=[{
        "prompt_tokens": 0, #prompt_tokens,
        "completion_tokens": 0, #completion_tokens,
        "total_tokens": 0, #prompt_tokens + completion_tokens,
    }])

    @classmethod
    def model_construct(
        cls,
        content: str,
        finish_reason: str,
        # PEP 484: a default of None requires an Optional annotation.
        completion_id: Optional[str] = None,
        created: Optional[int] = None
    ):
        """Build a completion whose single choice wraps *content*.

        completion_id: suffix for the ``chatcmpl-...`` id; id is None when absent.
        created: unix timestamp supplied by the caller (may be None).
        """
        return super().model_construct(
            id=f"chatcmpl-{completion_id}" if completion_id else None,
            object="chat.completion",
            created=created,
            model=None,
            provider=None,
            choices=[ChatCompletionChoice.model_construct(
                ChatCompletionMessage.model_construct(content),
                finish_reason
            )],
            # Token accounting is not tracked yet; zeros keep the
            # OpenAI-compatible usage shape.
            usage={
                "prompt_tokens": 0, #prompt_tokens,
                "completion_tokens": 0, #completion_tokens,
                "total_tokens": 0, #prompt_tokens + completion_tokens,
            }
        )
|
||||
class ChatCompletionDelta(BaseModel):
    """The incremental payload of one streamed chunk."""
    role: str
    content: str

    @classmethod
    def model_construct(cls, content: Optional[str]):
        """Wrap *content* (possibly None) as an assistant-role delta."""
        return super().model_construct(content=content, role="assistant")
|
||||
class ChatCompletionDeltaChoice(BaseModel):
    """A single streamed choice; these stubs always emit index 0."""
    index: int
    delta: ChatCompletionDelta
    finish_reason: Optional[str]

    @classmethod
    def model_construct(cls, delta: ChatCompletionDelta, finish_reason: Optional[str]):
        """Wrap *delta* as the sole choice of a streamed chunk."""
        return super().model_construct(
            index=0,
            delta=delta,
            finish_reason=finish_reason,
        )
|
||||
class Image(BaseModel):
    """One generated image, referenced by URL or embedded as base64."""
    url: Optional[str]
    b64_json: Optional[str]
    revised_prompt: Optional[str]

    @classmethod
    def model_construct(cls, url: str = None, b64_json: str = None, revised_prompt: str = None):
        """Build an image, keeping only the fields that were actually given."""
        provided = filter_none(
            url=url,
            b64_json=b64_json,
            revised_prompt=revised_prompt
        )
        return super().model_construct(**provided)
|
||||
|
||||
class ImagesResponse(BaseModel):
    """Response wrapper for an image-generation request (OpenAI-compatible)."""
    # List[...] for consistency with the other models in this file.
    data: List[Image]
    model: str
    provider: str
    created: int

    @classmethod
    def model_construct(
        cls,
        data: List[Image],
        # PEP 484: a default of None requires an Optional annotation.
        created: Optional[int] = None,
        model: Optional[str] = None,
        provider: Optional[str] = None
    ):
        """Wrap *data*; when *created* is omitted, stamp with the current time."""
        if created is None:
            created = int(time())
        return super().model_construct(
            data=data,
            model=model,
            provider=provider,
            created=created
        )
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue