Update (g4f/models.py g4f/Provider/airforce/AirforceChat.py docs/providers-and-models.md)

kqlio67 2024-11-11 20:21:03 +02:00
parent 562a5c9579
commit 82b8c22b0b
4 changed files with 9 additions and 17 deletions

docs/providers-and-models.md

@@ -20,7 +20,7 @@ This document provides an overview of various AI providers and models, including
|[ai4chat.co](https://www.ai4chat.co)|`g4f.Provider.Ai4Chat`|`gpt-4`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[aichatfree.info](https://aichatfree.info)|`g4f.Provider.AIChatFree`|`gemini-pro`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[aimathgpt.forit.ai](https://aimathgpt.forit.ai)|`g4f.Provider.AiMathGPT`|`llama-3.1-70b`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
-|[api.airforce](https://api.airforce)|`g4f.Provider.Airforce`|`gpt-4o, gpt-4o-mini, gpt-4-turbo, llama-2-7b, llama-3.1-8b, llama-3.1-70b, hermes-2-pro, hermes-2-dpo, phi-2, deepseek-coder, openchat-3.5, openhermes-2.5, cosmosrp, lfm-40b, german-7b, zephyr-7b, neural-7b`|`flux, flux-realism', flux-anime, flux-3d, flux-disney, flux-pixel, flux-4o, any-dark, sdxl`|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
+|[api.airforce](https://api.airforce)|`g4f.Provider.Airforce`|`gpt-4o, gpt-4o-mini, gpt-4-turbo, llama-2-7b, llama-3.1-8b, llama-3.1-70b, hermes-2-pro, hermes-2-dpo, phi-2, deepseek-coder, openchat-3.5, openhermes-2.5, lfm-40b, german-7b, zephyr-7b, neural-7b`|`flux, flux-realism', flux-anime, flux-3d, flux-disney, flux-pixel, flux-4o, any-dark, sdxl`|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[aiuncensored.info](https://www.aiuncensored.info)|`g4f.Provider.AIUncensored`|✔|✔|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[allyfy.chat](https://allyfy.chat/)|`g4f.Provider.Allyfy`|`gpt-3.5-turbo`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[bing.com](https://bing.com/chat)|`g4f.Provider.Bing`|`gpt-4`|✔|`gpt-4-vision`|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌+✔|
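
For reference, the chat models listed in the Airforce row above are reached through g4f's standard `ChatCompletion` interface. The snippet below is a minimal illustrative sketch, not part of this commit; the model name and prompt are arbitrary choices from the table.

```python
import g4f

# Minimal sketch: query the Airforce provider with one of the chat models
# listed in the table above. Model name and prompt are illustrative only.
response = g4f.ChatCompletion.create(
    model="llama-3.1-70b",
    provider=g4f.Provider.Airforce,
    messages=[{"role": "user", "content": "Hello, how are you?"}],
)
print(response)
```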

g4f/Provider/Airforce.py

@@ -20,7 +20,7 @@ class Airforce(AsyncGeneratorProvider, ProviderModelMixin):
    supports_message_history = AirforceChat.supports_message_history
    default_model = AirforceChat.default_model
-    models = [*AirforceChat.text_models, *AirforceImage.image_models]
+    models = [*AirforceChat.models, *AirforceImage.models]
    model_aliases = {
        **AirforceChat.model_aliases,

g4f/Provider/airforce/AirforceChat.py

@@ -1,8 +1,8 @@
from __future__ import annotations
import re
import json
-from aiohttp import ClientSession
import requests
+from aiohttp import ClientSession
from typing import List
from ...typing import AsyncResult, Messages
@@ -21,7 +21,11 @@ def clean_response(text: str) -> str:
    ]
    for pattern in patterns:
        text = re.sub(pattern, '', text)
-    return text.strip()
+    # Remove the <|im_end|> token if present
+    text = text.replace("<|im_end|>", "").strip()
+    return text
def split_message(message: str, max_length: int = 1000) -> List[str]:
    """Splits the message into chunks of a given length (max_length)"""

g4f/models.py

@@ -463,15 +463,6 @@ openhermes_2_5 = Model(
    best_provider = Airforce
)
-### Pawan ###
-cosmosrp = Model(
-    name = 'cosmosrp',
-    base_provider = 'Pawan',
-    best_provider = Airforce
-)
### Liquid ###
lfm_40b = Model(
    name = 'lfm-40b',
@@ -666,6 +657,7 @@ class ModelUtils:
        ### Microsoft ###
        'phi-2': phi_2,
        'phi-3.5-mini': phi_3_5_mini,
@@ -764,10 +756,6 @@ class ModelUtils:
        ### Teknium ###
        'openhermes-2.5': openhermes_2_5,
-        ### Pawan ###
-        'cosmosrp': cosmosrp,
        ### Liquid ###
        'lfm-40b': lfm_40b,
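
The two ModelUtils hunks above edit the name-to-model registry, so after this commit the `'cosmosrp'` key no longer resolves. A small usage sketch of that registry follows; the lookup key is chosen arbitrarily from the remaining entries.

```python
from g4f.models import ModelUtils

# ModelUtils.convert maps a model name string to its Model definition;
# the 'cosmosrp' entry is removed from this mapping by the commit above.
model = ModelUtils.convert['lfm-40b']
print(model.name, model.base_provider, model.best_provider)
```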