Mirror of https://github.com/xtekky/gpt4free.git
Add CohereForAI provider
Updates for You.com provider
Add default system prompt in UI
Expose noVNC port in Docker

commit 2064bb7355 (parent 92005dacfc)

11 changed files with 212 additions and 76 deletions
@@ -84,4 +84,4 @@ RUN pip install --break-system-packages --upgrade pip \
 ADD --chown=$G4F_USER:$G4F_USER g4f $G4F_DIR/g4f

 # Expose ports
-EXPOSE 8080 1337
+EXPOSE 8080 1337 7900

@@ -10,7 +10,7 @@ from .helper import format_prompt
 from ..image import ImageResponse, ImagePreview, EXTENSIONS_MAP, to_bytes, is_accepted_format
 from ..requests import StreamSession, FormData, raise_for_status, get_nodriver
 from ..cookies import get_cookies
-from ..errors import MissingRequirementsError
+from ..errors import MissingRequirementsError, ResponseError
 from .. import debug

 class You(AsyncGeneratorProvider, ProviderModelMixin):

@@ -23,18 +23,19 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
     models = [
         default_model,
         "gpt-4o",
+        "gpt-4o-mini",
         "gpt-4-turbo",
-        "gpt-4",
+        "grok-2",
         "claude-3.5-sonnet",
+        "claude-3.5-haiku",
         "claude-3-opus",
         "claude-3-sonnet",
         "claude-3-haiku",
-        "claude-2",
+        "llama-3.3-70b",
         "llama-3.1-70b",
         "llama-3",
         "gemini-1-5-flash",
         "gemini-1-5-pro",
-        "gemini-1-0-pro",
         "databricks-dbrx-instruct",
         "command-r",
         "command-r-plus",

@@ -105,19 +106,14 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
                 "conversationTurnId": str(uuid.uuid4()),
                 "chatId": str(uuid.uuid4()),
             }
-            params = {
-                "userFiles": upload,
-                "selectedChatMode": chat_mode,
-            }
             if chat_mode == "custom":
                 if debug.logging:
                     print(f"You model: {model}")
-                params["selectedAiModel"] = model.replace("-", "_")
+                data["selectedAiModel"] = model.replace("-", "_")

-            async with (session.post if chat_mode == "default" else session.get)(
+            async with session.get(
                 f"{cls.url}/api/streamingSearch",
-                data=data if chat_mode == "default" else None,
-                params=params if chat_mode == "default" else data,
+                params=data,
                 headers=headers,
                 cookies=cookies
             ) as response:

@@ -126,9 +122,13 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
                 if line.startswith(b'event: '):
                     event = line[7:].decode()
                 elif line.startswith(b'data: '):
+                    if event == "error":
+                        raise ResponseError(line[6:])
                     if event in ["youChatUpdate", "youChatToken"]:
                         data = json.loads(line[6:])
                         if event == "youChatToken" and event in data and data[event]:
+                            if data[event].startswith("#### You\'ve hit your free quota for the Model Agent. For more usage of the Model Agent, learn more at:"):
+                                continue
                             yield data[event]
                         elif event == "youChatUpdate" and "t" in data and data["t"]:
                             if chat_mode == "create":

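Reviewer note (not part of the diff): with the new "error" event handling, the You provider raises ResponseError instead of streaming the error text to the caller. A minimal consumption sketch, assuming You is exported from g4f.Provider and ResponseError from g4f.errors as in the imports above; the provider may still require valid cookies or a browser session at runtime:

    import asyncio
    from g4f.errors import ResponseError
    from g4f.Provider import You

    async def demo():
        messages = [{"role": "user", "content": "Hello"}]
        try:
            # Stream text chunks from /api/streamingSearch; non-string chunks
            # (e.g. image previews) are skipped in this sketch.
            async for chunk in You.create_async_generator("gpt-4o", messages):
                if isinstance(chunk, str):
                    print(chunk, end="", flush=True)
        except ResponseError as e:
            # Raised when the stream sends an "error" event (see hunk above).
            print(f"\nYou.com returned an error: {e}")

    asyncio.run(demo())
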
g4f/Provider/hf_space/CohereForAI.py (new file, 95 lines)
@@ -0,0 +1,95 @@
+from __future__ import annotations
+
+import json
+import uuid
+from aiohttp import ClientSession, FormData
+
+from ...typing import AsyncResult, Messages
+from ...requests import raise_for_status
+from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ..helper import format_prompt
+from ...providers.response import JsonConversation, TitleGeneration
+
+class CohereForAI(AsyncGeneratorProvider, ProviderModelMixin):
+    url = "https://cohereforai-c4ai-command.hf.space"
+    conversation_url = f"{url}/conversation"
+
+    working = True
+
+    default_model = "command-r-plus-08-2024"
+    models = [
+        default_model,
+        "command-r-08-2024",
+        "command-r-plus",
+        "command-r",
+        "command-r7b-12-2024",
+    ]
+
+    @classmethod
+    async def create_async_generator(
+        cls, model: str, messages: Messages,
+        api_key: str = None,
+        proxy: str = None,
+        conversation: JsonConversation = None,
+        return_conversation: bool = False,
+        **kwargs
+    ) -> AsyncResult:
+        model = cls.get_model(model)
+        headers = {
+            "Origin": cls.url,
+            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:133.0) Gecko/20100101 Firefox/133.0",
+            "Accept": "*/*",
+            "Accept-Language": "en-US,en;q=0.5",
+            "Referer": "https://cohereforai-c4ai-command.hf.space/",
+            "Sec-Fetch-Dest": "empty",
+            "Sec-Fetch-Mode": "cors",
+            "Sec-Fetch-Site": "same-origin",
+            "Priority": "u=4",
+        }
+        if api_key is not None:
+            headers["Authorization"] = f"Bearer {api_key}"
+        async with ClientSession(
+            headers=headers,
+            cookies=None if conversation is None else conversation.cookies
+        ) as session:
+            system_prompt = "\n".join([message["content"] for message in messages if message["role"] == "system"])
+            messages = [message for message in messages if message["role"] != "system"]
+            inputs = format_prompt(messages) if conversation is None else messages[-1]["content"]
+            if conversation is None or conversation.model != model or conversation.preprompt != system_prompt:
+                data = {"model": model, "preprompt": system_prompt}
+                async with session.post(cls.conversation_url, json=data, proxy=proxy) as response:
+                    await raise_for_status(response)
+                    conversation = JsonConversation(
+                        **await response.json(),
+                        **data,
+                        cookies={n: c.value for n, c in response.cookies.items()}
+                    )
+                    if return_conversation:
+                        yield conversation
+            async with session.get(f"{cls.conversation_url}/{conversation.conversationId}/__data.json?x-sveltekit-invalidated=11", proxy=proxy) as response:
+                await raise_for_status(response)
+                node = json.loads((await response.text()).splitlines()[0])["nodes"][1]
+                if node["type"] == "error":
+                    raise RuntimeError(node["error"])
+                data = node["data"]
+                message_id = data[data[data[data[0]["messages"]][-1]]["id"]]
+            data = FormData()
+            inputs = messages[-1]["content"]
+            data.add_field(
+                "data",
+                json.dumps({"inputs": inputs, "id": message_id, "is_retry": False, "is_continue": False, "web_search": False, "tools": []}),
+                content_type="application/json"
+            )
+            async with session.post(f"{cls.conversation_url}/{conversation.conversationId}", data=data, proxy=proxy) as response:
+                await raise_for_status(response)
+                async for chunk in response.content:
+                    try:
+                        data = json.loads(chunk)
+                    except (json.JSONDecodeError) as e:
+                        raise RuntimeError(f"Failed to read response: {chunk.decode(errors='replace')}", e)
+                    if data["type"] == "stream":
+                        yield data["token"].replace("\u0000", "")
+                    elif data["type"] == "title":
+                        yield TitleGeneration(data["title"])
+                    elif data["type"] == "finalAnswer":
+                        break

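Reviewer note (not part of the diff): a minimal usage sketch for the new provider, assuming the module path added above (g4f/Provider/hf_space/CohereForAI.py) and that text tokens are yielded as plain strings while conversation titles arrive as TitleGeneration objects:

    import asyncio
    from g4f.Provider.hf_space.CohereForAI import CohereForAI

    async def demo():
        messages = [
            {"role": "system", "content": "Answer briefly."},
            {"role": "user", "content": "Summarize what Command R+ is."},
        ]
        # Streams tokens from the cohereforai-c4ai-command Hugging Face Space.
        async for chunk in CohereForAI.create_async_generator("command-r-plus", messages):
            if isinstance(chunk, str):
                print(chunk, end="", flush=True)

    asyncio.run(demo())
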
@@ -8,6 +8,7 @@ from .BlackForestLabsFlux1Dev import BlackForestLabsFlux1Dev
 from .BlackForestLabsFlux1Schnell import BlackForestLabsFlux1Schnell
 from .VoodoohopFlux1Schnell import VoodoohopFlux1Schnell
 from .StableDiffusion35Large import StableDiffusion35Large
+from .CohereForAI import CohereForAI
 from .Qwen_QVQ_72B import Qwen_QVQ_72B

 class HuggingSpace(AsyncGeneratorProvider, ProviderModelMixin):

@@ -16,7 +17,7 @@ class HuggingSpace(AsyncGeneratorProvider, ProviderModelMixin):
     working = True
     default_model = BlackForestLabsFlux1Dev.default_model
     default_vision_model = Qwen_QVQ_72B.default_model
-    providers = [BlackForestLabsFlux1Dev, BlackForestLabsFlux1Schnell, VoodoohopFlux1Schnell, StableDiffusion35Large, Qwen_QVQ_72B]
+    providers = [BlackForestLabsFlux1Dev, BlackForestLabsFlux1Schnell, VoodoohopFlux1Schnell, StableDiffusion35Large, CohereForAI, Qwen_QVQ_72B]

     @classmethod
     def get_parameters(cls, **kwargs) -> dict:

@@ -28,11 +29,13 @@ class HuggingSpace(AsyncGeneratorProvider, ProviderModelMixin):
     @classmethod
     def get_models(cls, **kwargs) -> list[str]:
         if not cls.models:
+            models = []
             for provider in cls.providers:
-                cls.models.extend(provider.get_models(**kwargs))
-                cls.models.extend(provider.model_aliases.keys())
-            cls.models = list(set(cls.models))
-            cls.models.sort()
+                models.extend(provider.get_models(**kwargs))
+                models.extend(provider.model_aliases.keys())
+            models = list(set(models))
+            models.sort()
+            cls.models = models
         return cls.models

     @classmethod

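Reviewer note (not part of the diff): the refactor above collects models into a local list and assigns cls.models only once at the end, so a failing provider.get_models() call can no longer leave the class-level cache partially filled. A hypothetical standalone illustration of the same build-then-publish pattern:

    def cached_models(cls, providers):
        # Build locally, publish once: cls.models stays empty if any call fails.
        if not cls.models:
            models = []
            for provider in providers:
                models.extend(provider.get_models())
                models.extend(provider.model_aliases.keys())
            cls.models = sorted(set(models))
        return cls.models
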
@@ -28,15 +28,16 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
     def get_models(cls) -> list[str]:
         if not cls.models:
             url = "https://huggingface.co/api/models?inference=warm&pipeline_tag=text-generation"
-            cls.models = [model["id"] for model in requests.get(url).json()]
-            cls.models.append("meta-llama/Llama-3.2-11B-Vision-Instruct")
-            cls.models.append("nvidia/Llama-3.1-Nemotron-70B-Instruct-HF")
-            cls.models.sort()
+            models = [model["id"] for model in requests.get(url).json()]
+            models.append("meta-llama/Llama-3.2-11B-Vision-Instruct")
+            models.append("nvidia/Llama-3.1-Nemotron-70B-Instruct-HF")
+            models.sort()
             if not cls.image_models:
                 url = "https://huggingface.co/api/models?pipeline_tag=text-to-image"
                 cls.image_models = [model["id"] for model in requests.get(url).json() if model["trendingScore"] >= 20]
                 cls.image_models.sort()
-            cls.models.extend(cls.image_models)
+            models.extend(cls.image_models)
+            cls.models = models
         return cls.models

     @classmethod

@@ -25,7 +25,6 @@ from .Poe import Poe
 from .Raycast import Raycast
 from .Reka import Reka
 from .Replicate import Replicate
-from .Theb import Theb
 from .ThebApi import ThebApi
 from .WhiteRabbitNeo import WhiteRabbitNeo
 from .xAI import xAI

@@ -14,4 +14,5 @@ from .Koala import Koala
 from .MagickPen import MagickPen
 from .MyShell import MyShell
 from .RobocodersAPI import RobocodersAPI
+from .Theb import Theb
 from .Upstage import Upstage

@@ -142,6 +142,10 @@
             <input type="checkbox" id="refine"/>
             <label for="refine" class="toogle" title=""></label>
         </div>
+        <div class="field box">
+            <label for="systemPrompt" class="label" title="">Default for System prompt</label>
+            <textarea id="systemPrompt" placeholder="You are a helpful assistant."></textarea>
+        </div>
         <div class="field box">
             <label for="message-input-height" class="label" title="">Input max. height</label>
             <input type="number" id="message-input-height" value="200"/>

@@ -149,6 +153,9 @@
         <div class="field box">
             <label for="recognition-language" class="label" title="">Speech recognition language</label>
             <input type="text" id="recognition-language" value="" placeholder="navigator.language"/>
+            <script>
+                document.getElementById('recognition-language').placeholder = navigator.language;
+            </script>
         </div>
         <div class="field box hidden">
             <label for="BingCreateImages-api_key" class="label" title="">Microsoft Designer in Bing:</label>

@@ -179,7 +186,7 @@
             </div>
         </div>
         <div class="conversation">
-            <textarea id="systemPrompt" class="box" placeholder="System prompt"></textarea>
+            <textarea id="chatPrompt" class="box" placeholder="System prompt"></textarea>
             <div id="messages" class="box"></div>
             <button class="slide-systemPrompt">
                 <i class="fa-solid fa-angles-up"></i>

@@ -859,19 +859,22 @@ select:hover,
     display: none;
 }

-#systemPrompt, .settings textarea, form textarea {
+#chatPrompt{
+    min-height: 59px;
+    height: 59px;
+    resize: vertical;
+    padding: var(--inner-gap) var(--section-gap);
+}
+
+#systemPrompt, #chatPrompt, .settings textarea, form textarea {
     font-size: 15px;
     width: 100%;
     color: var(--colour-3);
-    height: 59px;
     outline: none;
-    padding: var(--inner-gap) var(--section-gap);
-    resize: vertical;
-    min-height: 59px;
     transition: max-height 0.15s ease-out;
 }

-#systemPrompt:focus {
+#systemPrompt:focus, #chatPrompt:focus {
     min-height: 200px;
     max-height: 1000px;
     transition: max-height 0.25s ease-in;

@@ -940,7 +943,7 @@ select:hover,
     body:not(.white) .gradient{
         display: block;
     }
-    .settings .label, form .label, .settings label, form label {
+    .settings .label:not([for="systemPrompt"]), form .label {
         min-width: 200px;
     }
 }

@@ -1126,8 +1129,8 @@ ul {
     display: flex;
 }

-#systemPrompt::placeholder {
-    text-align: center;
+#chatPrompt {
+    padding-left: 60px;
 }

 .settings h3 {

@@ -1423,7 +1426,7 @@ form .field.saved .fa-xmark {
 }

 @media print {
-    #systemPrompt:placeholder-shown,
+    #chatPrompt:placeholder-shown,
     .conversations,
     .conversation .user-input,
     .conversation .buttons,

@@ -15,7 +15,7 @@ const inputCount = document.getElementById("input-count").querySelector("
 const providerSelect = document.getElementById("provider");
 const modelSelect = document.getElementById("model");
 const modelProvider = document.getElementById("model2");
-const systemPrompt = document.getElementById("systemPrompt");
+const chatPrompt = document.getElementById("chatPrompt");
 const settings = document.querySelector(".settings");
 const chat = document.querySelector(".conversation");
 const album = document.querySelector(".images");

@@ -486,10 +486,10 @@ const prepare_messages = (messages, message_index = -1, do_continue = false, do_

     // Insert system prompt as first message
     new_messages = [];
-    if (systemPrompt?.value) {
+    if (chatPrompt?.value) {
         new_messages.push({
             "role": "system",
-            "content": systemPrompt.value
+            "content": chatPrompt.value
         });
     }

@@ -625,7 +625,6 @@ async function load_provider_parameters(provider) {
             } else if (["api_key", "proof_token"].includes(key)) {
                 input_el.value = input_el.dataset.text;
             }
-            input_el.style.removeProperty("height");
             input_el.style.height = (input_el.scrollHeight) + "px";
         }
         input_el.onblur = () => {

@@ -822,7 +821,6 @@ const ask_gpt = async (message_id, message_index = -1, regenerate = false, provi
             highlight(content_map.inner);
             if (imageInput) imageInput.value = "";
             if (cameraInput) cameraInput.value = "";
-            if (fileInput) fileInput.value = "";
         }
     } catch (e) {
         console.error(e);

@@ -984,8 +982,8 @@ const new_conversation = async () => {
     document.title = window.title || document.title;

     await clear_conversation();
-    if (systemPrompt) {
-        systemPrompt.value = "";
+    if (chatPrompt) {
+        chatPrompt.value = document.getElementById("systemPrompt")?.value;
     }
     load_conversations();
     hide_sidebar();

@@ -1002,6 +1000,8 @@ function merge_messages(message1, message2) {
         }
     } else if (newContent.startsWith("...")) {
         newContent = " " + newContent.substring(3);
+    } else if (newContent.startsWith(message1)) {
+        newContent = newContent.substring(message1.length);
     } else {
         // Remove duplicate lines
         let lines = message1.trim().split("\n");

@@ -1014,9 +1014,8 @@ function merge_messages(message1, message2) {
         }
         if (foundLastLine != -1) {
             newContent = newContent.substring(foundLastLine + lastLine.length);
-        }
-        // Remove duplicate words
-        if (foundLastLine == -1 && newContent.indexOf(" ") > 0) {
+        } // Remove duplicate words
+        else if (newContent.indexOf(" ") > 0) {
             let words = message1.trim().split(" ");
             let lastWord = words[words.length - 1];
             if (newContent.startsWith(lastWord)) {

@@ -1049,8 +1048,8 @@ const load_conversation = async (conversation_id, scroll=true) => {
         document.title = title;
     }

-    if (systemPrompt) {
-        systemPrompt.value = conversation.system || "";
+    if (chatPrompt) {
+        chatPrompt.value = conversation.system || "";
     }

     let elements = [];

@@ -1066,7 +1065,8 @@ const load_conversation = async (conversation_id, scroll=true) => {
                 buffer = "";
             }
             buffer = buffer.replace(/ \[aborted\]$/g, "").replace(/ \[error\]$/g, "");
-            buffer += merge_messages(buffer, item.content);
+            new_content = item.content.replace(/ \[aborted\]$/g, "").replace(/ \[error\]$/g, "");
+            buffer += merge_messages(buffer, new_content);
             last_model = item.provider?.model;
             providers.push(item.provider?.name);
             let next_i = parseInt(i) + 1;

@@ -1182,7 +1182,7 @@ const load_conversation = async (conversation_id, scroll=true) => {
         last_model = last_model?.startsWith("gpt-3") ? "gpt-3.5-turbo" : "gpt-4"
         let count_total = GPTTokenizer_cl100k_base?.encodeChat(filtered, last_model).length
         if (count_total > 0) {
-            elements.push(`<div class="count_total">(${count_total} tokens used)</div>`);
+            elements.push(`<div class="count_total">(${count_total} total tokens)</div>`);
         }
     }
 }

@@ -1243,7 +1243,7 @@ async function add_conversation(conversation_id) {
         id: conversation_id,
         title: "",
         added: Date.now(),
-        system: systemPrompt?.value,
+        system: chatPrompt?.value,
         items: [],
     });
 }

@@ -1260,7 +1260,7 @@ async function save_system_message() {
     }
     const conversation = await get_conversation(window.conversation_id);
     if (conversation) {
-        conversation.system = systemPrompt?.value;
+        conversation.system = chatPrompt?.value;
         await save_conversation(window.conversation_id, conversation);
     }
 }

@@ -1433,6 +1433,8 @@ window.addEventListener('popstate', hide_sidebar, false);

 sidebar_button.addEventListener("click", async () => {
     settings.classList.add("hidden");
+    let provider_forms = document.querySelectorAll(".provider_forms from");
+    Array.from(provider_forms).forEach((form) => form.classList.add("hidden"));
     if (sidebar.classList.contains("shown")) {
         await hide_sidebar();
     } else {

@@ -1459,37 +1461,55 @@ function open_settings() {
 const register_settings_storage = async () => {
     const optionElements = document.querySelectorAll(optionElementsSelector);
     optionElements.forEach((element) => {
-        element.name = element.name || element.id;
         if (element.type == "textarea") {
             element.addEventListener('input', async (event) => {
-                appStorage.setItem(element.name, element.value);
+                appStorage.setItem(element.id, element.value);
             });
         } else {
             element.addEventListener('change', async (event) => {
                 switch (element.type) {
                     case "checkbox":
-                        appStorage.setItem(element.name, element.checked);
+                        appStorage.setItem(element.id, element.checked);
                         break;
                     case "select-one":
-                        appStorage.setItem(element.name, element.value);
+                        appStorage.setItem(element.id, element.value);
                         break;
                     case "text":
                     case "number":
-                        appStorage.setItem(element.name, element.value);
+                        appStorage.setItem(element.id, element.value);
                         break;
                     default:
                         console.warn("Unresolved element type");
                 }
             });
         }
+        if (element.id.endsWith("-api_key")) {
+            element.addEventListener('focus', async (event) => {
+                if (element.dataset.value) {
+                    element.value = element.dataset.value
+                }
+            });
+            element.addEventListener('blur', async (event) => {
+                element.dataset.value = element.value;
+                if (element.value) {
+                    element.placeholder = element.value && element.value.length >= 22 ? (element.value.substring(0, 12)+"*".repeat(12)+element.value.substring(element.value.length-12)) : "*".repeat(element.value.length);
+                } else if (element.placeholder != "api_key") {
+                    element.placeholder = "";
+                }
+                element.value = ""
+            });
+        }
     });
 }

 const load_settings_storage = async () => {
     const optionElements = document.querySelectorAll(optionElementsSelector);
     optionElements.forEach((element) => {
-        element.name = element.name || element.id;
-        if (!(value = appStorage.getItem(element.name))) {
+        if (element.name && element.name != element.id && (value = appStorage.getItem(element.name))) {
+            appStorage.setItem(element.id, value);
+            appStorage.removeItem(element.name);
+        }
+        if (!(value = appStorage.getItem(element.id))) {
             return;
         }
         if (value) {

@@ -1503,8 +1523,12 @@ const load_settings_storage = async () => {
             case "text":
             case "number":
             case "textarea":
+                if (element.id.endsWith("-api_key")) {
+                    element.placeholder = value && value.length >= 22 ? (value.substring(0, 12)+"*".repeat(12)+value.substring(value.length-12)) : "*".repeat(value.length);
+                    element.dataset.value = value;
+                } else {
                 element.value = value;
-                break;
+                }
             default:
                 console.warn("Unresolved element type");
         }

@@ -1608,12 +1632,12 @@ const count_input = async () => {
     }
 };
 messageInput.addEventListener("keyup", count_input);
-systemPrompt.addEventListener("keyup", count_input);
-systemPrompt.addEventListener("focus", function() {
-    countFocus = systemPrompt;
+chatPrompt.addEventListener("keyup", count_input);
+chatPrompt.addEventListener("focus", function() {
+    countFocus = chatPrompt;
     count_input();
 });
-systemPrompt.addEventListener("input", function() {
+chatPrompt.addEventListener("input", function() {
     countFocus = messageInput;
     count_input();
 });

@@ -1636,6 +1660,7 @@ async function on_load() {
         load_conversation(window.conversation_id);
     } else {
         say_hello()
+        chatPrompt.value = document.getElementById("systemPrompt")?.value || "";
     }
     load_conversations();
 }

@@ -1746,7 +1771,6 @@ async function on_api() {
         option = document.createElement("div");
         option.classList.add("field", "box", "hidden");
         childs = childs.map((child)=>`${child}-api_key`).join(" ");
-        console.log(childs);
         option.innerHTML = `
             <label for="${name}-api_key" class="label" title="">${label}:</label>
             <input type="text" id="${name}-api_key" name="${name}[api_key]" class="${childs}" placeholder="api_key"/>

@@ -1785,21 +1809,21 @@ async function on_api() {
     const hide_systemPrompt = document.getElementById("hide-systemPrompt")
     const slide_systemPrompt_icon = document.querySelector(".slide-systemPrompt i");
     if (hide_systemPrompt.checked) {
-        systemPrompt.classList.add("hidden");
+        chatPrompt.classList.add("hidden");
         slide_systemPrompt_icon.classList.remove("fa-angles-up");
         slide_systemPrompt_icon.classList.add("fa-angles-down");
     }
     hide_systemPrompt.addEventListener('change', async (event) => {
         if (event.target.checked) {
-            systemPrompt.classList.add("hidden");
+            chatPrompt.classList.add("hidden");
         } else {
-            systemPrompt.classList.remove("hidden");
+            chatPrompt.classList.remove("hidden");
         }
     });
     document.querySelector(".slide-systemPrompt")?.addEventListener("click", () => {
         hide_systemPrompt.click();
         const checked = hide_systemPrompt.checked;
-        systemPrompt.classList[checked ? "add": "remove"]("hidden");
+        chatPrompt.classList[checked ? "add": "remove"]("hidden");
         slide_systemPrompt_icon.classList[checked ? "remove": "add"]("fa-angles-up");
         slide_systemPrompt_icon.classList[checked ? "add": "remove"]("fa-angles-down");
     });

@@ -1993,7 +2017,7 @@ fileInput.addEventListener('change', async (event) => {
     }
 });

-systemPrompt?.addEventListener("input", async () => {
+chatPrompt?.addEventListener("input", async () => {
     await save_system_message();
 });

@@ -2072,9 +2096,12 @@ async function read_response(response, message_id, provider, scroll) {
 function get_api_key_by_provider(provider) {
     let api_key = null;
     if (provider) {
-        api_key = document.getElementById(`${provider}-api_key`)?.value || null;
+        api_key = document.getElementById(`${provider}-api_key`)?.id || null;
         if (api_key == null) {
-            api_key = document.querySelector(`.${provider}-api_key`)?.value || null;
+            api_key = document.querySelector(`.${provider}-api_key`)?.id || null;
+        }
+        if (api_key) {
+            api_key = appStorage.getItem(api_key);
         }
     }
     return api_key;