Merge pull request #2876 from hlohaus/30Mar

Fix generating images. Add headers / styling from @kqlio67 in UI
Authored by H Lohaus, committed via GitHub on 2025-04-01 19:34:20 +02:00 · commit b220056af7
15 changed files with 214 additions and 96 deletions


@@ -187,7 +187,7 @@ class ARTA(AsyncGeneratorProvider, ProviderModelMixin):
     image_urls = [image["url"] for image in status_data.get("response", [])]
     duration = time.time() - start_time
     yield Reasoning(label="Generated", status=f"{n} image(s) in {duration:.2f}s")
-    yield ImageResponse(images=image_urls, alt=prompt)
+    yield ImageResponse(urls=image_urls, alt=prompt)
     return
 elif status in ("IN_QUEUE", "IN_PROGRESS"):
     if last_status != status:
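The same keyword rename, images= to urls=, repeats in the Blackbox, ImageLabs, Websim, Voodoohop_Flux1Schnell and Airforce hunks below. A minimal sketch of the new call shape, assuming ImageResponse is importable from g4f.providers.response as in this commit; the prompt and URL values are hypothetical:

    from g4f.providers.response import ImageResponse

    prompt = "a watercolor fox"                    # hypothetical prompt
    image_urls = ["https://example.com/fox.png"]   # hypothetical generated URLs

    # Providers now pass the result URLs via urls= instead of images=
    response = ImageResponse(urls=image_urls, alt=prompt)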


@@ -639,7 +639,7 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
 image_url_match = re.search(r'!\[.*?\]\((.*?)\)', full_response_text)
 if image_url_match:
     image_url = image_url_match.group(1)
-    yield ImageResponse(images=[image_url], alt=format_image_prompt(messages, prompt))
+    yield ImageResponse(urls=[image_url], alt=format_image_prompt(messages, prompt))
     return
 # Handle conversation history once, in one place


@@ -78,7 +78,7 @@ class ImageLabs(AsyncGeneratorProvider, ProviderModelMixin):
 if progress_data.get('status') == 'Done' or progress_data.get('final_image_url'):
     # Yield ImageResponse with the final image URL
     yield ImageResponse(
-        images=[progress_data.get('final_image_url')],
+        urls=[progress_data.get('final_image_url')],
         alt=prompt
     )
     break


@@ -12,7 +12,7 @@ from .helper import filter_none, format_image_prompt
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
 from ..typing import AsyncResult, Messages, MediaListType
 from ..image import is_data_an_audio
-from ..errors import ModelNotFoundError
+from ..errors import ModelNotFoundError, ResponseError
 from ..requests.raise_for_status import raise_for_status
 from ..requests.aiohttp import get_connector
 from ..image.copy_images import save_response_media
@@ -279,7 +279,7 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
     **kwargs
 ) -> AsyncResult:
     if not cache and seed is None:
-        seed = random.randint(9999, 99999999)
+        seed = random.randint(0, 2**32)
     json_mode = False
     if response_format and response_format.get("type") == "json_object":
         json_mode = True
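A brief note on the widened seed range above, as a standalone sketch using only the standard random module (no provider code): random.randint is inclusive at both ends, so the new call draws from the full 0..2**32 range rather than the old narrow 9999..99999999 band.

    import random

    # random.randint(a, b) includes both endpoints, so this covers 0..2**32.
    seed = random.randint(0, 2**32)
    assert 0 <= seed <= 2**32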
@@ -318,31 +318,30 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
         if line[6:].startswith(b"[DONE]"):
             break
         result = json.loads(line[6:])
+        if "usage" in result:
+            yield Usage(**result["usage"])
         choices = result.get("choices", [{}])
         choice = choices.pop() if choices else {}
         content = choice.get("delta", {}).get("content")
         if content:
             yield content
-        if "usage" in result:
-            yield Usage(**result["usage"])
         finish_reason = choice.get("finish_reason")
         if finish_reason:
             yield FinishReason(finish_reason)
     return
 result = await response.json()
-choice = result["choices"][0]
-message = choice.get("message", {})
-content = message.get("content", "")
-if "tool_calls" in message:
-    yield ToolCalls(message["tool_calls"])
-if content:
-    yield content
+if "choices" in result:
+    choice = result["choices"][0]
+    message = choice.get("message", {})
+    content = message.get("content", "")
+    if content:
+        yield content
+    if "tool_calls" in message:
+        yield ToolCalls(message["tool_calls"])
+else:
+    raise ResponseError(result)
 if "usage" in result:
     yield Usage(**result["usage"])
 finish_reason = choice.get("finish_reason")
 if finish_reason:
     yield FinishReason(finish_reason)
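The reworked non-streaming branch above now guards on "choices" and raises on error payloads instead of failing with a KeyError. A minimal, self-contained sketch of that guard, using a plain dict and a stand-in exception rather than the provider's real session code or its ResponseError class:

    class ResponseError(Exception):
        """Stand-in for g4f.errors.ResponseError."""

    def extract_content(result: dict) -> str:
        # Mirrors the new branch: only index into "choices" when it exists.
        if "choices" in result:
            message = result["choices"][0].get("message", {})
            return message.get("content", "")
        raise ResponseError(result)

    print(extract_content({"choices": [{"message": {"content": "hello"}}]}))  # hello
    try:
        extract_content({"error": "rate limited"})  # hypothetical error payload
    except ResponseError as error:
        print("rejected:", error)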


@@ -124,7 +124,7 @@ class Websim(AsyncGeneratorProvider, ProviderModelMixin):
         response_json = json.loads(response_text)
         image_url = response_json.get("url")
         if image_url:
-            yield ImageResponse(images=[image_url], alt=used_prompt)
+            yield ImageResponse(urls=[image_url], alt=used_prompt)

     @classmethod
     async def _handle_chat_request(


@@ -71,5 +71,5 @@ class Voodoohop_Flux1Schnell(AsyncGeneratorProvider, ProviderModelMixin):
 elif event_type == b'complete':
     json_data = json.loads(data)
     image_url = json_data[0]['url']
-    yield ImageResponse(images=[image_url], alt=prompt)
+    yield ImageResponse(urls=[image_url], alt=prompt)
     return


@@ -83,7 +83,6 @@ class Grok(AsyncAuthedProvider, ProviderModelMixin):
         model: str,
         messages: Messages,
         auth_result: AuthResult,
-        cookies: Cookies = None,
         return_conversation: bool = False,
         conversation: Conversation = None,
         **kwargs
@@ -130,7 +129,7 @@ class Grok(AsyncAuthedProvider, ProviderModelMixin):
             yield token
         generated_images = response_data.get("modelResponse", {}).get("generatedImageUrls", None)
         if generated_images:
-            yield ImageResponse([f'{cls.assets_url}/{image}' for image in generated_images], "", {"cookies": cookies, "headers": headers})
+            yield ImageResponse([f'{cls.assets_url}/{image}' for image in generated_images], "", {"cookies": auth_result.cookies, "headers": auth_result.headers})
         title = result.get("title", {}).get("newTitle", "")
         if title:
             yield TitleGeneration(title)


@@ -74,7 +74,7 @@ class AiChats(AsyncGeneratorProvider, ProviderModelMixin):
         base64_image = base64.b64encode(image_data).decode('utf-8')
         base64_url = f"data:image/png;base64,{base64_image}"
-        yield ImageResponse(base64_url, prompt)
+        yield ImageResponse(image_url, prompt)
     else:
         yield f"Error: No image URL found in the response. Full response: {response_json}"
 else:


@@ -170,7 +170,7 @@ class Airforce(AsyncGeneratorProvider, ProviderModelMixin):
 async with session.get(cls.api_endpoint_imagine2, params=params, proxy=proxy) as response:
     if response.status == 200:
         image_url = str(response.url)
-        yield ImageResponse(images=image_url, alt=prompt)
+        yield ImageResponse(urls=image_url, alt=prompt)
     else:
         error_text = await response.text()
         raise RuntimeError(f"Image generation failed: {response.status} - {error_text}")


@@ -77,7 +77,7 @@ def get_cookies(domain_name: str, raise_requirements_error: bool = True, single_
     Returns:
         Dict[str, str]: A dictionary of cookie names and values.
     """
-    if cache_result and domain_name in CookiesConfig.cookies:
+    if domain_name in CookiesConfig.cookies:
         return CookiesConfig.cookies[domain_name]
     cookies = load_cookies_from_browsers(domain_name, raise_requirements_error, single_browser)


@@ -75,6 +75,12 @@
     </script>
     <div class="gradient"></div>
     <div class="sidebar shown">
+        <div class="sidebar-header">
+            <div class="sidebar-logo">G4F Chat</div>
+            <div class="mobile-sidebar-toggle">
+                <i class="fa-solid fa-bars"></i>
+            </div>
+        </div>
         <div class="top">
             <button class="new_convo" onclick="new_conversation()">
                 <i class="fa-regular fa-plus"></i>
@@ -104,8 +110,13 @@
         </div>
         <div class="settings hidden">
             <div class="paper">
-                <h3>Settings</h3>
-                <div class="field">
+                <div class="settings-top-bar">
+                    <button class="settings-back-button" onclick="open_settings();">
+                        <i class="fa-solid fa-arrow-left"></i>
+                    </button>
+                    <span>Settings</span>
+                </div>
+                <div class="field">
                     <span class="label">Enable Dark Mode</span>
                     <input type="checkbox" id="darkMode" checked />
                     <label for="darkMode" class="toogle" title=""></label>
@@ -221,8 +232,14 @@
             </div>
         </div>
         <div class="chat-container">
-            <div class="chat-header box">
-                G4F Chat
+            <div class="chat-top-panel">
+                <div class="mobile-sidebar-toggle">
+                    <i class="fa-solid fa-bars"></i>
+                </div>
+                <div class="convo-title">New Conversation</div>
+                <button class="new_convo_icon" onclick="new_conversation()">
+                    <i class="fa-regular fa-plus"></i>
+                </button>
             </div>
             <textarea id="chatPrompt" class="box" placeholder="System prompt"></textarea>
             <button class="slide-header">
@@ -337,9 +354,13 @@
             </div>
         </div>
     </div>
-    <div class="log hidden"></div>
-    <div class="mobile-sidebar">
-        <i class="fa-solid fa-bars"></i>
-    </div>
+    <div class="log hidden">
+        <div class="log-top-bar">
+            <button class="log-back-button" onclick="open_settings();">
+                <i class="fa-solid fa-arrow-left"></i>
+            </button>
+            <span>Logs</span>
+        </div>
+    </div>
 </body>
 </html>


@@ -57,15 +57,14 @@ html,
 body {
     scroll-behavior: smooth;
     overflow: hidden;
-    max-height: 100%;
-    max-width: 100%;
 }

 body {
     background: var(--background);
     color: var(--colour-3);
-    height: 100vh;
     display: flex;
+    height: 100vh;
+    max-height: 100vh;
 }

 body:not(.white) a:link,
@@ -936,7 +935,7 @@ input.model:hover
 .slide-header {
     position: absolute;
-    top: 0;
+    top: 58px;
     z-index: 1;
     padding: 10px;
     border: none;
@@ -1101,26 +1100,35 @@ ul {
     visibility: hidden;
 }

-.mobile-sidebar {
-    position: fixed;
-    z-index: 1000;
-    top: 10px;
-    right: 10px;
-    width: 40px;
-    height: 40px;
-    background-color: var(--blur-bg);
-    border: 1px solid var(--blur-border);
-    border-radius: 10px;
-    backdrop-filter: blur(20px);
-    -webkit-backdrop-filter: blur(20px);
-    cursor: pointer;
-    justify-content: center;
+.sidebar-header {
+    padding: 12px 0;
+    margin-bottom: 12px;
+    display: flex;
+    justify-content: space-between;
     align-items: center;
-    transition: all 0.3s ease;
-    padding: 10px 12px;
+    padding: 0 0 10px 0;
+    border-bottom: 1px solid var(--blur-border);
 }

-.mobile-sidebar i {
+.sidebar-logo {
+    font-size: 16px;
+    font-weight: 500;
+}
+
+.mobile-sidebar-toggle {
+    display: flex;
+    justify-content: center;
+    align-items: center;
+    width: 36px;
+    height: 36px;
+    background-color: var(--blur-bg);
+    border: 1px solid var(--blur-border);
+    border-radius: 8px;
+    cursor: pointer;
+    transition: all 0.3s ease;
+}
+
+.mobile-sidebar-toggle i {
     transition: 0.33s;
 }
@@ -1308,6 +1316,7 @@ form textarea {
 .field a {
     text-wrap: nowrap;
+    padding: 4px;
 }

 .field .fa-xmark {
@@ -1328,7 +1337,9 @@ form textarea {
 .settings, .log, form {
     overflow: auto;
-    width: 100%;
+    display: flex;
+    flex-direction: column;
+    flex: 1;
 }

 .log {
@@ -1508,13 +1519,59 @@ form textarea {
     flex-direction: column;
     overflow: hidden;
 }

-.chat-header {
-    padding: 10px 28px;
-    font-weight: 500;
-    white-space: nowrap;
-    text-overflow: ellipsis;
-    overflow: hidden;
+.chat-top-panel {
+    padding: 12px 15px;
+    display: flex;
+    justify-content: space-between;
+    align-items: center;
 }

+.chat-logo {
+    font-size: 16px;
+}
+
+.chat-top-panel .convo-title {
+    margin: 0 10px;
+    font-size: 14px;
+    text-align: center;
+    flex: 1;
+}
+
+.chat-top-panel .mobile-sidebar-toggle {
+    display: flex;
+    justify-content: center;
+    align-items: center;
+    width: 36px;
+    height: 36px;
+    background-color: var(--blur-bg);
+    border: 1px solid var(--blur-border);
+    border-radius: 8px;
+    cursor: pointer;
+}
+
+@media only screen and (min-width: 40em) {
+    .chat-top-panel .mobile-sidebar-toggle {
+        display: none;
+    }
+}
+
+.chat-top-panel .new_convo_icon {
+    display: flex;
+    justify-content: center;
+    align-items: center;
+    width: 36px;
+    height: 36px;
+    background-color: var(--blur-bg);
+    border: 1px solid var(--blur-border);
+    border-radius: 8px;
+    cursor: pointer;
+    color: var(--colour-3);
+    font-size: 16px;
+}
+
+.chat-top-panel .mobile-sidebar-toggle:hover,
+.chat-top-panel .new_convo_icon:hover {
+    background-color: var(--button-hover);
+}
+
 @media only screen and (min-width: 40em) {
     .sidebar {
         width: 300px;
@@ -1564,4 +1621,44 @@ form textarea {
     border-style: solid;
     box-shadow: none;
     background-color: var(--button-hover);
+}
+
+.settings-top-bar, .log-top-bar {
+    display: flex;
+    align-items: center;
+    padding: 15px 0;
+    margin-bottom: 15px;
+    border-bottom: 1px solid var(--blur-border);
+    gap: 12px; /* Space between button and title */
+}
+
+.settings-back-button, .log-back-button {
+    display: flex;
+    align-items: center;
+    justify-content: center;
+    background: none;
+    border: none;
+    color: var(--colour-3);
+    font-size: 18px;
+    cursor: pointer;
+    width: 40px;
+    height: 40px;
+    border-radius: 8px;
+}
+
+.settings-back-button:hover, .log-back-button:hover {
+    background-color: var(--blur-border);
+}
+
+.settings-title, .log-title {
+    font-size: 20px;
+    font-weight: 500;
+    color: var(--colour-3);
+    user-select: none; /* Prevent text selection */
+}
+
+.settings-top-bar span, .log-top-bar span {
+    font-weight: 600;
+    font-size: 18px;
+    margin-left: 8px;
 }


@@ -5,7 +5,7 @@ const box_conversations = document.querySelector(`.top`);
 const stop_generating = document.querySelector(`.stop_generating`);
 const regenerate_button = document.querySelector(`.regenerate`);
 const sidebar = document.querySelector(".sidebar");
-const sidebar_button = document.querySelector(".mobile-sidebar");
+const sidebar_buttons = document.querySelectorAll(".mobile-sidebar-toggle");
 const sendButton = document.getElementById("sendButton");
 const addButton = document.getElementById("addButton");
 const imageInput = document.querySelector(".image-label");
@@ -570,6 +570,7 @@ const handle_ask = async (do_ask_gpt = true, message = null) => {
         }
     } else {
         await safe_load_conversation(window.conversation_id, true);
+        await load_conversations();
     }
 };
@@ -1360,10 +1361,12 @@ const set_conversation = async (conversation_id) => {
 };

 const new_conversation = async () => {
-    history.pushState({}, null, `/chat/`);
+    if (!/\/chat\/(share|\?|$)/.test(window.location.href)) {
+        history.pushState({}, null, `/chat/`);
+    }
     window.conversation_id = generateUUID();
     document.title = window.title || document.title;
-    document.querySelector(".chat-header").innerText = "New Conversation - G4F";
+    document.querySelector(".chat-top-panel .convo-title").innerText = "New Conversation";

     await clear_conversation();
     if (chatPrompt) {
@@ -1429,16 +1432,16 @@ const load_conversation = async (conversation, scroll=true) => {
     let messages = conversation?.items || [];
     console.debug("Conversation:", conversation.id)
-    let title = conversation.new_title || conversation.title;
-    title = title ? `${title} - G4F` : window.title;
+    let conversation_title = conversation.new_title || conversation.title;
+    title = conversation_title ? `${conversation_title} - G4F` : window.title;
     if (title) {
         document.title = title;
     }

-    const chatHeader = document.querySelector(".chat-header");
+    const chatHeader = document.querySelector(".chat-top-panel .convo-title");
     if (window.share_id && conversation.id == window.start_id) {
-        chatHeader.innerHTML = '<i class="fa-solid fa-qrcode"></i> ' + escapeHtml(title);
+        chatHeader.innerHTML = '<i class="fa-solid fa-qrcode"></i> ' + escapeHtml(conversation_title);
     } else {
-        chatHeader.innerText = title;
+        chatHeader.innerText = conversation_title;
     }

     if (chatPrompt) {
@@ -1860,7 +1863,7 @@ async function hide_sidebar(remove_shown=false) {
     if (remove_shown) {
         sidebar.classList.remove("shown");
     }
-    sidebar_button.classList.remove("rotated");
+    sidebar_buttons.forEach((el)=>el.classList.remove("rotated"))
     settings.classList.add("hidden");
     chat.classList.remove("hidden");
     log_storage.classList.add("hidden");
@@ -1878,18 +1881,16 @@ async function hide_settings() {
 window.addEventListener('popstate', hide_sidebar, false);

-sidebar_button.addEventListener("click", async () => {
-    if (sidebar.classList.contains("shown") || sidebar_button.classList.contains("rotated")) {
-        await hide_sidebar();
+sidebar_buttons.forEach((el)=>el.addEventListener("click", async () => {
+    if (sidebar.classList.contains("shown") || el.classList.contains("rotated")) {
+        await hide_sidebar(true);
         chat.classList.remove("hidden");
-        sidebar.classList.remove("shown");
-        sidebar_button.classList.remove("rotated");
     } else {
         await show_menu();
         chat.classList.add("hidden");
     }
     window.scrollTo(0, 0);
-});
+}));

 function add_url_to_history(url) {
     if (!window?.pywebview) {
@@ -1899,7 +1900,7 @@ function add_url_to_history(url) {
 async function show_menu() {
     sidebar.classList.add("shown");
-    sidebar_button.classList.add("rotated");
+    sidebar_buttons.forEach((el)=>el.classList.add("rotated"))
     await hide_settings();
     add_url_to_history("/chat/menu/");
 }
@@ -2137,7 +2138,7 @@ window.addEventListener('load', async function() {
     if (!appStorage.getItem(`conversation:${window.conversation_id}`) || conversation.id == window.conversation_id) {
         // Copy conversation from share
         if (conversation.id != window.conversation_id) {
-            conversation.id = window.conversation_id;
+            window.conversation_id = conversation.id;
             conversation.updated = Date.now();
             window.share_id = null;
         }
@@ -2210,7 +2211,8 @@ async function on_load() {
     count_input();
     if (/\/settings\//.test(window.location.href)) {
         open_settings();
-    } else if (/\/chat\/share/.test(window.location.href)) {
+    } else if (/\/chat\/(share|\?|$)/.test(window.location.href)) {
+        chatPrompt.value = document.getElementById("systemPrompt")?.value || "";
         chatPrompt.value = document.getElementById("systemPrompt")?.value || "";
         let chat_url = new URL(window.location.href)
         let chat_params = new URLSearchParams(chat_url.search);
@@ -2218,11 +2220,9 @@ async function on_load() {
             userInput.value = chat_params.get("prompt");
             userInput.style.height = userInput.scrollHeight + "px";
             userInput.focus();
-            //await handle_ask();
+        } else {
+            new_conversation();
         }
-    } else if (/\/chat\/[?$]/.test(window.location.href)) {
-        chatPrompt.value = document.getElementById("systemPrompt")?.value || "";
-        say_hello();
     } else {
         //load_conversation(window.conversation_id);
     }
@@ -2475,8 +2475,7 @@ async function on_api() {
     });
     document.querySelector(".slide-header")?.addEventListener("click", () => {
         const checked = slide_systemPrompt_icon.classList.contains("fa-angles-up");
-        document.querySelector(".chat-header").classList[checked ? "add": "remove"]("hidden");
-        chatPrompt.classList[checked || hide_systemPrompt.checked ? "add": "remove"]("hidden");
+        chatPrompt.classList[checked ? "add": "remove"]("hidden");
         slide_systemPrompt_icon.classList[checked ? "remove": "add"]("fa-angles-up");
         slide_systemPrompt_icon.classList[checked ? "add": "remove"]("fa-angles-down");
     });


@@ -18,6 +18,7 @@ from hashlib import sha256
 from ...client.service import convert_to_provider
 from ...providers.asyncio import to_sync_generator
+from ...providers.response import FinishReason
 from ...client.helper import filter_markdown
 from ...tools.files import supports_filename, get_streaming, get_bucket_dir, get_buckets
 from ...tools.run_tools import iter_run_tools
@@ -79,16 +80,12 @@ class Backend_Api(Api):
         @app.route('/backend-api/v2/models', methods=['GET'])
         def jsonify_models(**kwargs):
             response = get_demo_models() if app.demo else self.get_models(**kwargs)
-            if isinstance(response, list):
-                return jsonify(response)
-            return response
+            return jsonify(response)

         @app.route('/backend-api/v2/models/<provider>', methods=['GET'])
         def jsonify_provider_models(**kwargs):
             response = self.get_provider_models(**kwargs)
-            if isinstance(response, list):
-                return jsonify(response)
-            return response
+            return jsonify(response)

         @app.route('/backend-api/v2/providers', methods=['GET'])
         def jsonify_providers(**kwargs):
@@ -259,9 +256,11 @@ class Backend_Api(Api):
     else:
         response = iter_run_tools(ChatCompletion.create, **parameters)
         cache_dir.mkdir(parents=True, exist_ok=True)
+        copy_response = [chunk for chunk in response]
         with cache_file.open("w") as f:
-            for chunk in response:
+            for chunk in copy_response:
                 f.write(str(chunk))
+        response = copy_response
 else:
     response = iter_run_tools(ChatCompletion.create, **parameters)
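Context for the copy_response change above, as a standalone sketch with a stand-in generator (not the real iter_run_tools): a generator can only be consumed once, so writing it straight to the cache file would leave nothing to return to the client.

    def chunks():
        # Stand-in for the streamed completion chunks.
        yield "Hello, "
        yield "world"

    response = chunks()
    copy_response = [chunk for chunk in response]  # materialize once
    print(list(response))    # [] - the original generator is exhausted
    print(copy_response)     # ['Hello, ', 'world'] - safe to cache and to return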
@@ -269,7 +268,9 @@ class Backend_Api(Api):
     return Response(filter_markdown("".join([str(chunk) for chunk in response]), do_filter_markdown), mimetype='text/plain')
     def cast_str():
         for chunk in response:
-            if not isinstance(chunk, Exception):
+            if isinstance(chunk, FinishReason):
+                yield f"[{chunk.reason}]" if chunk.reason != "stop" else ""
+            elif not isinstance(chunk, Exception):
                 yield str(chunk)
     return Response(cast_str(), mimetype='text/plain')
 except Exception as e:
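A small, self-contained sketch of the behaviour the new cast_str branch adds (stand-in FinishReason class, not the g4f one): non-"stop" finish reasons are surfaced as a bracketed marker in the plain-text output, while a normal "stop" stays silent.

    from dataclasses import dataclass

    @dataclass
    class FinishReason:
        # Stand-in exposing the same attribute the backend reads.
        reason: str

    def cast_str(chunks):
        for chunk in chunks:
            if isinstance(chunk, FinishReason):
                yield f"[{chunk.reason}]" if chunk.reason != "stop" else ""
            elif not isinstance(chunk, Exception):
                yield str(chunk)

    print("".join(cast_str(["partial ", "answer", FinishReason("length")])))
    # -> partial answer[length]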


@@ -424,7 +424,10 @@ class AsyncAuthedProvider(AsyncGeneratorProvider, AuthFileMixin):
     def write_cache_file(cls, cache_file: Path, auth_result: AuthResult = None):
         if auth_result is not None:
             cache_file.parent.mkdir(parents=True, exist_ok=True)
-            cache_file.write_text(json.dumps(auth_result.get_dict()))
+            try:
+                cache_file.write_text(json.dumps(auth_result.get_dict()))
+            except TypeError:
+                raise RuntimeError(f"Failed to save: {auth_result.get_dict()}")
         elif cache_file.exists():
             cache_file.unlink()
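For context on the new try/except above, a minimal standalone example with plain json and hypothetical values (not a real AuthResult): json.dumps raises TypeError when the auth data contains objects it cannot serialize, which is the failure the RuntimeError now reports.

    import json

    auth_data = {"api_key": "...", "session": object()}  # hypothetical non-serializable value
    try:
        json.dumps(auth_data)
    except TypeError as error:
        print(f"Failed to save: {error}")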
@@ -489,5 +492,4 @@ class AsyncAuthedProvider(AsyncGeneratorProvider, AuthFileMixin):
             cache_file = None
         yield chunk
 finally:
-    if cache_file is not None:
-        cls.write_cache_file(cache_file, auth_result)
+    cls.write_cache_file(cache_file, auth_result)