Increase max tokens in HuggingfaceAPI

Restore browser instance on start up errors in nodriver
Restored instances can be used as usual or to stop the browser
Add demo mode to web UI for HuggingSpace
Add rate limit support to web UI. Simply install flask_limiter
Add home for demo with Access Token input and validation
Add stripped model list for demo
Ignore encoding errors in web_search and file upload
This commit is contained in:
hlohaus 2025-01-26 02:48:43 +01:00
parent e4c4e7b5ba
commit ab04d1d894
17 changed files with 444 additions and 118 deletions

View file

@@ -6,12 +6,19 @@ import os
import logging
import asyncio
import shutil
import random
from flask import Flask, Response, request, jsonify
from typing import Generator
from pathlib import Path
from urllib.parse import quote_plus
from hashlib import sha256
from werkzeug.utils import secure_filename
try:
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
has_flask_limiter = True
except ImportError:
has_flask_limiter = False
from ...image import is_allowed_extension, to_image
from ...client.service import convert_to_provider
@@ -22,6 +29,7 @@ from ...tools.run_tools import iter_run_tools
from ...errors import ProviderNotFoundError
from ...cookies import get_cookies_dir
from ... import ChatCompletion
from ... import models
from .api import Api
logger = logging.getLogger(__name__)
@@ -53,45 +61,98 @@ class Backend_Api(Api):
"""
self.app: Flask = app
if has_flask_limiter:
    # Real rate limiter when flask_limiter is installed; counters are kept
    # in process memory, so limits reset on restart.
    limiter = Limiter(
        get_remote_address,
        app=app,
        default_limits=["200 per day", "50 per hour"],
        storage_uri="memory://",
    )
else:
    # Fallback stub when flask_limiter is missing. limit() must return a
    # decorator (not None): '@limiter.limit("4 per minute")' below applies
    # the returned value to the view function, and returning None would
    # raise "TypeError: 'NoneType' object is not callable" at registration.
    class Dummy():
        def limit(self, value):
            def decorator(func):
                return func
            return decorator
    limiter = Dummy()
@app.route('/backend-api/v2/models', methods=['GET'])
def jsonify_models(**kwargs):
    """Return the available models as JSON.

    In demo mode the stripped-down demo model list is served instead of
    the full list. (The pre-demo assignment was a dead store immediately
    overwritten by the demo-aware one; only the latter is kept.)
    """
    response = get_demo_models() if app.demo else self.get_models(**kwargs)
    if isinstance(response, list):
        return jsonify(response)
    # Non-list results (e.g. an error Response) are passed through as-is.
    return response
@app.route('/backend-api/v2/models/<provider>', methods=['GET'])
def jsonify_provider_models(**kwargs):
    """Return the models of a single provider, JSON-encoded when a list."""
    result = self.get_provider_models(**kwargs)
    return jsonify(result) if isinstance(result, list) else result
@app.route('/backend-api/v2/providers', methods=['GET'])
def jsonify_providers(**kwargs):
    """Return the provider list, JSON-encoded when given as a list."""
    result = self.get_providers(**kwargs)
    return jsonify(result) if isinstance(result, list) else result
def get_demo_models():
    """Build the stripped-down model list served in demo mode.

    Each entry of models.demo_models maps to a (model, providers) pair;
    the result carries the model name, image/vision capability flags,
    the provider names (parent name when the provider has one), and a
    'demo' marker.
    """
    demo_list = []
    for model, providers in models.demo_models.values():
        provider_names = [
            getattr(provider, "parent", provider.__name__)
            for provider in providers
        ]
        demo_list.append({
            "name": model.name,
            "image": isinstance(model, models.ImageModel),
            "vision": isinstance(model, models.VisionModel),
            "providers": provider_names,
            "demo": True
        })
    return demo_list
@app.route('/backend-api/v2/conversation', methods=['POST'])
@limiter.limit("4 per minute")
def handle_conversation():
    """
    Handles conversation requests and streams responses back.

    Returns:
        Response: A Flask response object for streaming.
    """
    kwargs = {}
    if "files[]" in request.files:
        images = []
        for file in request.files.getlist('files[]'):
            # Skip empty filenames and disallowed extensions; the second
            # argument flags SVG files for special handling in to_image.
            if file.filename != '' and is_allowed_extension(file.filename):
                images.append((to_image(file.stream, file.filename.endswith('.svg')), file.filename))
        kwargs['images'] = images
    # Payload arrives either as a 'json' form field (multipart upload)
    # or as the JSON request body.
    if "json" in request.form:
        json_data = json.loads(request.form['json'])
    else:
        json_data = request.json
    if app.demo:
        # Demo mode: restrict to the curated demo models; each entry is a
        # (model, providers) pair, and a provider is picked at random.
        model = json_data.get("model")
        if model != "default" and model in models.demo_models:
            json_data["provider"] = random.choice(models.demo_models[model][1])
        else:
            # Unknown or "default" model: fall back to the default demo model.
            json_data["model"] = models.demo_models["default"][0].name
            json_data["provider"] = random.choice(models.demo_models["default"][1])
    kwargs = self._prepare_conversation_kwargs(json_data, kwargs)
    return self.app.response_class(
        self._create_response_stream(
            kwargs,
            json_data.get("conversation_id"),
            json_data.get("provider"),
            json_data.get("download_images", True),
        ),
        mimetype='text/event-stream'
    )
self.routes = {
'/backend-api/v2/models': {
'function': jsonify_models,
'methods': ['GET']
},
'/backend-api/v2/models/<provider>': {
'function': jsonify_provider_models,
'methods': ['GET']
},
'/backend-api/v2/providers': {
'function': jsonify_providers,
'methods': ['GET']
},
'/backend-api/v2/version': {
'function': self.get_version,
'methods': ['GET']
},
'/backend-api/v2/conversation': {
'function': self.handle_conversation,
'methods': ['POST']
},
'/backend-api/v2/synthesize/<provider>': {
'function': self.handle_synthesize,
'methods': ['GET']
@@ -250,38 +311,6 @@ class Backend_Api(Api):
return "File saved", 200
return 'Not supported file', 400
def handle_conversation(self):
    """
    Handles conversation requests and streams responses back.

    Returns:
        Response: A Flask response object for streaming.
    """
    kwargs = {}
    if "files[]" in request.files:
        uploads = []
        for upload in request.files.getlist('files[]'):
            if upload.filename != '' and is_allowed_extension(upload.filename):
                is_svg = upload.filename.endswith('.svg')
                uploads.append((to_image(upload.stream, is_svg), upload.filename))
        kwargs['images'] = uploads
    # The payload is either a 'json' form field or the JSON request body.
    json_data = json.loads(request.form['json']) if "json" in request.form else request.json
    kwargs = self._prepare_conversation_kwargs(json_data, kwargs)
    stream = self._create_response_stream(
        kwargs,
        json_data.get("conversation_id"),
        json_data.get("provider"),
        json_data.get("download_images", True),
    )
    return self.app.response_class(stream, mimetype='text/event-stream')
def handle_synthesize(self, provider: str):
try:
provider_handler = convert_to_provider(provider)