Mirror of https://github.com/xtekky/gpt4free.git, synced 2025-12-05 18:20:35 -08:00.
Improve logging
This commit is contained in:
parent
a31cb50d6a
commit
6106ca95c9
5 changed files with 56 additions and 16 deletions
|
|
@ -36,4 +36,4 @@ RUN git clone https://github.com/hlohaus/deepseek4free.git \
|
|||
&& cd deepseek4free && git checkout 21Feb \
|
||||
&& pip install --no-cache-dir . && cd .. && rm -rf deepseek4free
|
||||
|
||||
CMD git pull origin main && docker/update.sh & docker/start.sh
|
||||
CMD python -m etc.tool.update && docker/update.sh & docker/start.sh
|
||||
|
|
@ -15,7 +15,7 @@ echo "UPDATE: d$c"
|
|||
git pull origin main
|
||||
sleep 120
|
||||
echo "UPDATE: #$c"
|
||||
python -m etc.tool.update
|
||||
git pull origin main
|
||||
sleep 120
|
||||
done
|
||||
echo "STOPPED."
|
||||
|
|
|
|||
|
|
@ -188,6 +188,13 @@ class AppConfig:
|
|||
for key, value in data.items():
|
||||
setattr(cls, key, value)
|
||||
|
||||
def remove_authorization(request: Request) -> Request:
    """Strip the ``Authorization`` header from *request* in place.

    Called by the auth middleware after credentials have been validated,
    so the header is not passed on to downstream handlers.

    Returns the same ``Request`` object, mutated (not a copy).
    """
    # Editable copy of the request headers.
    new_header = request.headers.mutablecopy()
    del new_header["authorization"]
    # Write the raw header list back into the ASGI scope so any headers
    # object rebuilt from the scope reflects the removal.
    request.scope["headers"] = new_header.raw
    # Drop Starlette's cached headers so ``request.headers`` is rebuilt
    # from the updated scope on next access. Safe here because the
    # ``mutablecopy()`` call above has already populated the cache.
    # NOTE(review): ``_headers`` is a Starlette internal — confirm it
    # still exists when upgrading Starlette.
    delattr(request, "_headers")
    return request
|
||||
|
||||
class Api:
|
||||
def __init__(self, app: FastAPI) -> None:
|
||||
self.app = app
|
||||
|
|
@ -220,14 +227,16 @@ class Api:
|
|||
session_key = get_session_key()
|
||||
@self.app.middleware("http")
|
||||
async def authorization(request: Request, call_next):
|
||||
user = None
|
||||
if AppConfig.g4f_api_key is not None or AppConfig.demo:
|
||||
is_authorization_header = False
|
||||
try:
|
||||
user_g4f_api_key = await self.get_g4f_api_key(request)
|
||||
except HTTPException:
|
||||
user_g4f_api_key = await self.security(request)
|
||||
if hasattr(user_g4f_api_key, "credentials"):
|
||||
user_g4f_api_key = user_g4f_api_key.credentials
|
||||
user = None
|
||||
is_authorization_header = True
|
||||
if AppConfig.g4f_api_key is None or not secrets.compare_digest(AppConfig.g4f_api_key, user_g4f_api_key):
|
||||
if has_crypto and user_g4f_api_key:
|
||||
try:
|
||||
|
|
@ -260,10 +269,12 @@ class Api:
|
|||
user = await self.get_username(request)
|
||||
except HTTPException as e:
|
||||
return ErrorResponse.from_message(e.detail, e.status_code, e.headers)
|
||||
if is_authorization_header:
|
||||
request = remove_authorization(request)
|
||||
response = await call_next(request)
|
||||
response.headers["x-user"] = user
|
||||
if user is not None:
|
||||
response.headers["x_user"] = user
|
||||
return response
|
||||
return await call_next(request)
|
||||
|
||||
def register_validation_exception_handler(self):
|
||||
@self.app.exception_handler(RequestValidationError)
|
||||
|
|
|
|||
|
|
@ -215,6 +215,13 @@ class Backend_Api(Api):
|
|||
f.write(f"{json.dumps(request.json)}\n")
|
||||
return {}
|
||||
|
||||
@app.route('/backend-api/v2/usage/<date>', methods=['GET'])
def get_usage(date: str):
    """Return the raw JSON-lines usage log for *date*.

    The writer side names files ``{datetime.date.today()}.jsonl``, so
    *date* is expected in ISO format (YYYY-MM-DD).  Responds with the
    file contents verbatim, or a JSON error with status 404 when no
    usage file exists for the requested date.
    """
    # Per-day usage records are appended under <cookies_dir>/.usage.
    cache_dir = Path(get_cookies_dir()) / ".usage"
    # NOTE(review): *date* comes straight from the URL path; Flask's
    # default string converter rejects "/" so traversal is limited, but
    # consider validating it as an ISO date before building the path.
    cache_file = cache_dir / f"{date}.jsonl"
    # NOTE(review): uses print() rather than the project's debug/logging
    # helpers — confirm this is intentional.
    print(f"Loading usage data from {cache_file}")
    return cache_file.read_text() if cache_file.exists() else (jsonify({"error": {"message": "No usage data found for this date"}}), 404)
|
||||
|
||||
@app.route('/backend-api/v2/log', methods=['POST'])
|
||||
def add_log():
|
||||
cache_dir = Path(get_cookies_dir()) / ".logging"
|
||||
|
|
|
|||
|
|
@ -5,13 +5,14 @@ import re
|
|||
import json
|
||||
import asyncio
|
||||
import time
|
||||
import datetime
|
||||
from pathlib import Path
|
||||
from typing import Optional, Callable, AsyncIterator, Iterator, Dict, Any, Tuple, List, Union
|
||||
from typing import Optional, AsyncIterator, Iterator, Dict, Any, Tuple, List, Union
|
||||
|
||||
from ..typing import Messages
|
||||
from ..providers.helper import filter_none
|
||||
from ..providers.asyncio import to_async_iterator
|
||||
from ..providers.response import Reasoning, FinishReason, Sources
|
||||
from ..providers.response import Reasoning, FinishReason, Sources, Usage, ProviderInfo
|
||||
from ..providers.types import ProviderType
|
||||
from ..cookies import get_cookies_dir
|
||||
from .web_search import do_search, get_search_message
|
||||
|
|
@ -141,7 +142,7 @@ class AuthManager:
|
|||
env_var = f"{cls.aliases[provider_name].upper()}_API_KEY"
|
||||
api_key = os.environ.get(env_var)
|
||||
if api_key:
|
||||
debug.log(f"Loading API key from environment variable {env_var}")
|
||||
print(f"Loading API key for {provider_name} from environment variable {env_var}")
|
||||
return api_key
|
||||
return None
|
||||
|
||||
|
|
@ -236,6 +237,7 @@ async def async_iter_run_tools(
|
|||
messages, sources = await perform_web_search(messages, web_search)
|
||||
|
||||
# Get API key
|
||||
if not kwargs.get("api_key"):
|
||||
api_key = AuthManager.load_api_key(provider)
|
||||
if api_key:
|
||||
kwargs["api_key"] = api_key
|
||||
|
|
@ -248,7 +250,17 @@ async def async_iter_run_tools(
|
|||
# Generate response
|
||||
response = to_async_iterator(provider.async_create_function(model=model, messages=messages, **kwargs))
|
||||
|
||||
model_info = model
|
||||
async for chunk in response:
|
||||
if isinstance(chunk, ProviderInfo):
|
||||
model_info = getattr(chunk, 'model', model_info)
|
||||
elif isinstance(chunk, Usage):
|
||||
usage = {"user": kwargs.get("user"), "model": model_info, "provider": provider.get_parent(), **chunk.get_dict()}
|
||||
usage_dir = Path(get_cookies_dir()) / ".usage"
|
||||
usage_file = usage_dir / f"{datetime.date.today()}.jsonl"
|
||||
usage_dir.mkdir(parents=True, exist_ok=True)
|
||||
with usage_file.open("a" if usage_file.exists() else "w") as f:
|
||||
f.write(f"{json.dumps(usage)}\n")
|
||||
yield chunk
|
||||
|
||||
# Yield sources if available
|
||||
|
|
@ -277,7 +289,7 @@ def iter_run_tools(
|
|||
debug.error(f"Couldn't do web search:", e)
|
||||
|
||||
# Get API key if needed
|
||||
if provider is not None:
|
||||
if not kwargs.get("api_key"):
|
||||
api_key = AuthManager.load_api_key(provider)
|
||||
if api_key:
|
||||
kwargs["api_key"] = api_key
|
||||
|
|
@ -321,7 +333,7 @@ def iter_run_tools(
|
|||
# Process response chunks
|
||||
thinking_start_time = 0
|
||||
processor = ThinkingProcessor()
|
||||
|
||||
model_info = model
|
||||
for chunk in provider.create_function(model=model, messages=messages, provider=provider, **kwargs):
|
||||
if isinstance(chunk, FinishReason):
|
||||
if sources is not None:
|
||||
|
|
@ -331,6 +343,16 @@ def iter_run_tools(
|
|||
continue
|
||||
elif isinstance(chunk, Sources):
|
||||
sources = None
|
||||
elif isinstance(chunk, ProviderInfo):
|
||||
model_info = getattr(chunk, 'model', model_info)
|
||||
elif isinstance(chunk, Usage):
|
||||
usage = {"user": kwargs.get("user"), "model": model_info, "provider": provider.get_parent(), **chunk.get_dict()}
|
||||
usage_dir = Path(get_cookies_dir()) / ".usage"
|
||||
usage_file = usage_dir / f"{datetime.date.today()}.jsonl"
|
||||
usage_dir.mkdir(parents=True, exist_ok=True)
|
||||
with usage_file.open("a" if usage_file.exists() else "w") as f:
|
||||
f.write(f"{json.dumps(usage)}\n")
|
||||
|
||||
if not isinstance(chunk, str):
|
||||
yield chunk
|
||||
continue
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue