Track live provider instances and update count on success or failure in various providers

hlohaus 2025-09-06 11:28:09 +02:00
parent 6a6a25fba8
commit 1cbf9dfb6a
5 changed files with 55 additions and 37 deletions
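
In short, every provider class now carries an integer live counter: it goes up by one whenever a request (or a model-list fetch) succeeds and down by one whenever one fails. A minimal sketch of that convention, using a stand-in class rather than the real g4f provider hierarchy:

# Minimal sketch of the counter convention introduced by this commit.
# DemoProvider is a stand-in, not a class from this repository.
class DemoProvider:
    live: int = 0  # net score: successes minus failures

def record(provider, success: bool) -> None:
    # Mirrors the pattern used throughout the diff below.
    if success:
        provider.live += 1
    else:
        provider.live -= 1

record(DemoProvider, True)
record(DemoProvider, True)
record(DemoProvider, False)
assert DemoProvider.live == 1  # two successes, one failure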


@@ -49,6 +49,8 @@ class OpenaiTemplate(AsyncGeneratorProvider, ProviderModelMixin, RaiseErrorMixin
             raise_for_status(response)
             data = response.json()
             data = data.get("data") if isinstance(data, dict) else data
+            if data:
+                cls.live += 1
             cls.image_models = [model.get("name") if cls.use_model_names else model.get("id", model.get("name")) for model in data if model.get("image") or model.get("type") == "image" or model.get("supports_images")]
             cls.vision_models = cls.vision_models.copy()
             cls.vision_models += [model.get("name") if cls.use_model_names else model.get("id", model.get("name")) for model in data if model.get("vision")]


@@ -108,6 +108,7 @@ class Api:
             "active_by_default": False if provider.active_by_default is None else provider.active_by_default,
             "auth": provider.needs_auth,
             "login_url": getattr(provider, "login_url", None),
+            "live": provider.live
         } for provider in Provider.__providers__ if provider.working and safe_get_models(provider)]
 
     def get_all_models(self) -> dict[str, list]:
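
The counter is surfaced to the GUI through the provider list: each entry now carries a live field next to the existing flags. Roughly, an entry looks like the sketch below; the values and any omitted keys are illustrative, and only the fields visible in the hunk above are taken from the commit:

# Illustrative shape of one provider entry after this change (values made up).
provider_entry = {
    "active_by_default": False,
    "auth": False,
    "login_url": None,
    "live": 2,  # successes minus failures, as tracked by this commit
}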


@@ -99,9 +99,11 @@ class RotatedProvider(BaseRetryProvider):
                     if is_content(chunk):
                         started = True
                 if started:
+                    provider.live += 1
                     # Success, so we return and do not rotate
                     return
             except Exception as e:
+                provider.live -= 1
                 exceptions[provider.__name__] = e
                 debug.error(f"{provider.__name__} failed: {e}")
 
@@ -161,8 +163,10 @@ class RotatedProvider(BaseRetryProvider):
                     if is_content(chunk):
                         started = True
                 if started:
+                    provider.live += 1
                     return  # Success
             except Exception as e:
+                provider.live -= 1
                 exceptions[provider.__name__] = e
                 debug.error(f"{provider.__name__} failed: {e}")
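
Both RotatedProvider code paths apply the same bookkeeping inside the rotation loop: the counter is only incremented once real content has streamed, and a failing attempt decrements it before the next provider is tried. Below is a simplified synchronous sketch of that control flow; it is not the actual RotatedProvider implementation, and create() stands in for whatever call the real class makes:

# Simplified sketch of the rotation pattern above; model selection, streaming
# helpers and debug logging from the real class are omitted.
def rotate(providers, messages):
    exceptions = {}
    for provider in providers:
        started = False
        try:
            for chunk in provider.create(messages):  # hypothetical call
                yield chunk
                started = True
            if started:
                provider.live += 1  # success: count it and stop rotating
                return
        except Exception as e:
            provider.live -= 1      # failure: count it and try the next provider
            exceptions[provider.__name__] = e
    raise RuntimeError(f"All providers failed: {exceptions}")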


@@ -28,6 +28,7 @@ class BaseProvider(ABC):
     params: str
     create_function: callable
     async_create_function: callable
+    live: int = 0
 
     @classmethod
     def get_dict(cls) -> Dict[str, str]:
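
Since live is declared as a class attribute on BaseProvider and the concrete providers are themselves classes, provider.live += 1 reads the inherited default and then binds the new value on that provider class, so each provider keeps its own count while the BaseProvider default stays at 0. A quick demonstration of that Python behaviour (the subclass here is hypothetical):

from abc import ABC

class BaseProvider(ABC):
    live: int = 0  # shared default, as added in this commit

class SomeProvider(BaseProvider):  # hypothetical concrete provider
    pass

SomeProvider.live += 1            # reads the inherited 0, writes 1 onto SomeProvider
assert SomeProvider.live == 1
assert BaseProvider.live == 0     # the base-class default is untouched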


@@ -253,18 +253,23 @@ async def async_iter_run_tools(
 
     # Generate response
     response = to_async_iterator(provider.async_create_function(model=model, messages=messages, **kwargs))
-    model_info = model
-    async for chunk in response:
-        if isinstance(chunk, ProviderInfo):
-            model_info = getattr(chunk, 'model', model_info)
-        elif isinstance(chunk, Usage):
-            usage = {"user": kwargs.get("user"), "model": model_info, "provider": provider.get_parent(), **chunk.get_dict()}
-            usage_dir = Path(get_cookies_dir()) / ".usage"
-            usage_file = usage_dir / f"{datetime.date.today()}.jsonl"
-            usage_dir.mkdir(parents=True, exist_ok=True)
-            with usage_file.open("a" if usage_file.exists() else "w") as f:
-                f.write(f"{json.dumps(usage)}\n")
-        yield chunk
+    try:
+        model_info = model
+        async for chunk in response:
+            if isinstance(chunk, ProviderInfo):
+                model_info = getattr(chunk, 'model', model_info)
+            elif isinstance(chunk, Usage):
+                usage = {"user": kwargs.get("user"), "model": model_info, "provider": provider.get_parent(), **chunk.get_dict()}
+                usage_dir = Path(get_cookies_dir()) / ".usage"
+                usage_file = usage_dir / f"{datetime.date.today()}.jsonl"
+                usage_dir.mkdir(parents=True, exist_ok=True)
+                with usage_file.open("a" if usage_file.exists() else "w") as f:
+                    f.write(f"{json.dumps(usage)}\n")
+            yield chunk
+        provider.live += 1
+    except:
+        provider.live -= 1
+        raise
 
     # Yield sources if available
     if sources:
@@ -338,33 +343,38 @@ def iter_run_tools(
     thinking_start_time = 0
     processor = ThinkingProcessor()
     model_info = model
-    for chunk in provider.create_function(model=model, messages=messages, provider=provider, **kwargs):
-        if isinstance(chunk, FinishReason):
-            if sources is not None:
-                yield sources
-                sources = None
-            yield chunk
-            continue
-        elif isinstance(chunk, Sources):
-            sources = None
-        elif isinstance(chunk, ProviderInfo):
-            model_info = getattr(chunk, 'model', model_info)
-        elif isinstance(chunk, Usage):
-            usage = {"user": kwargs.get("user"), "model": model_info, "provider": provider.get_parent(), **chunk.get_dict()}
-            usage_dir = Path(get_cookies_dir()) / ".usage"
-            usage_file = usage_dir / f"{datetime.date.today()}.jsonl"
-            usage_dir.mkdir(parents=True, exist_ok=True)
-            with usage_file.open("a" if usage_file.exists() else "w") as f:
-                f.write(f"{json.dumps(usage)}\n")
-        if not isinstance(chunk, str):
-            yield chunk
-            continue
-
-        thinking_start_time, results = processor.process_thinking_chunk(chunk, thinking_start_time)
-
-        for result in results:
-            yield result
+    try:
+        for chunk in provider.create_function(model=model, messages=messages, provider=provider, **kwargs):
+            if isinstance(chunk, FinishReason):
+                if sources is not None:
+                    yield sources
+                    sources = None
+                yield chunk
+                continue
+            elif isinstance(chunk, Sources):
+                sources = None
+            elif isinstance(chunk, ProviderInfo):
+                model_info = getattr(chunk, 'model', model_info)
+            elif isinstance(chunk, Usage):
+                usage = {"user": kwargs.get("user"), "model": model_info, "provider": provider.get_parent(), **chunk.get_dict()}
+                usage_dir = Path(get_cookies_dir()) / ".usage"
+                usage_file = usage_dir / f"{datetime.date.today()}.jsonl"
+                usage_dir.mkdir(parents=True, exist_ok=True)
+                with usage_file.open("a" if usage_file.exists() else "w") as f:
+                    f.write(f"{json.dumps(usage)}\n")
+            if not isinstance(chunk, str):
+                yield chunk
+                continue
+
+            thinking_start_time, results = processor.process_thinking_chunk(chunk, thinking_start_time)
+
+            for result in results:
+                yield result
+        provider.live += 1
+    except:
+        provider.live -= 1
+        raise
 
     if sources is not None:
         yield sources
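
In both run-tools paths the whole streaming loop sits inside try/except, so the counter only goes up once the response generator has been consumed without error, while any exception, including one raised mid-stream, decrements it and is re-raised. A stripped-down sketch of that wrapper; the usage logging and chunk handling of the real functions are left out:

# Stripped-down version of the try/except bookkeeping in iter_run_tools /
# async_iter_run_tools; "provider" is any class exposing the live counter
# and a create_function-style generator.
def run_with_bookkeeping(provider, **kwargs):
    try:
        for chunk in provider.create_function(**kwargs):
            yield chunk
        provider.live += 1  # counted only after the stream finished cleanly
    except:
        provider.live -= 1  # any failure, even mid-stream, counts against the provider
        raise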