Mirror of https://github.com/xtekky/gpt4free.git, synced 2025-12-06 02:30:41 -08:00
Update copy_image
parent 0d8234bc77, commit a354a2e8f8
4 changed files with 90 additions and 57 deletions
@@ -1,5 +1,6 @@
from __future__ import annotations

import time
import asyncio
import random
from aiohttp import ClientSession, ClientTimeout
@@ -13,7 +14,7 @@ except:
    pass

from ...typing import Messages, AsyncResult
from ...providers.response import VideoResponse, Reasoning
from ...providers.response import VideoResponse, Reasoning, ContinueResponse
from ...requests import get_nodriver
from ...errors import MissingRequirementsError
from ..base_provider import AsyncGeneratorProvider
@@ -49,24 +50,34 @@ class Video(AsyncGeneratorProvider):
        prompt: str = None,
        **kwargs
    ) -> AsyncResult:
        started = time.time()
        prompt = format_media_prompt(messages, prompt)
        if not prompt:
            raise ValueError("Prompt cannot be empty.")
        async with ClientSession() as session:
            yield Reasoning(label="Lookup")
            has_video = False
            for skip in range(0, 9):
                async with session.get(cls.search_url + quote_plus(prompt) + f"?skip={skip}", timeout=ClientTimeout(total=10)) as response:
                    if response.ok:
                        yield Reasoning(label=f"Found {skip+1}", status="")
                        yield VideoResponse(str(response.url), prompt)
                        has_video = True
                    else:
                        break
            if has_video:
                yield Reasoning(label=f"Finished", status="")
                return
        try:
            yield Reasoning(label="Open browser")
            browser, stop_browser = await get_nodriver(proxy=proxy, user_data_dir="gemini")
        except Exception as e:
            debug.error(f"Error getting nodriver:", e)
            async with ClientSession() as session:
                yield Reasoning(label="Lookup")
                async with session.get(cls.search_url + quote_plus(prompt) + f"&min={prompt.count(' ') + 1}", timeout=ClientTimeout(total=10)) as response:
                    if response.status == 200:
                        yield Reasoning(label="Found", status="")
                        yield VideoResponse(str(response.url), prompt)
                        return
                yield Reasoning(label="Generating")
                async with session.post(cls.api_url + quote(prompt)) as response:
                    if not response.ok:
                        debug.error(f"Failed to lookup Video: {response.status}")
                        debug.error(f"Failed to generate Video: {response.status}")
                    else:
                        yield Reasoning(label="Finished", status="")
                        if response.headers.get("content-type", "text/plain").startswith("text/plain"):
@@ -92,6 +103,54 @@ class Video(AsyncGeneratorProvider):
            page = cls.page
        await asyncio.sleep(3)
        await page.select("textarea", 240)
        try:
            button = await page.find("Image")
            if button:
                await button.click()
            else:
                debug.error("No 'Image' button found.")
            button = await page.find("Video")
            if button:
                await button.click()
                yield Reasoning(label=f"Clicked 'Video' button")
            else:
                debug.error("No 'Video' button found.")
        except Exception as e:
            debug.error(f"Error clicking button:", e)
        debug.log(f"Using prompt: {prompt}")
        textarea = await page.select("textarea", 180)
        await textarea.send_keys(prompt)
        yield Reasoning(label=f"Sending prompt", token=prompt)
        # try:
        #     button = await page.select('button[type="submit"]', 5)
        #     if button:
        #         await button.click()
        # finally:
        try:
            button = await page.find("Create")
            if button:
                await button.click()
                yield Reasoning(label=f"Clicked 'Create' button")
        except Exception as e:
            debug.error(f"Error clicking 'Create' button:", e)
        try:
            button = await page.find("Activity")
            if button:
                await button.click()
                yield Reasoning(label=f"Clicked 'Activity' button")
        except Exception as e:
            debug.error(f"Error clicking 'Activity' button:", e)
        for idx in range(60):
            await asyncio.sleep(1)
            try:
                button = await page.find("Queued")
                if button:
                    await button.click()
                    yield Reasoning(label=f"Clicked 'Queued' button")
                    break
            except Exception as e:
                debug.error(f"Error clicking 'Queued' button:", e)
        yield Reasoning(label=f"Waiting for Video URL...")
        def on_request(event: nodriver.cdp.network.RequestWillBeSent, page=None):
            if "mp4" in event.request.url:
                RequestConfig.headers = {}
@@ -105,52 +164,9 @@ class Video(AsyncGeneratorProvider):
                RequestConfig.urls.append(event.request.url)
        await page.send(nodriver.cdp.network.enable())
        page.add_handler(nodriver.cdp.network.RequestWillBeSent, on_request)

        try:
            button = await page.find("Image")
            if button:
                await button.click()
            else:
                debug.error("No 'Image' button found.")
            button = await page.find("Video")
            if button:
                await button.click()
            else:
                debug.error("No 'Video' button found.")
        except Exception as e:
            debug.error(f"Error clicking button:", e)
        debug.log(f"Using prompt: {prompt}")
        textarea = await page.select("textarea", 180)
        await textarea.send_keys(prompt)
        # try:
        #     button = await page.select('button[type="submit"]', 5)
        #     if button:
        #         await button.click()
        # finally:
        try:
            button = await page.find("Create")
            if button:
                await button.click()
        except Exception as e:
            debug.error(f"Error clicking 'Create' button:", e)
        try:
            button = await page.find("Activity")
            if button:
                await button.click()
        except Exception as e:
            debug.error(f"Error clicking 'Activity' button:", e)
        for idx in range(60):
            await asyncio.sleep(1)
            try:
                button = await page.find("Queued")
                if button:
                    await button.click()
                    debug.log(f"Clicked 'Queued' button")
                    break
            except:
                debug.error(f"Error clicking 'Queued' button:", e)
        debug.log(f"Waiting for Video URL...")
        for idx in range(600):
            if time.time() - started > 30:
                yield ContinueResponse("Timeout waiting for Video URL")
            await asyncio.sleep(1)
            if RequestConfig.urls:
                await asyncio.sleep(2)
@@ -159,6 +175,7 @@ class Video(AsyncGeneratorProvider):
                yield VideoResponse(RequestConfig.urls, prompt, {
                    "headers": {"authorization": RequestConfig.headers.get("authorization")} if RequestConfig.headers.get("authorization") else {}
                })
                yield Reasoning(label=f"Finished", status="")
                break
            if idx == 599:
                raise RuntimeError("Failed to get Video URL")
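Note: the lookup loop in the first hunks above pages through previously generated results with a ?skip= offset before falling back to browser automation. A minimal standalone sketch of that pattern, assuming an endpoint that answers 200 while a cached result exists at the given offset; the URL below is a placeholder, not the provider's real search_url:

import asyncio
from urllib.parse import quote_plus
from aiohttp import ClientSession, ClientTimeout

SEARCH_URL = "https://example.org/search/"  # placeholder for cls.search_url

async def lookup_cached(prompt: str) -> list[str]:
    """Collect cached video URLs by paging with ?skip= until the first miss."""
    urls = []
    async with ClientSession() as session:
        for skip in range(0, 9):
            async with session.get(
                SEARCH_URL + quote_plus(prompt) + f"?skip={skip}",
                timeout=ClientTimeout(total=10),
            ) as response:
                if not response.ok:
                    break  # no more cached results for this prompt
                urls.append(str(response.url))
    return urls

# Example: print(asyncio.run(lookup_cached("a cat surfing a wave")))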
@@ -5,6 +5,7 @@ import time
import asyncio
import hashlib
import base64
from datetime import datetime
from typing import AsyncIterator
from urllib.parse import quote, unquote
from aiohttp import ClientSession, ClientError
@@ -135,14 +136,18 @@ async def copy_media(
            if target_path is None:
                # Build safe filename with full Unicode support
                media_extension = get_media_extension(image)
                filename = get_filename(tags, alt, media_extension, image)
                path = urlparse(image).path
                if path.startswith("/media/"):
                    filename = secure_filename(path[len("/media/"):])
                else:
                    filename = get_filename(tags, alt, media_extension, image)
                target_path = os.path.join(get_media_dir(), filename)
            try:
                # Handle different image types
                if image.startswith("data:"):
                    with open(target_path, "wb") as f:
                        f.write(extract_data_uri(image))
                else:
                elif not os.path.exists(target_path) or os.lstat(target_path).st_size <= 0:
                    # Apply BackendApi settings if needed
                    if BackendApi.working and image.startswith(BackendApi.url):
                        request_headers = BackendApi.headers if headers is None else headers
@@ -151,9 +156,14 @@ async def copy_media(
                        request_headers = headers
                        request_ssl = ssl
                    # Use aiohttp to fetch the image
                    debug.log(f"Copying image: {image} to {target_path}")
                    async with session.get(image, ssl=request_ssl, headers=request_headers) as response:
                        response.raise_for_status()
                        date = response.headers.get("date")
                        if date and target_path != target:
                            timestamp = datetime.strptime(date, '%a, %d %b %Y %H:%M:%S %Z').timestamp()
                            filename = str(int(timestamp)) + "_" + filename.split("_", maxsplit=1)[-1]
                            target_path = os.path.join(get_media_dir(), filename)
                            debug.log(f"Copying image: {image} to {target_path}")
                        media_type = response.headers.get("content-type", "application/octet-stream")
                        if media_type not in ("application/octet-stream", "binary/octet-stream"):
                            if media_type not in MEDIA_TYPE_MAP:
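Note: the copy_media change above re-prefixes the cached filename with a timestamp parsed from the response's Date header. A small isolated sketch of just that rename; the sample header and filename are made up:

from datetime import datetime

date = "Sat, 06 Dec 2025 10:30:41 GMT"      # sample Date header
filename = "1733480000_cat_surfing.mp4"     # sample "<timestamp>_<name>" filename

# Same rename as in copy_media: swap the leading prefix for the server timestamp.
timestamp = datetime.strptime(date, "%a, %d %b %Y %H:%M:%S %Z").timestamp()
renamed = str(int(timestamp)) + "_" + filename.split("_", maxsplit=1)[-1]
print(renamed)  # new prefix is derived from the Date header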
@@ -192,6 +192,11 @@ class DebugResponse(HiddenResponse):
        """Initialize with a log message."""
        self.log = log

class ContinueResponse(HiddenResponse):
    def __init__(self, log: str) -> None:
        """Initialize with a log message."""
        self.log = log

class Reasoning(ResponseType):
    def __init__(
        self,
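Note: the new ContinueResponse mirrors DebugResponse: a HiddenResponse subclass that only carries a log message. A hedged sketch of how a consumer might separate such hidden chunks from visible output, assuming the g4f package containing this change is importable; the chunk list is illustrative only:

from g4f.providers.response import ContinueResponse, HiddenResponse

chunks = ["Generating...", ContinueResponse("Timeout waiting for Video URL"), "Done"]
for chunk in chunks:
    if isinstance(chunk, HiddenResponse):
        print("log only:", chunk.log)   # hidden from the end user
    else:
        print("output:", chunk)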
@@ -146,7 +146,7 @@ def set_browser_executable_path(browser_executable_path: str):
async def get_nodriver(
    proxy: str = None,
    user_data_dir = "nodriver",
    timeout: int = 120,
    timeout: int = 300,
    browser_executable_path: str = None,
    **kwargs
) -> tuple[Browser, callable]:
@@ -172,6 +172,7 @@ async def get_nodriver(
        time_open = time.time() - opend_at
        if timeout * 2 > time_open:
            debug.log(f"Nodriver: Browser is already in use since {time_open} secs.")
            debug.log("Lock file:", lock_file)
            for _ in range(timeout):
                if lock_file.exists():
                    await asyncio.sleep(1)
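Note: the get_nodriver change raises the default timeout from 120 to 300 seconds and logs the lock file while waiting for another browser session to release it. A rough sketch of that waiting pattern; the lock-file path and its timestamp contents are assumptions for illustration:

import asyncio
import time
from pathlib import Path

async def wait_for_lock(lock_file: Path, timeout: int = 300) -> None:
    """Poll until a stale or released lock file no longer blocks a new browser."""
    if lock_file.exists():
        opened_at = float(lock_file.read_text() or 0)   # assumed format: a timestamp
        time_open = time.time() - opened_at
        if timeout * 2 > time_open:
            print(f"Browser already in use for {time_open:.0f}s, waiting on {lock_file}")
            for _ in range(timeout):
                if not lock_file.exists():
                    break
                await asyncio.sleep(1)

# Example: asyncio.run(wait_for_lock(Path("nodriver/.lock")))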