mirror of
https://github.com/AUTOMATIC1111/stable-diffusion-webui.git
synced 2026-03-22 06:10:51 -07:00
Merge remote-tracking branch 'a1111/dev' into hf_endpoint
This commit is contained in:
commit
23e0e75e1b
3 changed files with 21 additions and 4 deletions
|
|
@ -378,7 +378,7 @@ def prepare_environment():
|
|||
openclip_package = os.environ.get('OPENCLIP_PACKAGE', "https://github.com/mlfoundations/open_clip/archive/bb6e834e9c70d9c27d0dc3ecedeebeaeb1ffad6b.zip")
|
||||
|
||||
assets_repo = os.environ.get('ASSETS_REPO', "https://github.com/AUTOMATIC1111/stable-diffusion-webui-assets.git")
|
||||
stable_diffusion_repo = os.environ.get('STABLE_DIFFUSION_REPO', "https://github.com/Stability-AI/stablediffusion.git")
|
||||
stable_diffusion_repo = os.environ.get('STABLE_DIFFUSION_REPO', "https://github.com/w-e-w/stablediffusion.git")
|
||||
stable_diffusion_xl_repo = os.environ.get('STABLE_DIFFUSION_XL_REPO', "https://github.com/Stability-AI/generative-models.git")
|
||||
k_diffusion_repo = os.environ.get('K_DIFFUSION_REPO', 'https://github.com/crowsonkb/k-diffusion.git')
|
||||
blip_repo = os.environ.get('BLIP_REPO', 'https://github.com/salesforce/BLIP.git')
|
||||
|
|
@ -422,8 +422,24 @@ def prepare_environment():
|
|||
)
|
||||
startup_timer.record("torch GPU test")
|
||||
|
||||
# Ensure build dependencies are installed before any package that might need them
def ensure_build_dependencies():
    """Ensure essential build tools (wheel, a compatible setuptools) are available.

    Installs ``wheel`` if it is missing, then checks the installed setuptools
    version and pins it to 69.5.1 when the major version is 70 or newer
    (NOTE(review): presumably because setuptools 70+ removed APIs some legacy
    sdists still rely on — confirm the exact incompatibility).
    Best-effort: any failure while probing setuptools falls back to installing
    the known-compatible pin rather than aborting startup.
    """
    if not is_installed("wheel"):
        run_pip("install wheel", "wheel")

    # Check setuptools version compatibility
    try:
        setuptools_version = run(f'"{python}" -c "import setuptools; print(setuptools.__version__)"', None, None).strip()
        # Compare the parsed integer major version, not the raw string:
        # the original lexicographic test `setuptools_version >= "70"` is wrong
        # for versions like "8.0" ("8" > "7" as characters), which would be
        # spuriously downgraded.
        if int(setuptools_version.split(".")[0]) >= 70:
            run_pip("install setuptools==69.5.1", "setuptools")
    except Exception:
        # If the setuptools check fails (import error, unparsable version
        # string), install a known-compatible version.
        run_pip("install setuptools==69.5.1", "setuptools")


# Install build dependencies early
ensure_build_dependencies()
|
||||
|
||||
if not is_installed("clip"):
|
||||
run_pip(f"install {clip_package}", "clip")
|
||||
run_pip(f"install --no-build-isolation {clip_package}", "clip")
|
||||
startup_timer.record("install clip")
|
||||
|
||||
if not is_installed("open_clip"):
|
||||
|
|
|
|||
|
|
@ -54,7 +54,7 @@ class SdOptimizationXformers(SdOptimization):
|
|||
priority = 100
|
||||
|
||||
def is_available(self):
|
||||
return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0))
|
||||
return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (12, 0))
|
||||
|
||||
def apply(self):
|
||||
ldm.modules.attention.CrossAttention.forward = xformers_attention_forward
|
||||
|
|
|
|||
|
|
@ -41,7 +41,8 @@ def patch():
|
|||
|
||||
modified_command = ["uv", "pip", *cmd]
|
||||
|
||||
result = subprocess.__original_run([*modified_command, *_args], **_kwargs)
|
||||
cmd_str = shlex.join([*modified_command, *_args])
|
||||
result = subprocess.__original_run(cmd_str, **_kwargs)
|
||||
if result.returncode != 0:
|
||||
return subprocess.__original_run(*args, **kwargs)
|
||||
return result
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue