diff --git a/modules/launch_utils.py b/modules/launch_utils.py index 394804f67..804b80205 100644 --- a/modules/launch_utils.py +++ b/modules/launch_utils.py @@ -378,7 +378,7 @@ def prepare_environment(): openclip_package = os.environ.get('OPENCLIP_PACKAGE', "https://github.com/mlfoundations/open_clip/archive/bb6e834e9c70d9c27d0dc3ecedeebeaeb1ffad6b.zip") assets_repo = os.environ.get('ASSETS_REPO', "https://github.com/AUTOMATIC1111/stable-diffusion-webui-assets.git") - stable_diffusion_repo = os.environ.get('STABLE_DIFFUSION_REPO', "https://github.com/Stability-AI/stablediffusion.git") + stable_diffusion_repo = os.environ.get('STABLE_DIFFUSION_REPO', "https://github.com/w-e-w/stablediffusion.git") stable_diffusion_xl_repo = os.environ.get('STABLE_DIFFUSION_XL_REPO', "https://github.com/Stability-AI/generative-models.git") k_diffusion_repo = os.environ.get('K_DIFFUSION_REPO', 'https://github.com/crowsonkb/k-diffusion.git') blip_repo = os.environ.get('BLIP_REPO', 'https://github.com/salesforce/BLIP.git') @@ -422,8 +422,24 @@ def prepare_environment(): ) startup_timer.record("torch GPU test") + # Ensure build dependencies are installed before any package that might need them + def ensure_build_dependencies(): + """Ensure essential build tools are available""" + if not is_installed("wheel"): + run_pip("install wheel", "wheel") + # Check setuptools version compatibility + try: + setuptools_version = run(f'"{python}" -c "import setuptools; print(setuptools.__version__)"', None, None).strip() + if int(setuptools_version.split('.')[0]) >= 70: + run_pip("install setuptools==69.5.1", "setuptools") + except Exception: + # If setuptools check fails, install compatible version + run_pip("install setuptools==69.5.1", "setuptools") + # Install build dependencies early + ensure_build_dependencies() + if not is_installed("clip"): - run_pip(f"install {clip_package}", "clip") + run_pip(f"install --no-build-isolation {clip_package}", "clip") startup_timer.record("install clip") if not 
is_installed("open_clip"): diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py index 0269f1f5b..d9af5a0d4 100644 --- a/modules/sd_hijack_optimizations.py +++ b/modules/sd_hijack_optimizations.py @@ -54,7 +54,7 @@ class SdOptimizationXformers(SdOptimization): priority = 100 def is_available(self): - return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0)) + return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (12, 0)) def apply(self): ldm.modules.attention.CrossAttention.forward = xformers_attention_forward diff --git a/modules/uv_hook.py b/modules/uv_hook.py index 99e89ef10..3a637d213 100644 --- a/modules/uv_hook.py +++ b/modules/uv_hook.py @@ -41,7 +41,8 @@ def patch(): modified_command = ["uv", "pip", *cmd] - result = subprocess.__original_run([*modified_command, *_args], **_kwargs) + full_command = [*modified_command, *_args] + result = subprocess.__original_run(full_command, **_kwargs) if result.returncode != 0: return subprocess.__original_run(*args, **kwargs) return result