From e0be7a84275e9a1c1866ceae1a0cb7015f06d66d Mon Sep 17 00:00:00 2001
From: namemechan
Date: Wed, 17 Sep 2025 20:23:19 +0900
Subject: [PATCH] =?UTF-8?q?update=20sd=5Fhijack=5Foptimizations.py=20?=
 =?UTF-8?q?=E2=80=94=20Changing=20[CUDA=20Compute=20Capability]=20Version?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 modules/sd_hijack_optimizations.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 0269f1f5b..d9af5a0d4 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -54,7 +54,7 @@ class SdOptimizationXformers(SdOptimization):
     priority = 100
 
     def is_available(self):
-        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0))
+        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (12, 0))
 
     def apply(self):
         ldm.modules.attention.CrossAttention.forward = xformers_attention_forward
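Note on the change: is_available() gates the xformers attention optimization by comparing the GPU's CUDA compute capability tuple against an inclusive range; this patch raises the upper bound from (9, 0) to (12, 0), presumably so that newer cards reporting a capability above 9.0 are not rejected. As a minimal standalone sketch of the same comparison (assuming only PyTorch is installed and run outside the webui; MIN_CAPABILITY, MAX_CAPABILITY, and xformers_capability_ok are illustrative names, not part of the real module), it works like this:

    # Standalone sketch of the capability check changed above. The real code also
    # consults shared.cmd_opts.force_enable_xformers and shared.xformers_available.
    import torch

    MIN_CAPABILITY = (6, 0)   # lower bound kept by the patch (Pascal-class GPUs)
    MAX_CAPABILITY = (12, 0)  # upper bound raised by the patch

    def xformers_capability_ok() -> bool:
        """Return True if the current CUDA device falls inside the supported range."""
        if not torch.cuda.is_available():
            return False
        capability = torch.cuda.get_device_capability()  # (major, minor) tuple
        # Python compares tuples lexicographically, so (9, 0) <= (12, 0) holds
        # and devices reporting e.g. (8, 6) or (12, 0) pass the check.
        return MIN_CAPABILITY <= capability <= MAX_CAPABILITY

    if __name__ == "__main__":
        print("xformers capability check:", xformers_capability_ok())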