From d42526f44828508bf7bc19432fbba685baad97e5 Mon Sep 17 00:00:00 2001 From: aria1th <35677394+aria1th@users.noreply.github.com> Date: Fri, 8 Dec 2023 02:54:52 +0900 Subject: [PATCH] remove print messages --- extensions-builtin/deepcache/deepcache.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/extensions-builtin/deepcache/deepcache.py b/extensions-builtin/deepcache/deepcache.py index 8daa629af..5276d0148 100644 --- a/extensions-builtin/deepcache/deepcache.py +++ b/extensions-builtin/deepcache/deepcache.py @@ -8,6 +8,7 @@ import torch from ldm.modules.diffusionmodules.openaimodel import timestep_embedding from scripts.forward_timestep_embed_patch import forward_timestep_embed +from logging import getLogger @dataclass class DeepCacheParams: cache_in_level: int = 0 @@ -39,11 +40,12 @@ class DeepCacheSession: total = self.cache_success_count + self.cache_fail_count if total == 0: return - print(f"DeepCache success rate: {self.cache_success_count / total * 100}% ({self.cache_success_count}/{total})") + logger = getLogger(__name__) + logger.info("DeepCache Information :") for fail_reasons, count in self.fail_reasons.items(): - print(f" {fail_reasons}: {count}") + logger.info(f" {fail_reasons}: {count}") for success_reasons, count in self.success_reasons.items(): - print(f" {success_reasons}: {count}") + logger.info(f" {success_reasons}: {count}") def deepcache_hook_model(self, unet, params:DeepCacheParams): """ @@ -94,8 +96,6 @@ class DeepCacheSession: self.success_reasons['cached_exact'] += 1 CACHE_LAST["last"] = CACHE_LAST[f"timestep_{current_timestep}"] # update last return CACHE_LAST[f"timestep_{current_timestep}"] - else: - print(f"Cache not found for timestep {current_timestep}\n available: {list(CACHE_LAST.keys())}") self.fail_reasons['full_run_step_rate_division'] += 1 self.cache_fail_count += 1 return None