From bb154eb71f02659398a58d7f305c7fc54602e563 Mon Sep 17 00:00:00 2001
From: kijai <40791699+kijai@users.noreply.github.com>
Date: Mon, 17 Mar 2025 09:48:06 +0200
Subject: [PATCH] Fix TeaCache start/end

---
 nodes/model_optimization_nodes.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/nodes/model_optimization_nodes.py b/nodes/model_optimization_nodes.py
index 79ebee1..2a9d0ad 100644
--- a/nodes/model_optimization_nodes.py
+++ b/nodes/model_optimization_nodes.py
@@ -780,7 +780,12 @@ def teacache_wanvideo_forward_orig(self, x, t, context, clip_fea=None, freqs=Non
             #print(f"TeaCache: Skipping {suffix} step")
         return should_calc, cache

-    should_calc, cache = tea_cache(x, e0, e, kwargs)
+    teacache_enabled = transformer_options.get("teacache_enabled", False)
+    if not teacache_enabled:
+        should_calc = True
+    else:
+        should_calc, cache = tea_cache(x, e0, e, kwargs)
+
     if should_calc:
         original_x = x.clone().detach()
     patches_replace = transformer_options.get("patches_replace", {})
@@ -796,7 +801,8 @@ def teacache_wanvideo_forward_orig(self, x, t, context, clip_fea=None, freqs=Non
         else:
             x = block(x, e=e0, freqs=freqs, context=context)

-        cache['previous_residual'] = (x - original_x).to(transformer_options["teacache_device"])
+        if teacache_enabled:
+            cache['previous_residual'] = (x - original_x).to(transformer_options["teacache_device"])

     # head
     x = self.head(x, e)