From 4659f8e3575011fb9c3decf1be28e916a7fa7703 Mon Sep 17 00:00:00 2001
From: Mr-Neutr0n <64578610+Mr-Neutr0n@users.noreply.github.com>
Date: Wed, 11 Feb 2026 19:51:32 +0530
Subject: [PATCH] fix: pass additional_tokens and
 n_times_crossframe_attn_in_self through checkpoint

---
 sgm/modules/attention.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sgm/modules/attention.py b/sgm/modules/attention.py
index 52a50b7bd..d5e166886 100644
--- a/sgm/modules/attention.py
+++ b/sgm/modules/attention.py
@@ -543,7 +543,7 @@ def forward(
         # return mixed_checkpoint(self._forward, kwargs, self.parameters(), self.checkpoint)
         if self.checkpoint:
             # inputs = {"x": x, "context": context}
-            return checkpoint(self._forward, x, context)
+            return checkpoint(self._forward, x, context, additional_tokens, n_times_crossframe_attn_in_self)
             # return checkpoint(self._forward, inputs, self.parameters(), self.checkpoint)
         else:
             return self._forward(**kwargs)