Mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2025-09-10 19:46:38 +00:00)
Add logs to verify optimized_attention_override is passed all the way into attention function
@@ -226,7 +226,14 @@ def wrap_attn(func):
                     # move up the stack
                     frame = frame.f_back
 
-                LOG_CONTENTS["|".join(logged_stack)] = (logged_stack_to_index, logged_stack)
+                # check if we get what we want from transformer_options
+                t_check = "❌❌❌"
+                transformer_options = kwargs.get("transformer_options", None)
+                if transformer_options is not None:
+                    if "optimized_attention_override" in transformer_options:
+                        t_check = "✅✅✅"
+
+                LOG_CONTENTS["|".join(logged_stack)] = (t_check, logged_stack_to_index, logged_stack)
 
         finally:
             # Important: break ref cycles so tensors aren't pinned
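For context, a minimal sketch of the kind of logging wrapper this hunk instruments: before delegating to the real attention function, it records whether "optimized_attention_override" was threaded through via the transformer_options kwarg. The names wrap_attn, LOG_ATTN_CALLS, and LOG_CONTENTS appear in the diff; the rest of this sketch (the key used for LOG_CONTENTS, the placeholder attention function) is assumed for illustration and is not the actual ComfyUI implementation.

import functools

LOG_ATTN_CALLS = False   # assumed module-level toggle, as in the diff
LOG_CONTENTS = {}        # assumed module-level store for per-call results

def wrap_attn(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if LOG_ATTN_CALLS:
            transformer_options = kwargs.get("transformer_options", None)
            has_override = (
                transformer_options is not None
                and "optimized_attention_override" in transformer_options
            )
            # mirror the diff's t_check markers: ✅✅✅ if the override arrived, ❌❌❌ if not
            LOG_CONTENTS[func.__name__] = "✅✅✅" if has_override else "❌❌❌"
        return func(*args, **kwargs)
    return wrapper

@wrap_attn
def attention_basic(q, k, v, heads, transformer_options=None):
    ...  # stand-in for an actual attention implementation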
@@ -1034,8 +1034,13 @@ class CFGGuider:
                 self,
                 comfy.patcher_extension.get_all_wrappers(comfy.patcher_extension.WrappersMP.OUTER_SAMPLE, self.model_options, is_model_options=True)
             )
-            # comfy.ldm.modules.attention.LOG_ATTN_CALLS = True #TODO: Remove this $$$$$
+            comfy.ldm.modules.attention.LOG_ATTN_CALLS = True #TODO: Remove this $$$$$
+            comfy.ldm.modules.attention.LOG_CONTENTS = {}
+            if "optimized_attention_override" not in self.model_options["transformer_options"]:
+                def optimized_attention_override(func, *args, **kwargs):
+                    return func(*args, **kwargs)
+                self.model_options["transformer_options"]["optimized_attention_override"] = optimized_attention_override
 
             output = executor.execute(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
         finally:
             cast_to_load_options(self.model_options, device=self.model_patcher.offload_device)
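This hunk installs a passthrough override into transformer_options before sampling, so the logging added in the attention module can confirm the key survives the call chain. A hedged example of how a caller might install its own override through model_options["transformer_options"], in the same shape as the default passthrough above; the override's logging behaviour and the install_override helper are illustrative assumptions, not part of the diff.

def logging_attention_override(func, *args, **kwargs):
    # func is the attention implementation chosen by the caller; delegate to it
    # unchanged, but note that the call was routed through the override.
    print(f"attention override invoked for {getattr(func, '__name__', func)}")
    return func(*args, **kwargs)

def install_override(model_options):
    # model_options follows the structure used in the diff: a dict holding a
    # "transformer_options" dict that is forwarded to attention calls.
    transformer_options = model_options.setdefault("transformer_options", {})
    transformer_options["optimized_attention_override"] = logging_attention_override
    return model_options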