Turn off attention logging for now, make the AttentionOverrideTest node have a dropdown with the available attention functions (this is a test node only)
@@ -1034,7 +1034,7 @@ class CFGGuider:
                 self,
                 comfy.patcher_extension.get_all_wrappers(comfy.patcher_extension.WrappersMP.OUTER_SAMPLE, self.model_options, is_model_options=True)
             )
-            comfy.ldm.modules.attention.LOG_ATTN_CALLS = True #TODO: Remove this $$$$$
+            # comfy.ldm.modules.attention.LOG_ATTN_CALLS = True #TODO: Remove this $$$$$
             comfy.ldm.modules.attention.LOG_CONTENTS = {}
             output = executor.execute(noise, latent_image, sampler, sigmas, denoise_mask, callback, disable_pbar, seed)
         finally:
@@ -1042,8 +1042,9 @@ class CFGGuider:
             self.model_options = orig_model_options
             self.model_patcher.hook_mode = orig_hook_mode
             self.model_patcher.restore_hook_patches()
+            if comfy.ldm.modules.attention.LOG_ATTN_CALLS:
+                comfy.ldm.modules.attention.save_log_contents()
             comfy.ldm.modules.attention.LOG_ATTN_CALLS = False #TODO: Remove this $$$$$
-            comfy.ldm.modules.attention.save_log_contents()
             comfy.ldm.modules.attention.LOG_CONTENTS = {}
 
             del self.conds
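Taken together, the two CFGGuider hunks leave the logging machinery in place but off by default: the flag is no longer forced on before sampling, and the cleanup in the finally block now flushes the captured log only when the flag was actually set, then always resets the flag and the contents dict. A minimal sketch of that pattern, assuming only the module-level names that appear in the diff (sample_with_attn_logging and run_sampling are hypothetical stand-ins for the wrapped executor.execute call):

    import comfy.ldm.modules.attention

    def sample_with_attn_logging(run_sampling, log_attn=False):
        # Optionally enable the module-level attention logging before sampling.
        comfy.ldm.modules.attention.LOG_ATTN_CALLS = log_attn
        comfy.ldm.modules.attention.LOG_CONTENTS = {}
        try:
            return run_sampling()
        finally:
            # Flush only if logging was on; always reset both globals so a
            # failed run cannot leave logging enabled for the next sample.
            if comfy.ldm.modules.attention.LOG_ATTN_CALLS:
                comfy.ldm.modules.attention.save_log_contents()
            comfy.ldm.modules.attention.LOG_ATTN_CALLS = False
            comfy.ldm.modules.attention.LOG_CONTENTS = {}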
@@ -1005,14 +1005,18 @@ class Wan22ImageToVideoLatent(io.ComfyNode):
 
 import comfy.patcher_extension
 import comfy.ldm.modules.attention
+import logging
+
 class AttentionOverrideTest(io.ComfyNode):
     @classmethod
     def define_schema(cls):
+        attention_function_names = list(comfy.ldm.modules.attention.REGISTERED_ATTENTION_FUNCTIONS.keys())
         return io.Schema(
             node_id="AttentionOverrideTest",
             category="devtools",
             inputs=[
                 io.Model.Input("model"),
+                io.Combo.Input("attention", options=attention_function_names),
             ],
             outputs=[
                 io.Model.Output(),
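The new Combo input is populated from whatever is registered in REGISTERED_ATTENTION_FUNCTIONS at the time define_schema runs. Only REGISTERED_ATTENTION_FUNCTIONS and get_attention_function appear in this diff; a plausible shape for that registry, with a hypothetical register_attention_function helper, is:

    # Sketch of the registry the dropdown reads from. The register helper is
    # an assumption; only the dict and the getter are visible in the diff.
    REGISTERED_ATTENTION_FUNCTIONS: dict = {}

    def register_attention_function(name: str, func):
        REGISTERED_ATTENTION_FUNCTIONS[name] = func

    def get_attention_function(name: str, default=None):
        # Return the registered implementation, or `default` for unknown
        # names (the node passes None and handles that case itself).
        return REGISTERED_ATTENTION_FUNCTIONS.get(name, default)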
@@ -1020,9 +1024,10 @@ class AttentionOverrideTest(io.ComfyNode):
         )
 
     @staticmethod
-    def attention_override(func, transformer_options, *args, **kwargs):
-        new_attention = comfy.ldm.modules.attention.attention_basic
-        return new_attention.__wrapped__(*args, **kwargs)
+    def attention_override_factory(attention_func):
+        def attention_override(func, *args, **kwargs):
+            return attention_func(*args, **kwargs)
+        return attention_override
 
     @staticmethod
     def sampler_sampler_wrapper(executor, *args, **kwargs):
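The old static method hard-coded attention_basic (and had to reach for __wrapped__ to bypass its wrapper); the factory version instead closes over whichever implementation the dropdown selected. The override's calling convention, where the default attention function arrives first followed by the usual attention arguments, implies a consumer roughly like the hedged sketch below; call_attention and the exact keyword layout are assumptions, not code from this commit:

    from comfy.ldm.modules.attention import optimized_attention

    def call_attention(q, k, v, heads, transformer_options=None, **kwargs):
        # If a node installed an override in transformer_options, delegate to
        # it, passing the default implementation first so the override could
        # fall back to it; otherwise just run the default.
        override = (transformer_options or {}).get("optimized_attention_override")
        if override is not None:
            return override(optimized_attention, q, k, v, heads=heads, **kwargs)
        return optimized_attention(q, k, v, heads=heads, **kwargs)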
@@ -1033,10 +1038,14 @@ class AttentionOverrideTest(io.ComfyNode):
         pass
 
     @classmethod
-    def execute(cls, model: io.Model.Type) -> io.NodeOutput:
-        model = model.clone()
+    def execute(cls, model: io.Model.Type, attention: str) -> io.NodeOutput:
+        attention_func = comfy.ldm.modules.attention.get_attention_function(attention, None)
+        if attention_func is None:
+            logging.info(f"Attention type '{attention}' not found, using default optimized attention for your hardware.")
+            return model
 
-        model.model_options["transformer_options"]["optimized_attention_override"] = cls.attention_override
+        model = model.clone()
+        model.model_options["transformer_options"]["optimized_attention_override"] = cls.attention_override_factory(attention_func)
         model.add_wrapper_with_key(comfy.patcher_extension.WrappersMP.SAMPLER_SAMPLE, "attention_override_test", cls.sampler_sampler_wrapper)
         return io.NodeOutput(model)
 
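execute now wires everything up: it resolves the dropdown name via get_attention_function, falls back to the default attention with an info log when the name is unknown, and otherwise installs the closure on a cloned model along with a SAMPLER_SAMPLE wrapper. In comfy.patcher_extension, a wrapper receives the next executor as its first argument and must call through to it, so the minimal shape of sampler_sampler_wrapper is something like the following (the setup/teardown bodies are illustrative assumptions):

    @staticmethod
    def sampler_sampler_wrapper(executor, *args, **kwargs):
        try:
            # per-run setup could go here
            return executor(*args, **kwargs)
        finally:
            # teardown always runs, even if sampling raises
            pass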