Make last PR not crash comfy on old pytorch. (#9324)

comfyanonymous
2025-08-13 12:12:41 -07:00
committed by GitHub
parent 3da5a07510
commit 9df8792d4b
4 changed files with 27 additions and 17 deletions

@@ -285,7 +285,7 @@ def pytorch_attention(q, k, v):
     )
 
     try:
-        out = ops.scaled_dot_product_attention(q, k, v, attn_mask=None, dropout_p=0.0, is_causal=False)
+        out = comfy.ops.scaled_dot_product_attention(q, k, v, attn_mask=None, dropout_p=0.0, is_causal=False)
         out = out.transpose(2, 3).reshape(orig_shape)
     except model_management.OOM_EXCEPTION:
         logging.warning("scaled_dot_product_attention OOMed: switched to slice attention")
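For context on the fix: the call site now goes through a wrapper in comfy.ops rather than invoking PyTorch's attention kernel directly, so PyTorch version differences can be absorbed in one place. The sketch below is an assumption about what such a compatibility wrapper can look like, not the actual comfy.ops implementation; only torch.nn.functional.scaled_dot_product_attention and its enable_gqa keyword (added in PyTorch 2.5) are real library surface, everything else is illustrative.

import torch

def scaled_dot_product_attention(q, k, v, *args, **kwargs):
    # Hypothetical compatibility shim (not the actual comfy.ops code):
    # forward to PyTorch's SDPA, and if an older release rejects a newer
    # keyword argument (e.g. enable_gqa from PyTorch 2.5+) with a
    # TypeError, drop it and retry with the supported signature.
    try:
        return torch.nn.functional.scaled_dot_product_attention(q, k, v, *args, **kwargs)
    except TypeError:
        kwargs.pop("enable_gqa", None)
        return torch.nn.functional.scaled_dot_product_attention(q, k, v, *args, **kwargs)

Routing every caller through one wrapper, as this diff does, means old and new PyTorch versions take the same code path and any fallback logic never has to be duplicated per call site.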