Disable pytorch attention in VAE for AMD.

comfyanonymous
2025-02-14 05:42:14 -05:00
parent d7b4bf21a2
commit 1cd6cd6080
2 changed files with 6 additions and 1 deletion

@@ -912,6 +912,11 @@ def pytorch_attention_enabled():
     global ENABLE_PYTORCH_ATTENTION
     return ENABLE_PYTORCH_ATTENTION
 
+def pytorch_attention_enabled_vae():
+    if is_amd():
+        return False # enabling pytorch attention on AMD currently causes crash when doing high res
+    return pytorch_attention_enabled()
+
 def pytorch_attention_flash_attention():
     global ENABLE_PYTORCH_ATTENTION
     if ENABLE_PYTORCH_ATTENTION:
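
For context, the new helper is intended to be consulted where the VAE attention kernel is chosen, so that AMD devices skip PyTorch scaled-dot-product attention and use a plain fallback instead. The following is a minimal, hypothetical sketch of such a switch; it is not the actual ComfyUI caller, and the names pytorch_attention_enabled_vae_stub and vae_attention are invented for illustration.

    import torch

    def pytorch_attention_enabled_vae_stub():
        # Stand-in for comfy.model_management.pytorch_attention_enabled_vae();
        # returning False here mimics the AMD case from this commit.
        return False

    def vae_attention(q, k, v):
        # q, k, v: tensors of shape (batch, heads, tokens, dim)
        if pytorch_attention_enabled_vae_stub():
            # PyTorch fused attention path
            return torch.nn.functional.scaled_dot_product_attention(q, k, v)
        # Explicit matmul/softmax fallback, avoiding the fused kernel entirely
        scale = q.shape[-1] ** -0.5
        weights = torch.softmax(q @ k.transpose(-2, -1) * scale, dim=-1)
        return weights @ v

    if __name__ == "__main__":
        q = torch.randn(1, 1, 16, 8)
        k = torch.randn(1, 1, 16, 8)
        v = torch.randn(1, 1, 16, 8)
        print(vae_attention(q, k, v).shape)  # torch.Size([1, 1, 16, 8])

The design choice in the commit keeps the decision in one place (model management) rather than scattering AMD checks through the VAE code, while leaving pytorch_attention_enabled() unchanged for the rest of the model.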