Not sure if AMD actually supports fp16 acc but it doesn't crash. (#9258)

comfyanonymous 2025-08-09 09:49:25 -07:00 committed by GitHub
parent 735bb4bdb1
commit 5828607ccf


@@ -340,7 +340,7 @@ if ENABLE_PYTORCH_ATTENTION:
 PRIORITIZE_FP16 = False  # TODO: remove and replace with something that shows exactly which dtype is faster than the other
 try:
-    if is_nvidia() and PerformanceFeature.Fp16Accumulation in args.fast:
+    if (is_nvidia() or is_amd()) and PerformanceFeature.Fp16Accumulation in args.fast:
         torch.backends.cuda.matmul.allow_fp16_accumulation = True
         PRIORITIZE_FP16 = True  # TODO: limit to cards where it actually boosts performance
         logging.info("Enabled fp16 accumulation.")
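
The change widens the condition so the `PerformanceFeature.Fp16Accumulation` path in `args.fast` also runs on AMD; whether ROCm actually honors `torch.backends.cuda.matmul.allow_fp16_accumulation` is left open by the commit message. Below is a minimal, standalone sketch (not part of this commit) for probing that locally: it checks whether the running PyTorch build exposes the flag, then times an fp16 matmul with the flag off and on. The `fp16_matmul_time` helper and the matrix sizes and iteration counts are illustrative choices, not anything from the repository.

```python
# Minimal probe sketch, NOT part of this commit: check whether the running
# PyTorch build exposes the fp16-accumulation matmul flag and whether
# toggling it changes fp16 matmul time on the current GPU. Helper name and
# sizes/iteration counts are illustrative.
import time
import torch

def fp16_matmul_time(n=4096, iters=20):
    """Average time of an n x n fp16 matmul on the current CUDA/ROCm device."""
    a = torch.randn(n, n, device="cuda", dtype=torch.float16)
    b = torch.randn(n, n, device="cuda", dtype=torch.float16)
    a @ b  # warm-up so kernel selection is not included in the timing
    torch.cuda.synchronize()
    start = time.perf_counter()
    for _ in range(iters):
        a @ b
    torch.cuda.synchronize()
    return (time.perf_counter() - start) / iters

if __name__ == "__main__":
    if not torch.cuda.is_available():
        raise SystemExit("No CUDA/ROCm device available.")
    # torch.version.hip is a string on ROCm builds and None on CUDA builds,
    # roughly the distinction is_amd()/is_nvidia() make in the diff above.
    backend = "ROCm (AMD)" if torch.version.hip else "CUDA (NVIDIA)"
    print(f"Backend: {backend}, device: {torch.cuda.get_device_name(0)}")

    matmul = torch.backends.cuda.matmul
    if not hasattr(matmul, "allow_fp16_accumulation"):
        raise SystemExit("This PyTorch build does not expose allow_fp16_accumulation.")

    matmul.allow_fp16_accumulation = False
    t_fp32_acc = fp16_matmul_time()
    matmul.allow_fp16_accumulation = True
    t_fp16_acc = fp16_matmul_time()
    print(f"fp16 matmul: {t_fp32_acc * 1e3:.2f} ms (fp32 accumulate) "
          f"vs {t_fp16_acc * 1e3:.2f} ms (fp16 accumulate)")
```

If the two timings are essentially identical on a ROCm card, the flag is most likely being accepted but acting as a no-op, which would be consistent with the "doesn't crash" wording of the commit message.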