Fix potential rope issue. (#9710)

Author: comfyanonymous
Date: 2025-09-03 19:20:13 -07:00
Committed by: GitHub
Parent: f48d05a2d1
Commit: 72855db715


@@ -632,7 +632,7 @@ class ContinuousTransformer(nn.Module):
         # Attention layers
         if self.rotary_pos_emb is not None:
-            rotary_pos_emb = self.rotary_pos_emb.forward_from_seq_len(x.shape[1], dtype=x.dtype, device=x.device)
+            rotary_pos_emb = self.rotary_pos_emb.forward_from_seq_len(x.shape[1], dtype=torch.float, device=x.device)
         else:
             rotary_pos_emb = None