Fix WAN model issues when the prompt length is long.

This commit is contained in:
comfyanonymous
2025-02-26 20:34:02 -05:00
parent 8e69e2ddfd
commit 3ea3bc8546
2 changed files with 2 additions and 2 deletions

View File

@@ -421,7 +421,7 @@ class WanModel(torch.nn.Module):
e0 = self.time_projection(e).unflatten(1, (6, self.dim))
# context
+        context = self.text_embedding(torch.cat([context, context.new_zeros(context.size(0), self.text_len - context.size(1), context.size(2))], dim=1))
-        context = self.text_embedding(context)
if clip_fea is not None and self.img_emb is not None:
context_clip = self.img_emb(clip_fea) # bs x 257 x dim