Lint all unused variables (#5989)

* Enable F841

* Autofix

* Remove all unused variable assignments
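For context, F841 is the pyflakes-derived rule code (used by linters such as Ruff and flake8) for a local variable that is assigned to but never used. The sketch below is a minimal, hypothetical module (names like TinyBlock and proj are illustrative, not from this repo) showing the dead-store pattern this commit targets and the shape of the autofix:

import torch


class TinyBlock(torch.nn.Module):
    """Hypothetical module mirroring the pattern this commit removes."""

    def __init__(self, dim):
        super().__init__()
        self.layer_norm = torch.nn.LayerNorm(dim)
        self.proj = torch.nn.Linear(dim, dim)

    def forward_before(self, x):
        # F841: `normed` is assigned but never read; the normalized value
        # is recomputed inline on the next line, so this is a dead store.
        normed = self.layer_norm(x)
        output = self.proj(self.layer_norm(x))
        return x + output

    def forward_after(self, x):
        # The fix simply deletes the dead assignment; behavior is unchanged
        # because the value was never consumed.
        output = self.proj(self.layer_norm(x))
        return x + output

The hunk below applies the same transformation to T5LayerSelfAttention.forward.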
Commit d9d7f3c619 by Chenlei Hu, 2024-12-12 14:59:16 -08:00, committed by GitHub
Parent: fd5dfb812c
29 changed files with 22 additions and 72 deletions


@@ -172,7 +172,6 @@ class T5LayerSelfAttention(torch.nn.Module):
         # self.dropout = nn.Dropout(config.dropout_rate)

     def forward(self, x, mask=None, past_bias=None, optimized_attention=None):
-        normed_hidden_states = self.layer_norm(x)
         output, past_bias = self.SelfAttention(self.layer_norm(x), mask=mask, past_bias=past_bias, optimized_attention=optimized_attention)
         # x = x + self.dropout(attention_output)
         x += output