Skip to content

Commit eac4313

Browse files
committed
drop me: adjust comment
1 parent 56c6b43 commit eac4313

File tree

1 file changed: +1 addition, -1 deletion

i6_models/parts/conformer/mhsa_rel_pos.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -219,7 +219,7 @@ def forward(self, input_tensor: torch.Tensor, sequence_mask: torch.Tensor) -> to
219219
print("q_with_bias_u", q_with_bias_u.transpose(-3, -2).shape)
220220
print("k", k.transpose(-3, -2).shape)
221221
print("v", v.transpose(-3, -2).shape)
222-
print("attn_bd_mask_scaled", attn_bd_mask_scaled.transpose(-3, -2).shape)
222+
print("attn_bd_mask_scaled", attn_bd_mask_scaled.shape)
223223
attn_output = F.scaled_dot_product_attention(
224224
q_with_bias_u.transpose(-3, -2), # [B, #heads, T, F']
225225
k.transpose(-3, -2), # [B, #heads, T', F']

Commit comments (0)