refactor(model): 移除 encoder 的 padding_mask 参数调用

This commit is contained in:
songsen authored 2026-02-24 00:48:54 +08:00
parent 63efc49aa6
commit be6b686bd1
1 changed file with 3 additions and 3 deletions

View File

@ -181,9 +181,9 @@ class MoEModel(nn.Module):
  # ----- 2. Transformer Encoder -----
  # padding mask: True 表示忽略该位置
  # padding_mask = attention_mask == 0
- # encoded = self.encoder(
- #     embeddings, src_key_padding_mask=padding_mask
- # )  # [B, S, H]
+ encoded = self.encoder(
+     embeddings  #, src_key_padding_mask=padding_mask
+ )  # [B, S, H]
  # ----- 3. 池化量 -----
  # for block in self.shared_resblocks: