refactor(model): 移除 encoder 的 padding_mask 参数调用
This commit is contained in:
parent
63efc49aa6
commit
be6b686bd1
|
|
@@ -181,9 +181,9 @@ class MoEModel(nn.Module):
        # ----- 2. Transformer Encoder -----
        # padding mask: True 表示忽略该位置
        # padding_mask = attention_mask == 0
        # encoded = self.encoder(
        #     embeddings, src_key_padding_mask=padding_mask
        # )  # [B, S, H]
        encoded = self.encoder(
            embeddings  # , src_key_padding_mask=padding_mask
        )  # [B, S, H]

        # ----- 3. 池化量 -----
        # for block in self.shared_resblocks:
|
|
|||
Loading…
Reference in New Issue