revert unrelated transformer.py diffs from rebase

Bailey Hirota 2025-08-05 21:44:26 +09:00
parent c23af2ea1a
commit ed79fa3c04
6 changed files with 0 additions and 6 deletions
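For reference, a minimal sketch of the forward signature these hunks restore, i.e. with the stray **kwargs catch-all dropped again. This is not this repository's actual code; the leading tgt/memory/mask arguments are assumed from the standard torch.nn.TransformerDecoderLayer interface and do not appear in the hunks below.

# Sketch only, assuming a PyTorch-style decoder layer.
from typing import Optional

import torch
from torch import nn


class TransformerDecoderLayer(nn.Module):
    def forward(
        self,
        tgt: torch.Tensor,
        memory: torch.Tensor,
        tgt_mask: Optional[torch.Tensor] = None,
        memory_mask: Optional[torch.Tensor] = None,
        tgt_key_padding_mask: Optional[torch.Tensor] = None,
        memory_key_padding_mask: Optional[torch.Tensor] = None,
    ) -> torch.Tensor:
        """Pass the inputs (and mask) through the decoder layer."""
        # Placeholder body; the real layer applies self-attention,
        # cross-attention over `memory`, and a feed-forward block.
        return tgt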

@@ -545,7 +545,6 @@ class TransformerDecoderLayer(nn.Module):
         memory_mask: Optional[torch.Tensor] = None,
         tgt_key_padding_mask: Optional[torch.Tensor] = None,
         memory_key_padding_mask: Optional[torch.Tensor] = None,
-        **kwargs,
     ) -> torch.Tensor:
         """Pass the inputs (and mask) through the decoder layer.

@@ -549,7 +549,6 @@ class TransformerDecoderLayer(nn.Module):
         memory_mask: Optional[torch.Tensor] = None,
         tgt_key_padding_mask: Optional[torch.Tensor] = None,
         memory_key_padding_mask: Optional[torch.Tensor] = None,
-        **kwargs,
     ) -> torch.Tensor:
         """Pass the inputs (and mask) through the decoder layer.

@@ -549,7 +549,6 @@ class TransformerDecoderLayer(nn.Module):
         memory_mask: Optional[torch.Tensor] = None,
         tgt_key_padding_mask: Optional[torch.Tensor] = None,
         memory_key_padding_mask: Optional[torch.Tensor] = None,
-        **kwargs,
     ) -> torch.Tensor:
         """Pass the inputs (and mask) through the decoder layer.

@@ -550,7 +550,6 @@ class TransformerDecoderLayer(nn.Module):
         tgt_key_padding_mask: Optional[torch.Tensor] = None,
         memory_key_padding_mask: Optional[torch.Tensor] = None,
         warmup: float = 1.0,
-        **kwargs,
     ) -> torch.Tensor:
         """Pass the inputs (and mask) through the decoder layer.

@@ -537,7 +537,6 @@ class TransformerDecoderLayer(nn.Module):
         memory_mask: Optional[torch.Tensor] = None,
         tgt_key_padding_mask: Optional[torch.Tensor] = None,
         memory_key_padding_mask: Optional[torch.Tensor] = None,
-        **kwargs,
     ) -> torch.Tensor:
         """Pass the inputs (and mask) through the decoder layer.

@@ -567,7 +567,6 @@ class TransformerDecoderLayer(nn.Module):
         memory_mask: Optional[torch.Tensor] = None,
         tgt_key_padding_mask: Optional[torch.Tensor] = None,
         memory_key_padding_mask: Optional[torch.Tensor] = None,
-        **kwargs,
     ) -> torch.Tensor:
         """Pass the inputs (and mask) through the decoder layer.