refactor transformer decoder and revamp the left padding attention mask by vince62s · Pull Request #178 · eole-nlp/eole · GitHub

refactor transformer decoder and revamp the left padding attention mask #178


Merged · 12 commits · Jan 13, 2025
Changes from all commits
4 changes: 2 additions & 2 deletions .github/workflows/push.yml
@@ -186,7 +186,7 @@ jobs:
-config eole/tests/data/lm_data.yaml \
-src_vocab /tmp/eole.vocab.src \
-tgt_vocab /tmp/eole.vocab.src \
-model '{"hidden_size": 16, "transformer_ff": 64, "embeddings": {"word_vec_size": 16}, "encoder": None, "decoder": {"decoder_type": "transformer_lm", "layers": 2, "heads": 4}}' \
-model '{"hidden_size": 16, "transformer_ff": 64, "embeddings": {"word_vec_size": 16}, "encoder": None, "decoder": {"decoder_type": "transformer", "layers": 2, "heads": 4}}' \
-training '{"batch_size": 10, "num_workers": 0, "bucket_size": 1024, "train_steps": 10}' \
-src_vocab_size 1000 \
-tgt_vocab_size 1000 \
@@ -357,7 +357,7 @@ jobs:
-config eole/tests/data/lm_data.yaml \
-src_vocab /tmp/eole.vocab.src \
-tgt_vocab /tmp/eole.vocab.src \
-model '{"layers": 2, "hidden_size": 16, "transformer_ff": 64, "embeddings": {"word_vec_size": 16}, "encoder": None, "decoder": {"decoder_type": "transformer_lm", "heads": 4}}' \
-model '{"layers": 2, "hidden_size": 16, "transformer_ff": 64, "embeddings": {"word_vec_size": 16}, "encoder": None, "decoder": {"decoder_type": "transformer", "heads": 4}}' \
-training '{"batch_size": 10, "num_workers": 0, "bucket_size": 1024, "train_steps": 10, "model_path": "/tmp/lm.eole.model", "save_checkpoint_steps": 10}' \
-src_vocab_size 1000 \
-tgt_vocab_size 1000 \
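The two workflow changes above only swap decoder_type from "transformer_lm" to "transformer" in the LM smoke tests. A minimal sketch of the resulting decoder-only override, written out as a Python dict with the field values copied verbatim from the -model argument above (nothing else in the training call changes):

# Decoder-only LM override used by the CI test above.
model_override = {
    "hidden_size": 16,
    "transformer_ff": 64,
    "embeddings": {"word_vec_size": 16},
    "encoder": None,  # no encoder: this is a decoder-only language model
    "decoder": {
        "decoder_type": "transformer",  # previously "transformer_lm"
        "layers": 2,
        "heads": 4,
    },
}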
19 changes: 3 additions & 16 deletions eole/config/models.py
@@ -318,15 +318,6 @@ def _validate_transformer_decoder_config(self):
return self


class TransformerLMDecoderConfig(TransformerDecoderConfig):
"""
Right now just wraps TransformerDecoderConfig for simplicity.
Might merge in a single class later once TransformerLM path is clarified.
"""

decoder_type: Literal["transformer_lm"] = Field(default="transformer_lm")


# use Field with default= + description would be more readable
# was inheriting from VocabConfig, but removed for now to facilitate inference tests
# could we have different BaseModelConfig classes (inheriting from a base one)
@@ -353,7 +344,6 @@ class BaseModelConfig(Config):
decoder: (
Union[
TransformerDecoderConfig,
TransformerLMDecoderConfig,
RnnDecoderConfig,
CnnDecoderConfig,
]
@@ -489,10 +479,7 @@ def update_model_opts(self):

if self.decoder is not None:
update_dict["decoder"] = {"tgt_word_vec_size": self.embeddings.tgt_word_vec_size}
if getattr(self.decoder, "decoder_type", None) in [
"transformer",
"transformer_lm",
]:
if getattr(self.decoder, "decoder_type", None) == "transformer":
update_dict["decoder"].update(
{
"position_encoding_type": self.embeddings.position_encoding_type,
@@ -669,9 +656,9 @@ def encoder_decoder_type(cls, data: Any) -> Any:
if not (isinstance(data, dict)):
return data
if "decoder" in data.keys():
data["decoder"]["decoder_type"] = "transformer_lm"
data["decoder"]["decoder_type"] = "transformer"
else:
data["decoder"] = {"decoder_type": "transformer_lm"}
data["decoder"] = {"decoder_type": "transformer"}
return data

@model_validator(mode="before")
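Net effect of the models.py changes: TransformerLMDecoderConfig is removed, and the before-validator above coerces any LM-style model dict to the unified "transformer" decoder type. A minimal standalone sketch of that coercion, written as a plain function rather than the pydantic classmethod, for illustration only:

def encoder_decoder_type(data):
    # Mirrors the validator body shown above: force the unified decoder type.
    if not isinstance(data, dict):
        return data
    if "decoder" in data:
        data["decoder"]["decoder_type"] = "transformer"
    else:
        data["decoder"] = {"decoder_type": "transformer"}
    return data

assert encoder_decoder_type({})["decoder"]["decoder_type"] == "transformer"
assert encoder_decoder_type({"decoder": {"layers": 2}})["decoder"]["decoder_type"] == "transformer"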
4 changes: 1 addition & 3 deletions eole/decoders/__init__.py
@@ -1,8 +1,7 @@
"""Module defining decoders."""

from eole.decoders.rnn_decoder import InputFeedRNNDecoder, StdRNNDecoder
from eole.decoders.transformer_decoder import TransformerDecoder
from eole.decoders.transformer_lm_decoder import TransformerLMDecoder
from eole.decoders.transformer import TransformerDecoder
from eole.decoders.cnn_decoder import CNNDecoder


@@ -11,5 +10,4 @@
"ifrnn": InputFeedRNNDecoder,
"cnn": CNNDecoder,
"transformer": TransformerDecoder,
"transformer_lm": TransformerLMDecoder,
}
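With "transformer_lm" dropped from the registry, both seq2seq and decoder-only LM models now resolve to the same TransformerDecoder class. A hedged usage sketch follows; the registry dict's name is not visible in this hunk, so str2dec is an assumption:

from eole.decoders import TransformerDecoder, str2dec  # "str2dec" name is assumed

decoder_cls = str2dec["transformer"]
assert decoder_cls is TransformerDecoder
# "transformer_lm" is no longer a valid key after this PR; decoder-only LMs
# build from the same TransformerDecoder class as seq2seq models.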
4 changes: 3 additions & 1 deletion eole/decoders/cnn_decoder.py
@@ -23,6 +23,7 @@ def __init__(
self,
model_config,
running_config=None,
with_cross_attn=False,
):
super(CNNDecoder, self).__init__()

@@ -49,11 +50,12 @@ def __init__(
)

@classmethod
def from_config(cls, model_config, running_config=None):
def from_config(cls, model_config, running_config=None, with_cross_attn=False):
"""Alternate constructor."""
return cls(
model_config,
running_config,
with_cross_attn=False,
)

def init_state(self, **kwargs):
2 changes: 1 addition & 1 deletion eole/decoders/decoder.py
@@ -15,7 +15,7 @@ def __init__(self, attentional=True):
self.state = {}

@classmethod
def from_config(cls, model_config, running_config=None):
def from_config(cls, model_config, running_config=None, with_cross_attn=False):
"""Alternate constructor.

Subclasses should override this method.
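The base from_config signature now carries a with_cross_attn flag, so the same TransformerDecoder can be built with or without encoder-decoder attention. A hedged sketch of how a model builder could use it; the helper name and call site below are assumptions, not eole API:

def build_decoder(decoder_cls, model_config, running_config, has_encoder):
    # Hypothetical helper: a decoder paired with an encoder gets cross-attention,
    # a decoder-only LM does not. Note that the non-transformer decoders in this
    # diff accept the flag but hard-code it to False in their own from_config.
    return decoder_cls.from_config(
        model_config,
        running_config=running_config,
        with_cross_attn=has_encoder,
    )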
6 changes: 5 additions & 1 deletion eole/decoders/rnn_decoder.py
@@ -22,6 +22,7 @@ def __init__(
self,
model_config,
running_config=None,
with_cross_attn=False,
):
super(RNNDecoderBase, self).__init__(
attentional=model_config.global_attention != "none" and model_config.global_attention is not None
@@ -66,12 +67,13 @@ def __init__(
)

@classmethod
def from_config(cls, model_config, running_config=None):
def from_config(cls, model_config, running_config=None, with_cross_attn=False):
"""Alternate constructor."""
# config = opt.model.decoder # RnnDecoderConfig
return cls(
model_config,
running_config=running_config,
with_cross_attn=False,
)

def init_state(self, **kwargs):
@@ -174,6 +176,7 @@ def __init__(
self,
model_config,
running_config=None,
with_cross_attn=False,
):
self.hidden_size = model_config.hidden_size
self._input_size = model_config.tgt_word_vec_size
@@ -253,6 +256,7 @@ def __init__(
self,
model_config,
running_config=None,
with_cross_attn=False,
):
self.hidden_size = model_config.hidden_size
self._input_size = model_config.tgt_word_vec_size + self.hidden_size