decoder and encoder have the same TFmer blocks
Dawith committed Oct 24, 2025
1 parent 70c3d54 commit e74353e
Showing 1 changed file with 0 additions and 21 deletions.
model/transformer.py (0 additions, 21 deletions)
@@ -74,27 +74,6 @@ def build_transformerblock(self, inputs, head_size, num_heads,

        return outputs

    def build_decoderblock(self, inputs, head_size, num_heads, ff_dim,
                           dropout):
        """
        Constructs the decoder block. This consists of layer normalization,
        a feedforward network with dropout, a residual connection, and
        masked multi-head attention: the same sublayers as the encoder
        block, applied in reverse order.
        """

        x = LayerNormalization(epsilon=1e-6)(inputs)
        # Position-wise feedforward on the normalized activations.
        x = Conv1D(filters=ff_dim, kernel_size=1, activation="relu")(x)
        x = Dropout(dropout)(x)
        x = Conv1D(filters=inputs.shape[-1], kernel_size=1)(x)
        x = Dropout(dropout)(x)
        res = x + inputs
        # Causal self-attention so each position attends only to the past.
        outputs = MultiHeadAttention(
            key_dim=head_size, num_heads=num_heads,
            dropout=dropout)(res, res, use_causal_mask=True)

        return outputs

def call(self, inputs):
"""
Calls the TimeSeriesTransformer model on a batch of inputs.
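The commit message notes that the decoder and encoder now share the same transformer blocks, so the separate decoder method is deleted. Below is a minimal sketch of how a single block can serve both roles, assuming the Keras layers already used in model/transformer.py; the standalone function form, the `causal` flag, and the attention-first sublayer order are illustrative assumptions, not taken from the repository's build_transformerblock.

from tensorflow.keras.layers import (Conv1D, Dropout, LayerNormalization,
                                     MultiHeadAttention)

def build_transformerblock(inputs, head_size, num_heads, ff_dim, dropout,
                           causal=False):
    # Self-attention sublayer: use_causal_mask=True masks future positions,
    # turning the shared block into a decoder block (`causal` is a
    # hypothetical flag for this sketch).
    x = LayerNormalization(epsilon=1e-6)(inputs)
    x = MultiHeadAttention(key_dim=head_size, num_heads=num_heads,
                           dropout=dropout)(x, x, use_causal_mask=causal)
    res = x + inputs

    # Position-wise feedforward sublayer, projected back to the input width.
    x = LayerNormalization(epsilon=1e-6)(res)
    x = Conv1D(filters=ff_dim, kernel_size=1, activation="relu")(x)
    x = Dropout(dropout)(x)
    x = Conv1D(filters=inputs.shape[-1], kernel_size=1)(x)
    x = Dropout(dropout)(x)
    return x + res

Passing causal=True reproduces the masked attention of the deleted build_decoderblock, which is why one shared block suffices.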

0 comments on commit e74353e
