From 4f591936f6c27a350139e77c36258401a0fd6f4c Mon Sep 17 00:00:00 2001
From: Dawith Lim
Date: Tue, 30 Sep 2025 10:19:40 -0400
Subject: [PATCH] Model files have new parts

---
 model/dnn.py   | 22 ++++++++++++++++++++++
 model/model.py |  1 +
 2 files changed, 23 insertions(+)

diff --git a/model/dnn.py b/model/dnn.py
index 8b13789..ec0988e 100644
--- a/model/dnn.py
+++ b/model/dnn.py
@@ -1 +1,23 @@
+# -*- coding: utf-8 -*-
+# This code defines a simple Deep Neural Network (DNN) model using Keras.
+
+import keras
+
+class DNN(keras.Model):
+    def __init__(self, input_shape, layer_stack_size, num_classes):
+        super(DNN, self).__init__()
+        # Remember the stack depth so call() can apply every hidden layer.
+        self.layer_stack_size = layer_stack_size
+        self.input_layer = keras.layers.InputLayer(input_shape=input_shape)
+        for i in range(layer_stack_size):
+            setattr(self, f"hidden_layer{i+1}",
+                    keras.layers.Dense(input_shape, activation="relu"))
+        self.output_layer = keras.layers.Dense(num_classes, activation="softmax")
+
+    def call(self, inputs):
+        x = self.input_layer(inputs)
+        # Apply all hidden layers, not just a hard-coded first two.
+        for i in range(self.layer_stack_size):
+            x = getattr(self, f"hidden_layer{i+1}")(x)
+        return self.output_layer(x)
 
diff --git a/model/model.py b/model/model.py
index abe26f1..398e93c 100644
--- a/model/model.py
+++ b/model/model.py
@@ -98,6 +98,7 @@ def _transformerblocks(self, inputs, head_size, num_heads,
 
         Returns:
             A model layer.
         """
+        inputs = Masking(mask_value=pad_value)(inputs)
         x = MultiHeadAttention(
             key_dim=head_size, num_heads=num_heads, dropout=dropout)(inputs, inputs)