From 044c3faba40f6142626e700887dfbbdf37cced19 Mon Sep 17 00:00:00 2001
From: Dawith
Date: Fri, 18 Apr 2025 14:17:54 -0400
Subject: [PATCH] Two-category classification

---
 model/model.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/model/model.py b/model/model.py
index 40b3270..abe26f1 100644
--- a/model/model.py
+++ b/model/model.py
@@ -4,7 +4,7 @@
 """
 
 from keras import Input, Model
-from keras.layers import BatchNormalization, Conv1D, Dense, Dropout, \
+from keras.layers import BatchNormalization, Conv1D, Dense, Dropout, Reshape, \
                          GlobalAveragePooling1D, LayerNormalization, Masking, \
                          MultiHeadAttention
 
@@ -57,7 +57,7 @@ def _modelstack(self, input_shape, head_size, num_heads, ff_dim,
             num_Transformer_blocks: int, the number of Transformer blocks.
             mlp_units: list of ints, the number of neurons in each layer
                 of the MLP.
-            n_classes: int, the number of output classes.
+            n_classes: list of ints, the number of output classes.
             dropout: float, dropout rate.
             mlp_dropout: float, dropout rate in the MLP.
 
@@ -75,9 +75,10 @@ def _modelstack(self, input_shape, head_size, num_heads, ff_dim,
         for dim in mlp_units:
             x = Dense(dim, activation="relu")(x)
             x = Dropout(mlp_dropout)(x)
-        outputs = Dense(n_classes, activation="softmax")(x)
+        y = Dense(n_classes[0], activation="softmax")(x)
+        z = Dense(n_classes[1], activation="softmax")(x)
 
-        return Model(inputs, outputs)
+        return Model(inputs, [y, z])
 
     def _transformerblocks(self, inputs, head_size, num_heads, ff_dim,
                            dropout):
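
Note (not part of the patch): after this change the model returns two outputs,
so callers must supply one loss, and one set of labels, per head when compiling
and training. Below is a minimal sketch of that usage. The input shape, class
counts, head names, and loss weights are illustrative assumptions, not values
from this repository.

    # Minimal sketch of training a two-headed classifier like the one the
    # patch creates. All sizes and names here are assumptions for illustration.
    import numpy as np
    from keras import Input, Model
    from keras.layers import Dense, GlobalAveragePooling1D

    inputs = Input(shape=(128, 8))        # assumed (timesteps, features)
    x = GlobalAveragePooling1D()(inputs)
    x = Dense(64, activation="relu")(x)
    # Two softmax heads, mirroring y/z in the patched _modelstack.
    y = Dense(3, activation="softmax", name="category_a")(x)  # n_classes[0] = 3
    z = Dense(5, activation="softmax", name="category_b")(x)  # n_classes[1] = 5
    model = Model(inputs, [y, z])

    # A multi-output model takes one loss per head; loss_weights balances
    # their contribution to the total training loss.
    model.compile(
        optimizer="adam",
        loss=["sparse_categorical_crossentropy",
              "sparse_categorical_crossentropy"],
        loss_weights=[1.0, 1.0],
        metrics=["accuracy"],
    )

    # Labels are passed as a list matching the output order of the model.
    x_train = np.random.rand(32, 128, 8)
    labels_a = np.random.randint(0, 3, size=(32,))
    labels_b = np.random.randint(0, 5, size=(32,))
    model.fit(x_train, [labels_a, labels_b], epochs=1, batch_size=8)

Returning [y, z] from one shared trunk, rather than training two separate
models, lets both classification targets share the Transformer features while
each head keeps its own softmax over its own class count.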