-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Improved workflow for model training, including proper monitoring for autoencoder training
- Loading branch information
Dawith
committed
Mar 2, 2026
1 parent
520fbe3
commit e891126
Showing
3 changed files
with
177 additions
and
9 deletions.
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,3 +1,121 @@ | ||
| # -*- coding: utf-8 -*- | ||
| #-*- coding: utf-8 -*- | ||
|
|
||
| from datetime import datetime | ||
| import time | ||
| import typing | ||
| from typing import List | ||
|
|
||
| import numpy as np | ||
| import os | ||
| import keras | ||
| from keras.metrics import MeanSquaredError | ||
| from keras import Model | ||
| from keras.callbacks import ModelCheckpoint, CSVLogger | ||
| import matplotlib.pyplot as plt | ||
|
|
||
| from model.model import CompoundModel | ||
| from model.metrics import MutualInformation, mutual_information | ||
| from visualize.visualize import confusion_matrix | ||
| from visualize.plot import roc_plot | ||
| from train.encoder_train import build_encoder | ||
| from train.decoder_train import build_decoder | ||
|
|
||
def autoencoder_workflow(params, shape, n_classes,
                         train_set, validation_set, test_set,
                         categories, keys, path):
    """Run the full autoencoder pipeline: build, train, test, evaluate, save.

    Parameters
    ----------
    params : dict
        Configuration; must contain "autoencoder_params", "timestamp" and
        "log_level" entries consumed by the downstream steps.
    shape : tuple
        Input shape forwarded to the encoder/decoder builders.
    n_classes : int
        Class count forwarded to the encoder/decoder builders.
    train_set, validation_set, test_set : array-like
        Data splits; the autoencoder is trained to reproduce its input.
    categories : sequence
        Category labels forwarded to evaluation.
    keys : sequence
        Metric names used to collect test results.
    path : pathlib.Path
        Root output directory for checkpoints, logs, figures and the model.
    """
    model = build_autoencoder(params, shape, n_classes)
    model = train_autoencoder(params, model, train_set, validation_set, path)

    # One placeholder entry per requested metric name, filled by testing.
    # (Previously the result was copied verbatim into a second dict via a
    # comprehension; the identity copy added nothing and was removed.)
    model_metrics, test_predict = test_autoencoder(
        model,
        test_set,
        {key: None for key in keys}
    )
    # NOTE(review): model_metrics is not consumed further in this function;
    # kept so future reporting code can pick it up.

    evaluate_autoencoder(
        params,
        test_predict,
        test_set[0],
        categories,
        keys,
        path
    )

    save_autoencoder(params, model, path)
|
|
||
def build_autoencoder(params, shape, n_classes):
    """Build and compile an encoder->decoder Sequential autoencoder.

    Parameters
    ----------
    params : dict
        Must contain "autoencoder_params" with at least a "loss" entry;
        may optionally provide "learning_rate".
    shape : tuple
        Input shape forwarded to the encoder/decoder builders.
    n_classes : int
        Class count forwarded to the encoder/decoder builders.

    Returns
    -------
    keras.Sequential
        Compiled autoencoder tracking mean squared error.
    """
    autoencoder_params = params["autoencoder_params"]
    mse = MeanSquaredError()

    encoder_model = build_encoder(params, shape, n_classes)
    decoder_model = build_decoder(params, shape, n_classes)
    model = keras.Sequential([encoder_model, decoder_model])
    model.compile(
        # Learning rate is now configurable via autoencoder_params; the
        # default preserves the previously hard-coded value.
        optimizer=keras.optimizers.Adam(
            learning_rate=autoencoder_params.get("learning_rate", 4e-4)
        ),
        loss=autoencoder_params["loss"],
        metrics=[mse]
    )

    return model
|
|
||
def train_autoencoder(params, model, train_set, validation_set, path):
    """Fit the autoencoder to reproduce its input, with checkpointing.

    Saves the best model (by validation loss) and a CSV training log under
    path/<timestamp>/, prints the wall-clock training time, and returns the
    fitted model.
    """
    log_level = params["log_level"]
    timestamp = params["timestamp"]
    ae_params = params["autoencoder_params"]

    run_dir = path / timestamp
    monitors = [
        # Keep only the best weights seen so far, judged on validation loss.
        ModelCheckpoint(
            filepath=run_dir / f"{timestamp}_checkpoint.keras",
            monitor="val_loss",
            save_best_only=True,
            save_weights_only=False,
            verbose=1
        ),
        CSVLogger(run_dir / f"{timestamp}_log.csv")
    ]

    t_start = time.time()
    # Autoencoder target equals its input for both training and validation.
    model.fit(
        x=train_set, y=train_set,
        validation_data=(validation_set, validation_set),
        batch_size=ae_params["batch_size"],
        epochs=ae_params["epochs"],
        verbose=log_level,
        callbacks=monitors
    )
    t_end = time.time()
    print("Training time: ", t_end - t_start)
    return model
|
|
||
| def test_autoencoder(model: Model, test: List, metrics: dict): | ||
| """ | ||
| """ | ||
|
|
||
| test_eval = model.evaluate(test, test) | ||
| if len(metrics.keys()) == 1: | ||
| metrics[metrics.keys()[0]] = test_eval | ||
| else: | ||
| for i, key in enumerate(metrics.keys()): | ||
| metrics[key] = np.mean(test_eval[i]) | ||
|
|
||
| test_predict = model.predict(test)[0] | ||
|
|
||
| return metrics, test_predict | ||
|
|
||
def evaluate_autoencoder(params, test_predict, test_set, categories, keys, path):
    """Save pcolor figures of the original test data and its reconstruction.

    Writes "original.png" and "reproduction.png" under path/<timestamp>/.
    `categories` and `keys` are accepted for interface consistency with the
    rest of the workflow but are not used here.
    """
    out_dir = path / params["timestamp"]
    # (data, filename) pairs: ground truth first, then the reconstruction.
    for data, filename in ((test_set, "original.png"),
                           (test_predict, "reproduction.png")):
        plt.pcolor(data)
        plt.savefig(out_dir / filename)
        plt.close()
    return
|
|
||
def save_autoencoder(params, model, path):
    """Persist the trained autoencoder as a .keras file under path/<timestamp>/."""
    stamp = params["timestamp"]
    model.save(path / stamp / f"{stamp}_autoencoder.keras")
|
|
||
| # EOF |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters