diff --git a/mlair/model_modules/fully_connected_networks.py b/mlair/model_modules/fully_connected_networks.py
index a4c61b5b55f56e974d90a83fca32771019778154..313fc837825e108b877b8a48856a26667211764a 100644
--- a/mlair/model_modules/fully_connected_networks.py
+++ b/mlair/model_modules/fully_connected_networks.py
@@ -28,8 +28,6 @@ class FCN_64_32_16(AbstractModelClass):
         super().__init__(input_shape[0], output_shape[0])

         # settings
-        self.dropout_rate = 0.1
-        self.regularizer = keras.regularizers.l2(0.1)
         self.activation = keras.layers.PReLU

         # apply to model
@@ -42,20 +40,19 @@ class FCN_64_32_16(AbstractModelClass):
         Build the model.
         """
         x_input = keras.layers.Input(shape=self._input_shape)
-        x_in = keras.layers.Flatten(name='{}'.format("major"))(x_input)
-        x_in = keras.layers.Dense(64, name='{}_Dense_64'.format("major"))(x_in)
+        x_in = keras.layers.Flatten()(x_input)
+        x_in = keras.layers.Dense(64, name="Dense_64")(x_in)
         x_in = self.activation()(x_in)
-        x_in = keras.layers.Dense(32, name='{}_Dense_32'.format("major"))(x_in)
+        x_in = keras.layers.Dense(32, name="Dense_32")(x_in)
         x_in = self.activation()(x_in)
-        x_in = keras.layers.Dense(16, name='{}_Dense_16'.format("major"))(x_in)
+        x_in = keras.layers.Dense(16, name="Dense_16")(x_in)
         x_in = self.activation()(x_in)
-        x_in = keras.layers.Dense(self._output_shape, name='{}_Dense'.format("major"))(x_in)
+        x_in = keras.layers.Dense(self._output_shape, name="Dense_output")(x_in)
         out_main = self.activation()(x_in)
         self.model = keras.Model(inputs=x_input, outputs=[out_main])

     def set_compile_options(self):
-        self.initial_lr = 1e-2
-        self.optimizer = keras.optimizers.adam(lr=self.initial_lr)
+        self.optimizer = keras.optimizers.adam(lr=1e-2)
         self.compile_options = {"loss": [keras.losses.mean_squared_error], "metrics": ["mse", "mae"]}


@@ -117,7 +114,7 @@ class FCN(AbstractModelClass):
         n_layer, n_hidden = self.layer_configuration
         n_input = str(reduce(lambda x, y: x * y, self._input_shape))
         n_output = str(self._output_shape)
-        self.model_name += "_".join([n_input, *[f"{n_hidden}" for _ in range(n_layer)], n_output])
+        self.model_name += "_".join(["", n_input, *[f"{n_hidden}" for _ in range(n_layer)], n_output])

     def set_model(self):
         """
diff --git a/mlair/model_modules/model_class.py b/mlair/model_modules/model_class.py
index f6e979878604ff6e6c22c4f362520dc00acf69d7..f8e3a21a81351ac614e2275749bb85fa82a96e02 100644
--- a/mlair/model_modules/model_class.py
+++ b/mlair/model_modules/model_class.py
@@ -128,58 +128,6 @@ from mlair.model_modules.flatten import flatten_tail
 from mlair.model_modules.advanced_paddings import PadUtils, Padding2D, SymmetricPadding2D


-class MyLittleModel(AbstractModelClass):
-    """
-    A customised model 4 Dense layers (64, 32, 16, window_lead_time), where the last layer is the output layer depending
-    on the window_lead_time parameter.
-    """
-
-    def __init__(self, input_shape: list, output_shape: list):
-        """
-        Sets model and loss depending on the given arguments.
-
-        :param input_shape: list of input shapes (expect len=1 with shape=(window_hist, station, variables))
-        :param output_shape: list of output shapes (expect len=1 with shape=(window_forecast))
-        """
-
-        assert len(input_shape) == 1
-        assert len(output_shape) == 1
-        super().__init__(input_shape[0], output_shape[0])
-
-        # settings
-        self.dropout_rate = 0.1
-        self.regularizer = keras.regularizers.l2(0.1)
-        self.activation = keras.layers.PReLU
-
-        # apply to model
-        self.set_model()
-        self.set_compile_options()
-        self.set_custom_objects(loss=self.compile_options['loss'])
-
-    def set_model(self):
-        """
-        Build the model.
-        """
-        x_input = keras.layers.Input(shape=self._input_shape)
-        x_in = keras.layers.Flatten(name='{}'.format("major"))(x_input)
-        x_in = keras.layers.Dense(64, name='{}_Dense_64'.format("major"))(x_in)
-        x_in = self.activation()(x_in)
-        x_in = keras.layers.Dense(32, name='{}_Dense_32'.format("major"))(x_in)
-        x_in = self.activation()(x_in)
-        x_in = keras.layers.Dense(16, name='{}_Dense_16'.format("major"))(x_in)
-        x_in = self.activation()(x_in)
-        x_in = keras.layers.Dense(self._output_shape, name='{}_Dense'.format("major"))(x_in)
-        out_main = self.activation()(x_in)
-        self.model = keras.Model(inputs=x_input, outputs=[out_main])
-
-    def set_compile_options(self):
-        self.initial_lr = 1e-2
-        self.optimizer = keras.optimizers.adam(lr=self.initial_lr)
-        # self.lr_decay = mlair.model_modules.keras_extensions.LearningRateDecay(base_lr=self.initial_lr, drop=.94,
-        #                                                                        epochs_drop=10)
-        self.compile_options = {"loss": [keras.losses.mean_squared_error], "metrics": ["mse", "mae"]}
-
-
 class MyLittleModelHourly(AbstractModelClass):
     """
     A customised model with a 1x1 Conv, and 4 Dense layers (64, 32, 16, window_lead_time), where the last layer is the
@@ -529,8 +477,3 @@ class MyPaperModel(AbstractModelClass):
         self.optimizer = keras.optimizers.SGD(lr=self.initial_lr, momentum=0.9)
         self.compile_options = {"loss": [keras.losses.mean_squared_error, keras.losses.mean_squared_error],
                                 "metrics": ['mse', 'mae']}
-
-
-if __name__ == "__main__":
-    model = MyLittleModel([(1, 3, 10)], [2])
-    print(model.compile_options)
diff --git a/mlair/run_modules/experiment_setup.py b/mlair/run_modules/experiment_setup.py
index af540fc296f1d4b707b5373fdbcbb14dac1afc7f..30672ecc9206319896205d886157b2f2f8977f39 100644
--- a/mlair/run_modules/experiment_setup.py
+++ b/mlair/run_modules/experiment_setup.py
@@ -20,7 +20,7 @@ from mlair.configuration.defaults import DEFAULT_STATIONS, DEFAULT_VAR_ALL_DICT,
     DEFAULT_NUMBER_OF_BOOTSTRAPS, DEFAULT_PLOT_LIST, DEFAULT_SAMPLING, DEFAULT_DATA_ORIGIN, DEFAULT_ITER_DIM
 from mlair.data_handler import DefaultDataHandler
 from mlair.run_modules.run_environment import RunEnvironment
-from mlair.model_modules.model_class import MyLittleModel as VanillaModel
+from mlair.model_modules.fully_connected_networks import FCN_64_32_16 as VanillaModel


 class ExperimentSetup(RunEnvironment):
diff --git a/test/test_data_handler/test_iterator.py b/test/test_data_handler/test_iterator.py
index ade5c19215e61de5e209db900920187294ac9b18..e47d725a4fd78fec98e81a6de9c18869e7b47637 100644
--- a/test/test_data_handler/test_iterator.py
+++ b/test/test_data_handler/test_iterator.py
@@ -1,7 +1,7 @@
-
 from mlair.data_handler.iterator import DataCollection, StandardIterator, KerasIterator
 from mlair.helpers.testing import PyTestAllEqual
-from mlair.model_modules.model_class import MyLittleModel, MyBranchedModel
+from mlair.model_modules.model_class import MyBranchedModel
+from mlair.model_modules.fully_connected_networks import FCN_64_32_16

 import numpy as np
 import pytest
@@ -275,7 +275,7 @@ class TestKerasIterator:

     def test_get_model_rank_single_output_branch(self):
         iterator = object.__new__(KerasIterator)
-        iterator.model = MyLittleModel(input_shape=[(14, 1, 2)], output_shape=[(3,)])
+        iterator.model = FCN_64_32_16(input_shape=[(14, 1, 2)], output_shape=[(3,)])
         assert iterator._get_model_rank() == 1

     def test_get_model_rank_multiple_output_branch(self):
diff --git a/test/test_run_modules/test_model_setup.py b/test/test_run_modules/test_model_setup.py
index bc4421269a3458c0eb551f8f1820a8d46a597afc..7a4378531ca02733af1bbeeac0efbde56203be3a 100644
--- a/test/test_run_modules/test_model_setup.py
+++ b/test/test_run_modules/test_model_setup.py
@@ -8,7 +8,7 @@ from mlair.data_handler import KerasIterator
 from mlair.data_handler import DataCollection
 from mlair.helpers.datastore import EmptyScope
 from mlair.model_modules.keras_extensions import CallbackHandler
-from mlair.model_modules.model_class import MyLittleModel
+from mlair.model_modules.fully_connected_networks import FCN_64_32_16
 from mlair.model_modules import AbstractModelClass
 from mlair.run_modules.model_setup import ModelSetup
 from mlair.run_modules.run_environment import RunEnvironment
@@ -23,7 +23,7 @@ class TestModelSetup:
         obj.scope = "general.model"
         obj.model = None
         obj.callbacks_name = "placeholder_%s_str.pickle"
-        obj.data_store.set("model_class", MyLittleModel)
+        obj.data_store.set("model_class", FCN_64_32_16)
         obj.data_store.set("lr_decay", "dummy_str", "general.model")
         obj.data_store.set("hist", "dummy_str", "general.model")
         obj.data_store.set("epochs", 2)