diff --git a/mlair/model_modules/fully_connected_networks.py b/mlair/model_modules/fully_connected_networks.py
index 1f965f3c8b4ad997a829ebb643ce0a219cdee013..fb2ee26ef6a59748fc8fc60759a29d1f4d45e309 100644
--- a/mlair/model_modules/fully_connected_networks.py
+++ b/mlair/model_modules/fully_connected_networks.py
@@ -73,7 +73,8 @@ class FCN(AbstractModelClass):
     _requirements = ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad", "momentum", "nesterov", "l1", "l2"]
 
     def __init__(self, input_shape: list, output_shape: list, activation="relu", activation_output="linear",
-                 optimizer="adam", n_layer=1, n_hidden=10, regularizer=None, dropout=None, **kwargs):
+                 optimizer="adam", n_layer=1, n_hidden=10, regularizer=None, dropout=None, explicite_layers=None,
+                 **kwargs):
         """
         Sets model and loss depending on the given arguments.
 
@@ -89,7 +90,7 @@ class FCN(AbstractModelClass):
         self.activation = self._set_activation(activation)
         self.activation_output = self._set_activation(activation_output)
         self.optimizer = self._set_optimizer(optimizer, **kwargs)
-        self.layer_configuration = (n_layer, n_hidden)
+        self.layer_configuration = (n_layer, n_hidden) if explicite_layers is None else explicite_layers
         self._update_model_name()
         self.kernel_initializer = self._initializer.get(activation, "glorot_uniform")
         self.kernel_regularizer = self._set_regularizer(regularizer, **kwargs)
@@ -144,10 +145,13 @@ class FCN(AbstractModelClass):
         return dropout
 
     def _update_model_name(self):
-        n_layer, n_hidden = self.layer_configuration
         n_input = str(reduce(lambda x, y: x * y, self._input_shape))
         n_output = str(self._output_shape)
-        self.model_name += "_".join(["", n_input, *[f"{n_hidden}" for _ in range(n_layer)], n_output])
+        if isinstance(self.layer_configuration, tuple) and len(self.layer_configuration) == 2:
+            n_layer, n_hidden = self.layer_configuration
+            self.model_name += "_".join(["", n_input, *[f"{n_hidden}" for _ in range(n_layer)], n_output])
+        else:
+            self.model_name += "_".join(["", n_input, *[f"{n}" for n in self.layer_configuration], n_output])
 
     def set_model(self):
         """
@@ -155,15 +159,24 @@ class FCN(AbstractModelClass):
         """
         x_input = keras.layers.Input(shape=self._input_shape)
         x_in = keras.layers.Flatten()(x_input)
-        n_layer, n_hidden = self.layer_configuration
-        for layer in range(n_layer):
-            x_in = keras.layers.Dense(n_hidden, kernel_initializer=self.kernel_initializer,
-                                      kernel_regularizer=self.kernel_regularizer)(x_in)
-            x_in = self.activation()(x_in)
-            if self.dropout is not None:
-                x_in = keras.layers.Dropout(self.dropout)(x_in)
+        if isinstance(self.layer_configuration, tuple) is True:
+            n_layer, n_hidden = self.layer_configuration
+            for layer in range(n_layer):
+                x_in = keras.layers.Dense(n_hidden, kernel_initializer=self.kernel_initializer,
+                                          kernel_regularizer=self.kernel_regularizer)(x_in)
+                x_in = self.activation(name=f"{self.activation.args[0]}_{layer + 1}")(x_in)
+                if self.dropout is not None:
+                    x_in = keras.layers.Dropout(self.dropout)(x_in)
+        else:
+            assert isinstance(self.layer_configuration, list) is True
+            for layer, n_hidden in enumerate(self.layer_configuration):
+                x_in = keras.layers.Dense(n_hidden, kernel_initializer=self.kernel_initializer,
+                                          kernel_regularizer=self.kernel_regularizer)(x_in)
+                x_in = self.activation(name=f"{self.activation.args[0]}_{layer + 1}")(x_in)
+                if self.dropout is not None:
+                    x_in = keras.layers.Dropout(self.dropout)(x_in)
         x_in = keras.layers.Dense(self._output_shape)(x_in)
-        out = self.activation_output()(x_in)
+        out = self.activation_output(name=f"{self.activation_output.args[0]}_output")(x_in)
         self.model = keras.Model(inputs=x_input, outputs=[out])
 
     def set_compile_options(self):
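
A minimal usage sketch of the new explicite_layers parameter, for reviewers. The input/output shapes and layer widths below are invented for illustration and are not taken from the MLAir test suite. Per the diff, passing a list bypasses the uniform (n_layer, n_hidden) configuration: each list entry becomes the width of one hidden layer, _update_model_name appends the widths in order, and set_model builds one Dense/activation (plus optional Dropout) block per entry.

    # Hypothetical example; shapes and widths are illustrative only.
    from mlair.model_modules.fully_connected_networks import FCN

    # Uniform configuration (unchanged behaviour): two hidden layers of 10 units each.
    model_uniform = FCN(input_shape=[(7, 1, 9)], output_shape=[(4,)], n_layer=2, n_hidden=10)

    # Explicit configuration: explicite_layers takes precedence over n_layer/n_hidden,
    # yielding hidden layers of 64, 32, and 16 units.
    model_explicit = FCN(input_shape=[(7, 1, 9)], output_shape=[(4,)], explicite_layers=[64, 32, 16])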