diff --git a/mlair/model_modules/fully_connected_networks.py b/mlair/model_modules/fully_connected_networks.py
index 1fd61d9895fec525a764bc20dd669925240d3342..007b8f0de9d2ea6ad6ae64179371a98a56d40447 100644
--- a/mlair/model_modules/fully_connected_networks.py
+++ b/mlair/model_modules/fully_connected_networks.py
@@ -88,7 +88,9 @@ class FCN(AbstractModelClass):
 
         # settings
         self.activation = self._set_activation(activation)
+        self.activation_name = activation
         self.activation_output = self._set_activation(activation_output)
+        self.activation_output_name = activation_output
         self.optimizer = self._set_optimizer(optimizer, **kwargs)
         self.layer_configuration = (n_layer, n_hidden) if layer_configuration is None else layer_configuration
         self._update_model_name()
@@ -164,7 +166,7 @@ class FCN(AbstractModelClass):
             for layer in range(n_layer):
                 x_in = keras.layers.Dense(n_hidden, kernel_initializer=self.kernel_initializer,
                                           kernel_regularizer=self.kernel_regularizer)(x_in)
-                x_in = self.activation(name=f"{self.activation.args[0]}_{layer + 1}")(x_in)
+                x_in = self.activation(name=f"{self.activation_name}_{layer + 1}")(x_in)
                 if self.dropout is not None:
                     x_in = keras.layers.Dropout(self.dropout)(x_in)
         else:
@@ -172,11 +174,11 @@ class FCN(AbstractModelClass):
             for layer, n_hidden in enumerate(self.layer_configuration):
                 x_in = keras.layers.Dense(n_hidden, kernel_initializer=self.kernel_initializer,
                                           kernel_regularizer=self.kernel_regularizer)(x_in)
-                x_in = self.activation(name=f"{self.activation.args[0]}_{layer + 1}")(x_in)
+                x_in = self.activation(name=f"{self.activation_name}_{layer + 1}")(x_in)
                 if self.dropout is not None:
                     x_in = keras.layers.Dropout(self.dropout)(x_in)
         x_in = keras.layers.Dense(self._output_shape)(x_in)
-        out = self.activation_output(name=f"{self.activation_output.args[0]}_output")(x_in)
+        out = self.activation_output(name=f"{self.activation_output_name}_output")(x_in)
         self.model = keras.Model(inputs=x_input, outputs=[out])
 
     def set_compile_options(self):