Commit 8b5f1346 authored by leufen1

FCN activation name now works with relu

parent 584668a2
6 merge requests: !319 add all changes of dev into release v1.4.0 branch, !318 Resolve "release v1.4.0", !283 Merge latest develop into falcos issue, !278 Felix issue295 transformation parameters in data handler, !270 FCN activation name now works with relu, !259 Draft: Resolve "WRF-Datahandler should inherit from SingleStationDatahandler"
Pipeline #62471 passed
@@ -88,7 +88,9 @@ class FCN(AbstractModelClass):
         # settings
         self.activation = self._set_activation(activation)
+        self.activation_name = activation
         self.activation_output = self._set_activation(activation_output)
+        self.activation_output_name = activation_output
         self.optimizer = self._set_optimizer(optimizer, **kwargs)
         self.layer_configuration = (n_layer, n_hidden) if layer_configuration is None else layer_configuration
         self._update_model_name()
@@ -164,7 +166,7 @@ class FCN(AbstractModelClass):
             for layer in range(n_layer):
                 x_in = keras.layers.Dense(n_hidden, kernel_initializer=self.kernel_initializer,
                                           kernel_regularizer=self.kernel_regularizer)(x_in)
-                x_in = self.activation(name=f"{self.activation.args[0]}_{layer + 1}")(x_in)
+                x_in = self.activation(name=f"{self.activation_name}_{layer + 1}")(x_in)
                 if self.dropout is not None:
                     x_in = keras.layers.Dropout(self.dropout)(x_in)
         else:
@@ -172,11 +174,11 @@ class FCN(AbstractModelClass):
             for layer, n_hidden in enumerate(self.layer_configuration):
                 x_in = keras.layers.Dense(n_hidden, kernel_initializer=self.kernel_initializer,
                                           kernel_regularizer=self.kernel_regularizer)(x_in)
-                x_in = self.activation(name=f"{self.activation.args[0]}_{layer + 1}")(x_in)
+                x_in = self.activation(name=f"{self.activation_name}_{layer + 1}")(x_in)
                 if self.dropout is not None:
                     x_in = keras.layers.Dropout(self.dropout)(x_in)
         x_in = keras.layers.Dense(self._output_shape)(x_in)
-        out = self.activation_output(name=f"{self.activation_output.args[0]}_output")(x_in)
+        out = self.activation_output(name=f"{self.activation_output_name}_output")(x_in)
         self.model = keras.Model(inputs=x_input, outputs=[out])

     def set_compile_options(self):
...
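
Background on the fix: before this change, build_model recovered the activation's display name from self.activation.args[0], i.e. from the positional arguments of the functools.partial object returned by _set_activation. That works for activations wrapped as partial(keras.layers.Activation, "<name>"), but "relu" maps to the argument-free keras.layers.ReLU layer class, so .args is empty and the lookup raised an IndexError. Storing the plain strings in self.activation_name and self.activation_output_name side-steps the lookup entirely. Below is a minimal sketch of the failure mode; the _activation mapping shown here is an assumption modeled on the surrounding MLAir code, not part of this commit:

from functools import partial
from tensorflow import keras

# Assumed activation lookup, modeled on FCN._set_activation: most entries
# wrap keras.layers.Activation with the name as a positional argument, but
# "relu" maps to the dedicated ReLU layer class with no positional args.
_activation = {
    "tanh": partial(keras.layers.Activation, "tanh"),
    "sigmoid": partial(keras.layers.Activation, "sigmoid"),
    "relu": partial(keras.layers.ReLU),
}

print(_activation["tanh"].args)  # ('tanh',) -> old lookup .args[0] worked
print(_activation["relu"].args)  # ()        -> .args[0] raised IndexError

With the stored name string, hidden layers are labeled "relu_1", "relu_2", ... and the output activation "relu_output", regardless of how the layer factory was constructed.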