Commit ce0e123c authored by lukas leufen

Merge branch 'lukas_issue284_feat_create-fcn-model-class' into 'develop'

FCN class can use any layer configuration

See merge request toar/mlair!269
parents e8eec5ec 584668a2
Pipeline #62469 passed
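This change adds an optional layer_configuration argument to FCN: pass a list of layer widths to build an arbitrary stack of Dense layers, or omit it to keep the old uniform (n_layer, n_hidden) behaviour. A minimal usage sketch follows; the import path and the example shapes are assumptions for illustration, not taken from this diff:

    from mlair.model_modules.fully_connected_networks import FCN  # assumed path

    # Old-style uniform net: 2 hidden layers with 16 neurons each (still the default).
    uniform = FCN(input_shape=[(7, 1, 9)], output_shape=[(4,)], n_layer=2, n_hidden=16)

    # New: arbitrary layout, one Dense layer per list entry (32 -> 16 -> 8 neurons).
    custom = FCN(input_shape=[(7, 1, 9)], output_shape=[(4,)], layer_configuration=[32, 16, 8])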
@@ -73,7 +73,8 @@ class FCN(AbstractModelClass):
     _requirements = ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad", "momentum", "nesterov", "l1", "l2"]

     def __init__(self, input_shape: list, output_shape: list, activation="relu", activation_output="linear",
-                 optimizer="adam", n_layer=1, n_hidden=10, regularizer=None, dropout=None, **kwargs):
+                 optimizer="adam", n_layer=1, n_hidden=10, regularizer=None, dropout=None, layer_configuration=None,
+                 **kwargs):
         """
         Sets model and loss depending on the given arguments.
@@ -89,7 +90,7 @@
         self.activation = self._set_activation(activation)
         self.activation_output = self._set_activation(activation_output)
         self.optimizer = self._set_optimizer(optimizer, **kwargs)
-        self.layer_configuration = (n_layer, n_hidden)
+        self.layer_configuration = (n_layer, n_hidden) if layer_configuration is None else layer_configuration
         self._update_model_name()
         self.kernel_initializer = self._initializer.get(activation, "glorot_uniform")
         self.kernel_regularizer = self._set_regularizer(regularizer, **kwargs)
@@ -144,10 +145,13 @@
         return dropout

     def _update_model_name(self):
-        n_layer, n_hidden = self.layer_configuration
         n_input = str(reduce(lambda x, y: x * y, self._input_shape))
         n_output = str(self._output_shape)
-        self.model_name += "_".join(["", n_input, *[f"{n_hidden}" for _ in range(n_layer)], n_output])
+        if isinstance(self.layer_configuration, tuple) and len(self.layer_configuration) == 2:
+            n_layer, n_hidden = self.layer_configuration
+            self.model_name += "_".join(["", n_input, *[f"{n_hidden}" for _ in range(n_layer)], n_output])
+        else:
+            self.model_name += "_".join(["", n_input, *[f"{n}" for n in self.layer_configuration], n_output])

     def set_model(self):
         """
@@ -155,15 +159,24 @@
         """
         x_input = keras.layers.Input(shape=self._input_shape)
         x_in = keras.layers.Flatten()(x_input)
-        n_layer, n_hidden = self.layer_configuration
-        for layer in range(n_layer):
-            x_in = keras.layers.Dense(n_hidden, kernel_initializer=self.kernel_initializer,
-                                      kernel_regularizer=self.kernel_regularizer)(x_in)
-            x_in = self.activation()(x_in)
-            if self.dropout is not None:
-                x_in = keras.layers.Dropout(self.dropout)(x_in)
+        if isinstance(self.layer_configuration, tuple) is True:
+            n_layer, n_hidden = self.layer_configuration
+            for layer in range(n_layer):
+                x_in = keras.layers.Dense(n_hidden, kernel_initializer=self.kernel_initializer,
+                                          kernel_regularizer=self.kernel_regularizer)(x_in)
+                x_in = self.activation(name=f"{self.activation.args[0]}_{layer + 1}")(x_in)
+                if self.dropout is not None:
+                    x_in = keras.layers.Dropout(self.dropout)(x_in)
+        else:
+            assert isinstance(self.layer_configuration, list) is True
+            for layer, n_hidden in enumerate(self.layer_configuration):
+                x_in = keras.layers.Dense(n_hidden, kernel_initializer=self.kernel_initializer,
+                                          kernel_regularizer=self.kernel_regularizer)(x_in)
+                x_in = self.activation(name=f"{self.activation.args[0]}_{layer + 1}")(x_in)
+                if self.dropout is not None:
+                    x_in = keras.layers.Dropout(self.dropout)(x_in)
         x_in = keras.layers.Dense(self._output_shape)(x_in)
-        out = self.activation_output()(x_in)
+        out = self.activation_output(name=f"{self.activation_output.args[0]}_output")(x_in)
         self.model = keras.Model(inputs=x_input, outputs=[out])

     def set_compile_options(self):
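The new list branch of set_model can be reproduced in plain Keras as a sanity check. This sketch assumes TensorFlow 2.x, a relu activation, and no dropout or regularizers; in the actual diff the activation layer names come from the partial's first argument (self.activation.args[0]), which is hardcoded here:

    from tensorflow import keras

    layer_configuration = [32, 16, 8]  # assumed example layout
    x_input = keras.layers.Input(shape=(7, 1, 9))
    x_in = keras.layers.Flatten()(x_input)
    for layer, n_hidden in enumerate(layer_configuration):
        # One Dense layer per list entry, each followed by a named activation layer.
        x_in = keras.layers.Dense(n_hidden)(x_in)
        x_in = keras.layers.ReLU(name=f"relu_{layer + 1}")(x_in)
    x_in = keras.layers.Dense(4)(x_in)
    out = keras.layers.Activation("linear", name="linear_output")(x_in)
    model = keras.Model(inputs=x_input, outputs=[out])
    model.summary()  # shows the 32 -> 16 -> 8 -> 4 stack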