diff --git a/mlair/model_modules/fully_connected_networks.py b/mlair/model_modules/fully_connected_networks.py
index fb2ee26ef6a59748fc8fc60759a29d1f4d45e309..1fd61d9895fec525a764bc20dd669925240d3342 100644
--- a/mlair/model_modules/fully_connected_networks.py
+++ b/mlair/model_modules/fully_connected_networks.py
@@ -73,7 +73,7 @@ class FCN(AbstractModelClass):
     _requirements = ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad", "momentum", "nesterov", "l1", "l2"]
 
     def __init__(self, input_shape: list, output_shape: list, activation="relu", activation_output="linear",
-                 optimizer="adam", n_layer=1, n_hidden=10, regularizer=None, dropout=None, explicite_layers=None,
+                 optimizer="adam", n_layer=1, n_hidden=10, regularizer=None, dropout=None, layer_configuration=None,
                  **kwargs):
         """
         Sets model and loss depending on the given arguments.
@@ -90,7 +90,7 @@ class FCN(AbstractModelClass):
         self.activation = self._set_activation(activation)
         self.activation_output = self._set_activation(activation_output)
         self.optimizer = self._set_optimizer(optimizer, **kwargs)
-        self.layer_configuration = (n_layer, n_hidden) if explicite_layers is None else explicite_layers
+        self.layer_configuration = (n_layer, n_hidden) if layer_configuration is None else layer_configuration
         self._update_model_name()
         self.kernel_initializer = self._initializer.get(activation, "glorot_uniform")
         self.kernel_regularizer = self._set_regularizer(regularizer, **kwargs)
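
A minimal usage sketch of the renamed keyword, for review purposes only. Everything except the `FCN` signature itself is an assumption: the shape values are placeholders, and the list-of-widths interpretation of `layer_configuration` is inferred from the fallback to `(n_layer, n_hidden)` in the changed line, not stated in this diff.

```python
from mlair.model_modules.fully_connected_networks import FCN

# Placeholder shapes, not values taken from this change set.
# Without layer_configuration, the hidden stack falls back to the uniform
# tuple (n_layer, n_hidden): here, two hidden layers of 16 units each.
uniform_fcn = FCN(input_shape=[(7, 1, 9)], output_shape=[(4,)],
                  n_layer=2, n_hidden=16)

# With layer_configuration (formerly `explicite_layers`), the passed value
# takes precedence; assumed to give each hidden layer's width explicitly.
explicit_fcn = FCN(input_shape=[(7, 1, 9)], output_shape=[(4,)],
                   layer_configuration=[32, 16, 8])
```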