
Resolve "release v1.4.0"

Merged Ghost User requested to merge release_v1.4.0 into master
1 file  +2  -2
@@ -73,7 +73,7 @@ class FCN(AbstractModelClass):
     _requirements = ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad", "momentum", "nesterov", "l1", "l2"]

     def __init__(self, input_shape: list, output_shape: list, activation="relu", activation_output="linear",
-                 optimizer="adam", n_layer=1, n_hidden=10, regularizer=None, dropout=None, explicite_layers=None,
+                 optimizer="adam", n_layer=1, n_hidden=10, regularizer=None, dropout=None, layer_configuration=None,
                  **kwargs):
         """
         Sets model and loss depending on the given arguments.
@@ -90,7 +90,7 @@ class FCN(AbstractModelClass):
         self.activation = self._set_activation(activation)
         self.activation_output = self._set_activation(activation_output)
         self.optimizer = self._set_optimizer(optimizer, **kwargs)
-        self.layer_configuration = (n_layer, n_hidden) if explicite_layers is None else explicite_layers
+        self.layer_configuration = (n_layer, n_hidden) if layer_configuration is None else layer_configuration
         self._update_model_name()
         self.kernel_initializer = self._initializer.get(activation, "glorot_uniform")
         self.kernel_regularizer = self._set_regularizer(regularizer, **kwargs)
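
For orientation, a minimal usage sketch of the renamed keyword. The import path, the shape values, and the list-of-layer-widths format are assumptions, not taken from this merge request; the diff itself only establishes that layer_configuration replaces the old explicite_layers argument and falls back to (n_layer, n_hidden) when left at its default of None.

    # Hypothetical usage sketch; module path and shape values are placeholders.
    from mlair.model_modules.fully_connected_networks import FCN  # assumed import path

    # Default behaviour: layer_configuration is None, so the hidden layers
    # are derived from (n_layer, n_hidden) as shown in the diff above.
    model_default = FCN(input_shape=[(7, 1, 9)], output_shape=[(4,)],
                        n_layer=2, n_hidden=32)

    # Renamed keyword (previously explicite_layers): pass an explicit
    # configuration instead; the list-of-widths format is an assumption.
    model_explicit = FCN(input_shape=[(7, 1, 9)], output_shape=[(4,)],
                         layer_configuration=[64, 32, 16])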