From 584668a20b42e431a61c3286f80500c094eb6732 Mon Sep 17 00:00:00 2001
From: leufen1 <l.leufen@fz-juelich.de>
Date: Wed, 10 Mar 2021 16:46:15 +0100
Subject: [PATCH] renamed explicite_layers to layer_configuration

---
 mlair/model_modules/fully_connected_networks.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mlair/model_modules/fully_connected_networks.py b/mlair/model_modules/fully_connected_networks.py
index fb2ee26e..1fd61d98 100644
--- a/mlair/model_modules/fully_connected_networks.py
+++ b/mlair/model_modules/fully_connected_networks.py
@@ -73,7 +73,7 @@ class FCN(AbstractModelClass):
     _requirements = ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad", "momentum", "nesterov", "l1", "l2"]
 
     def __init__(self, input_shape: list, output_shape: list, activation="relu", activation_output="linear",
-                 optimizer="adam", n_layer=1, n_hidden=10, regularizer=None, dropout=None, explicite_layers=None,
+                 optimizer="adam", n_layer=1, n_hidden=10, regularizer=None, dropout=None, layer_configuration=None,
                  **kwargs):
         """
         Sets model and loss depending on the given arguments.
@@ -90,7 +90,7 @@ class FCN(AbstractModelClass):
         self.activation = self._set_activation(activation)
         self.activation_output = self._set_activation(activation_output)
         self.optimizer = self._set_optimizer(optimizer, **kwargs)
-        self.layer_configuration = (n_layer, n_hidden) if explicite_layers is None else explicite_layers
+        self.layer_configuration = (n_layer, n_hidden) if layer_configuration is None else layer_configuration
         self._update_model_name()
         self.kernel_initializer = self._initializer.get(activation, "glorot_uniform")
         self.kernel_regularizer = self._set_regularizer(regularizer, **kwargs)
-- 
GitLab