From 3ace6bbfe993195f1bdc0f0519661a8809c370dc Mon Sep 17 00:00:00 2001 From: lukas leufen <l.leufen@fz-juelich.de> Date: Wed, 16 Feb 2022 21:24:31 +0100 Subject: [PATCH] enumeration of dense layers was not proper --- mlair/model_modules/recurrent_networks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mlair/model_modules/recurrent_networks.py b/mlair/model_modules/recurrent_networks.py index 89a4e7ea..e909ae76 100644 --- a/mlair/model_modules/recurrent_networks.py +++ b/mlair/model_modules/recurrent_networks.py @@ -139,9 +139,9 @@ class RNN(AbstractModelClass): # pragma: no cover for layer, n_hidden in enumerate(self.dense_layer_configuration): if n_hidden < self._output_shape: break - x_in = keras.layers.Dense(n_hidden, name=f"Dense_{layer + 1}", + x_in = keras.layers.Dense(n_hidden, name=f"Dense_{len(conf) + layer + 1}", kernel_initializer=self.kernel_initializer, )(x_in) - x_in = self.activation(name=f"{self.activation_name}_{layer + 1}")(x_in) + x_in = self.activation(name=f"{self.activation_name}_{len(conf) + layer + 1}")(x_in) if self.dropout is not None: x_in = self.dropout(self.dropout_rate)(x_in) -- GitLab