From 1f11c299ca22abea6d95e518c5e09b4e9b170f81 Mon Sep 17 00:00:00 2001
From: lukas leufen <l.leufen@fz-juelich.de>
Date: Wed, 16 Feb 2022 19:01:23 +0100
Subject: [PATCH] correct misplaced brackets in Dense layer call

---
 mlair/model_modules/recurrent_networks.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/mlair/model_modules/recurrent_networks.py b/mlair/model_modules/recurrent_networks.py
index 8a53dd26..89a4e7ea 100644
--- a/mlair/model_modules/recurrent_networks.py
+++ b/mlair/model_modules/recurrent_networks.py
@@ -131,7 +131,7 @@ class RNN(AbstractModelClass):  # pragma: no cover
         if self.add_dense_layer is True:
             if len(self.dense_layer_configuration) == 0:
                 x_in = keras.layers.Dense(min(self._output_shape ** 2, conf[-1]), name=f"Dense_{len(conf) + 1}",
-                                        kernel_initializer=self.kernel_initializer, )(x_in)
+                                          kernel_initializer=self.kernel_initializer, )(x_in)
                 x_in = self.activation(name=f"{self.activation_name}_{len(conf) + 1}")(x_in)
                 if self.dropout is not None:
                     x_in = self.dropout(self.dropout_rate)(x_in)
@@ -139,8 +139,8 @@ class RNN(AbstractModelClass):  # pragma: no cover
                 for layer, n_hidden in enumerate(self.dense_layer_configuration):
                     if n_hidden < self._output_shape:
                         break
-                    x_in = keras.layers.Dense(n_hidden), name=f"Dense_{layer + 1}",
-                                            kernel_initializer=self.kernel_initializer, )(x_in)
+                    x_in = keras.layers.Dense(n_hidden, name=f"Dense_{layer + 1}",
+                                              kernel_initializer=self.kernel_initializer, )(x_in)
                     x_in = self.activation(name=f"{self.activation_name}_{layer + 1}")(x_in)
                     if self.dropout is not None:
                         x_in = self.dropout(self.dropout_rate)(x_in)
-- 
GitLab
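
For illustration, a minimal standalone sketch of the corrected call pattern: all
keyword arguments must sit inside the brackets of keras.layers.Dense(...), and the
resulting layer object is then applied to the input tensor. The import path
(tensorflow.keras), the layer sizes, activation, and initializer below are
illustrative assumptions and not taken from MLAir.

    # Sketch of the corrected pattern, assuming tensorflow.keras and made-up shapes.
    from tensorflow import keras

    inputs = keras.layers.Input(shape=(16,))
    x_in = inputs
    dense_layer_configuration = [64, 32]   # hypothetical hidden layer sizes
    output_shape = 4                       # hypothetical number of targets

    for layer, n_hidden in enumerate(dense_layer_configuration):
        if n_hidden < output_shape:
            break
        # Correct bracket placement: Dense(...) builds the layer with all keyword
        # arguments inside the call, and the trailing (x_in) applies it to the tensor.
        x_in = keras.layers.Dense(n_hidden, name=f"Dense_{layer + 1}",
                                  kernel_initializer="glorot_uniform")(x_in)
        x_in = keras.layers.Activation("relu", name=f"relu_{layer + 1}")(x_in)

    outputs = keras.layers.Dense(output_shape, name="Output")(x_in)
    model = keras.Model(inputs=inputs, outputs=outputs)
    model.summary()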