
Resolve "custom dense layers in rnn"

Merged Ghost User requested to merge lukas_issue361_feat_custom-dense-layers-in-rnn into develop
1 file changed: +3 −3
@@ -131,7 +131,7 @@ class RNN(AbstractModelClass):  # pragma: no cover
         if self.add_dense_layer is True:
             if len(self.dense_layer_configuration) == 0:
                 x_in = keras.layers.Dense(min(self._output_shape ** 2, conf[-1]), name=f"Dense_{len(conf) + 1}",
-                                              kernel_initializer=self.kernel_initializer, )(x_in)
+                                          kernel_initializer=self.kernel_initializer, )(x_in)
                 x_in = self.activation(name=f"{self.activation_name}_{len(conf) + 1}")(x_in)
                 if self.dropout is not None:
                     x_in = self.dropout(self.dropout_rate)(x_in)
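For context, the default branch above (no explicit configuration) appends a single dense layer whose width is the smaller of the squared output size and the width of the last recurrent layer. A minimal sketch of that sizing rule, with hypothetical stand-ins for self._output_shape and conf:

    # Hypothetical values: `conf` holds the widths of the stacked RNN layers.
    output_shape = 5                              # stands in for self._output_shape
    conf = [64, 32]
    n_units = min(output_shape ** 2, conf[-1])    # min(25, 32) -> 25 units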
@@ -139,8 +139,8 @@ class RNN(AbstractModelClass):  # pragma: no cover
                 for layer, n_hidden in enumerate(self.dense_layer_configuration):
                     if n_hidden < self._output_shape:
                         break
-                    x_in = keras.layers.Dense(n_hidden), name=f"Dense_{layer + 1}",
-                                              kernel_initializer=self.kernel_initializer, )(x_in)
+                    x_in = keras.layers.Dense(n_hidden, name=f"Dense_{layer + 1}",
+                                              kernel_initializer=self.kernel_initializer, )(x_in)
                     x_in = self.activation(name=f"{self.activation_name}_{layer + 1}")(x_in)
                     if self.dropout is not None:
                         x_in = self.dropout(self.dropout_rate)(x_in)
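The substantive fix is in this hunk: in the removed lines, the closing parenthesis after Dense(n_hidden) ends the constructor call early, leaving name= and kernel_initializer= stranded outside any call, which is a syntax error, so the configured-layers branch could never run. A minimal, self-contained sketch of the corrected loop, assuming TensorFlow's bundled Keras and hypothetical layer widths (the activation and initializer below are stand-ins for the class attributes used in the diff):

    from tensorflow import keras

    output_shape = 5
    dense_layer_configuration = [128, 64, 32]   # hypothetical hidden widths

    inputs = keras.layers.Input(shape=(16,))
    x_in = inputs
    for layer, n_hidden in enumerate(dense_layer_configuration):
        if n_hidden < output_shape:
            break   # stop once a configured width is narrower than the output
        # Fixed call: n_hidden, name, and kernel_initializer are all arguments
        # of the same Dense(...) constructor.
        x_in = keras.layers.Dense(n_hidden, name=f"Dense_{layer + 1}",
                                  kernel_initializer="glorot_uniform")(x_in)
        x_in = keras.layers.ReLU(name=f"ReLU_{layer + 1}")(x_in)
    outputs = keras.layers.Dense(output_shape, name="Output")(x_in)
    model = keras.Model(inputs, outputs)
    model.summary()   # Dense_1 .. Dense_3 followed by the output layer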