
Resolve "custom dense layers in rnn"

Merged Ghost User requested to merge lukas_issue361_feat_custom-dense-layers-in-rnn into develop
1 file  +2 −2
@@ -139,9 +139,9 @@ class RNN(AbstractModelClass):  # pragma: no cover
         for layer, n_hidden in enumerate(self.dense_layer_configuration):
             if n_hidden < self._output_shape:
                 break
-            x_in = keras.layers.Dense(n_hidden, name=f"Dense_{layer + 1}",
+            x_in = keras.layers.Dense(n_hidden, name=f"Dense_{len(conf) + layer + 1}",
                                       kernel_initializer=self.kernel_initializer, )(x_in)
-            x_in = self.activation(name=f"{self.activation_name}_{layer + 1}")(x_in)
+            x_in = self.activation(name=f"{self.activation_name}_{len(conf) + layer + 1}")(x_in)
             if self.dropout is not None:
                 x_in = self.dropout(self.dropout_rate)(x_in)
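
For context: Keras requires all layer names within a model to be unique, so numbering the dense tail from 1 again would presumably collide with names already taken by the stack built from `conf` earlier in the method; offsetting by `len(conf)` continues the numbering instead. A minimal, self-contained sketch of that naming scheme follows. The shapes, the `conf` values, and the plain Dense layers standing in for the recurrent part are illustrative assumptions, not the actual model code:

    import tensorflow.keras as keras

    # Illustrative stand-ins; in the real method, `conf` describes the
    # stack built just above the dense-tail loop (assumption).
    conf = [32, 16]                       # hypothetical first-stack config
    dense_layer_configuration = [64, 8]   # hypothetical dense tail
    _output_shape = 4

    x_input = keras.layers.Input(shape=(20,))
    x_in = x_input
    # First stack: consumes the names "Dense_1" .. f"Dense_{len(conf)}"
    # (plain Dense used here instead of recurrent layers for brevity).
    for layer, n_hidden in enumerate(conf):
        x_in = keras.layers.Dense(n_hidden, name=f"Dense_{layer + 1}")(x_in)

    # Dense tail: offsetting by len(conf) keeps every name unique; without
    # the offset, Keras raises "all layer names should be unique" when the
    # Model is constructed.
    for layer, n_hidden in enumerate(dense_layer_configuration):
        if n_hidden < _output_shape:
            break
        x_in = keras.layers.Dense(n_hidden,
                                  name=f"Dense_{len(conf) + layer + 1}")(x_in)

    model = keras.Model(x_input, x_in)
    model.summary()  # layer names run Dense_1, Dense_2, Dense_3, Dense_4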