Resolve "release v1.4.0"
- leufen1 authored
@@ -31,7 +31,7 @@ class RNN(AbstractModelClass):
def __init__(self, input_shape: list, output_shape: list, activation="relu", activation_output="linear",
@@ -42,8 +42,10 @@ class RNN(AbstractModelClass):
:param activation: set your desired activation function. Choose from relu, tanh, sigmoid, linear, selu, prelu,
:param activation_output: same as the activation parameter, but applied only on the output layer. (Default
@@ -55,6 +57,8 @@ class RNN(AbstractModelClass):
:param dropout: use dropout with the given rate. If no value is provided, dropout layers are not added to the
:param batch_normalization: use batch normalization layers in the network if enabled. These layers are inserted
between the linear part of a layer (the nn part) and the non-linear part (activation function). No BN layer
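The placement described in the batch_normalization docstring can be sketched as follows. This is a minimal Keras illustration of that ordering, not code taken from the RNN class itself; the layer sizes and names are assumed for illustration only.

from tensorflow import keras

inputs = keras.layers.Input(shape=(16,))
x = keras.layers.Dense(32)(inputs)         # linear part (the "nn" part), no activation yet
x = keras.layers.BatchNormalization()(x)   # BN inserted between the linear part and the non-linearity
x = keras.layers.Activation("relu")(x)     # non-linear part (activation function) applied afterwards
outputs = keras.layers.Dense(1, activation="linear")(x)
model = keras.Model(inputs=inputs, outputs=outputs)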
@@ -82,6 +86,8 @@ class RNN(AbstractModelClass):
@@ -105,7 +111,7 @@ class RNN(AbstractModelClass):
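For context, a hedged usage sketch of the constructor signature shown in the first hunk: the import path and the shape values below are assumptions and may differ in the actual package; the keyword arguments are the ones documented in the docstring excerpts above.

from mlair.model_modules.recurrent_networks import RNN  # assumed module path

model = RNN(
    input_shape=[(65, 1, 9)],        # assumed: (history window, 1, number of input variables)
    output_shape=[(4,)],             # assumed: four forecast steps
    activation="relu",               # hidden-layer activation, e.g. relu, tanh, sigmoid, selu, prelu
    activation_output="linear",      # activation applied only on the output layer
    dropout=0.2,                     # omit to build the network without dropout layers
    batch_normalization=True,        # insert BN between the linear part and the activation
)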