diff --git a/src/model_modules/flatten.py b/src/model_modules/flatten.py
index 218b12eddbf3e6a1bf6b986afc5879dbfecd0d72..dd1e8e21eeb96f75372add0208b03dc06f5dc25c 100644
--- a/src/model_modules/flatten.py
+++ b/src/model_modules/flatten.py
@@ -10,19 +10,24 @@ def get_activation(input_to_activate: keras.layers, activation: Union[Callable,
     """
     Apply activation on a given input layer.
 
-    This helper function is able to handle advanced keras activations as well as strings for standard activations
+    This helper function is able to handle advanced keras activations as well as strings for standard activations.
 
     :param input_to_activate: keras layer to apply activation on
     :param activation: activation to apply on `input_to_activate'. Can be a standard keras strings or activation layers
-    :param kwargs:
-    :return:
+    :param kwargs: additional keyword arguments forwarded to the activation layer
+
+    :return: the given input layer with the activation applied
 
     .. code-block:: python
 
         input_x = ... # your input data
         x_in = keras.layer(<without activation>)(input_x)
+
+        # get activation via string
         x_act_string = get_activation(x_in, 'relu')
+        # or get activation via layer callable
         x_act_layer = get_activation(x_in, keras.layers.advanced_activations.ELU)
+
     """
     if isinstance(activation, str):
         name = kwargs.pop('name', None)
@@ -42,7 +47,7 @@ def flatten_tail(input_x: keras.layers, inner_neurons: int, activation: Union[Ca
                  kernel_regularizer: keras.regularizers = None
                  ):
     """
-    Flatten output of convolutional layers
+    Flatten output of convolutional layers.
 
     :param input_x: Multidimensional keras layer (ConvLayer)
     :param output_neurons: Number of neurons in the last layer (must fit the shape of labels)
@@ -55,12 +60,12 @@ def flatten_tail(input_x: keras.layers, inner_neurons: int, activation: Union[Ca
     :param inner_neurons: Number of neurons in inner dense layer
     :param kernel_regularizer: regularizer to apply on conv and dense layers
 
-    :return:
+    :return: flattened branch with size n=output_neurons
 
     .. code-block:: python
 
         input_x = ... # your input data
-        conv_out = Conv2D(*args)(input_x) # your convolutional stack
+        conv_out = Conv2D(*args)(input_x) # your convolution stack
         out = flatten_tail(conv_out, inner_neurons=64, activation=keras.layers.advanced_activations.ELU,
                            output_neurons=4
                            output_activation='linear', reduction_filter=64,
@@ -69,7 +74,6 @@ def flatten_tail(input_x: keras.layers, inner_neurons: int, activation: Union[Ca
                            )
         model = keras.Model(inputs=input_x, outputs=[out])
 
-
     """
     # compression layer
     if reduction_filter is None: