diff --git a/src/model_modules/flatten.py b/src/model_modules/flatten.py
index 39d61f251eea165fd427cb36d95dd5acc712dd03..218b12eddbf3e6a1bf6b986afc5879dbfecd0d72 100644
--- a/src/model_modules/flatten.py
+++ b/src/model_modules/flatten.py
@@ -12,10 +12,17 @@ def get_activation(input_to_activate: keras.layers, activation: Union[Callable,
 
     This helper function is able to handle advanced keras activations as well as strings for standard activations
 
-    :param input_to_activate:
-    :param activation:
+    :param input_to_activate: keras layer to apply activation on
+    :param activation: activation to apply on `input_to_activate'. Can be a standard keras string or an activation layer
     :param kwargs:
     :return:
+
+    .. code-block:: python
+
+        input_x = ... # your input data
+        x_in = keras.layer(<without activation>)(input_x)
+        x_act_string = get_activation(x_in, 'relu')
+        x_act_layer = get_activation(x_in, keras.layers.advanced_activations.ELU)
     """
     if isinstance(activation, str):
         name = kwargs.pop('name', None)
@@ -37,16 +44,16 @@ def flatten_tail(input_x: keras.layers, inner_neurons: int, activation: Union[Ca
     """
     Flatten output of convolutional layers
 
-    :param input_x:
-    :param output_neurons:
-    :param output_activation:
-    :param name:
-    :param bound_weight:
-    :param dropout_rate:
-    :param activation:
-    :param reduction_filter:
-    :param inner_neurons:
-    :param kernel_regularizer:
+    :param input_x: Multidimensional keras layer (ConvLayer)
+    :param output_neurons: Number of neurons in the last layer (must fit the shape of labels)
+    :param output_activation: final activation function
+    :param name: Name of the flatten tail.
+    :param bound_weight: Use `tanh' as inner activation if set to True, otherwise `activation'
+    :param dropout_rate: Dropout rate to be applied between trainable layers
+    :param activation: activation to apply after conv and dense layers
+    :param reduction_filter: number of filters used for information compression on `input_x' before flatten()
+    :param inner_neurons: Number of neurons in inner dense layer
+    :param kernel_regularizer: regularizer to apply on conv and dense layers
 
     :return:
 
@@ -60,6 +67,9 @@ def flatten_tail(input_x: keras.layers, inner_neurons: int, activation: Union[Ca
                            name='Main', bound_weight=False, dropout_rate=.3,
                            kernel_regularizer=keras.regularizers.l2()
                            )
+        model = keras.Model(inputs=input_x, outputs=[out])
+
+
     """
     # compression layer
     if reduction_filter is None: