diff --git a/src/model_modules/flatten.py b/src/model_modules/flatten.py
index e2dde4481cb405078dffdff324f5d8157388cd83..39d61f251eea165fd427cb36d95dd5acc712dd03 100644
--- a/src/model_modules/flatten.py
+++ b/src/model_modules/flatten.py
@@ -18,6 +18,9 @@ def get_activation(input_to_activate: keras.layers, activation: Union[Callable,
     :return:
     """
     if isinstance(activation, str):
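+        # Suffix the activation onto the supplied layer name, e.g. name='String' with 'relu' -> 'String_relu'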
+        name = kwargs.pop('name', None)
+        kwargs['name'] = f'{name}_{activation}' if name is not None else None
         act = keras.layers.Activation(activation, **kwargs)(input_to_activate)
     else:
         act = activation(**kwargs)(input_to_activate)
@@ -63,12 +66,10 @@ def flatten_tail(input_x: keras.layers, inner_neurons: int, activation: Union[Ca
     if reduction_filter is None:
         x_in = input_x
     else:
-        x_in = keras.layers.Conv2D(reduction_filter, (1, 1), name=f'{name}_Conv_1x1')(input_x)
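+        # Apply the same kernel_regularizer to the 1x1 reduction convolution as to the dense layers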
+        x_in = keras.layers.Conv2D(reduction_filter, (1, 1), name=f'{name}_Conv_1x1',
+                                   kernel_regularizer=kernel_regularizer)(input_x)
         x_in = get_activation(x_in, activation, name=f'{name}_conv_act')
-        # if isinstance(activation, str):
-        #     x_in = keras.layers.Activation(activation, )
-        # else:
-        #     x_in = activation(name='{}_conv_act'.format(name))(x_in)
 
     x_in = keras.layers.Flatten(name='{}'.format(name))(x_in)
 
@@ -80,10 +81,6 @@ def flatten_tail(input_x: keras.layers, inner_neurons: int, activation: Union[Ca
         x_in = keras.layers.Activation('tanh')(x_in)
     else:
         x_in = get_activation(x_in, activation, name=f'{name}_act')
-        # try:
-        #     x_in = activation(name='{}_act'.format(name))(x_in)
-        # except:
-        #     x_in = activation()(x_in)
 
     if dropout_rate is not None:
         x_in = keras.layers.Dropout(dropout_rate, name='{}_Dropout_2'.format(name))(x_in)
diff --git a/test/test_model_modules/test_flatten_tail.py b/test/test_model_modules/test_flatten_tail.py
new file mode 100644
index 0000000000000000000000000000000000000000..0de138ec2323aea3409d5deadfb26c9741b89f50
--- /dev/null
+++ b/test/test_model_modules/test_flatten_tail.py
@@ -0,0 +1,122 @@
+import keras
+import pytest
+from src.model_modules.flatten import flatten_tail, get_activation
+
+
+class TestGetActivation:
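+    """Tests for get_activation: string activations are wrapped in a named Activation layer,
+    advanced-activation layer classes are instantiated directly."""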
+
+    @pytest.fixture()
+    def model_input(self):
+        input_x = keras.layers.Input(shape=(7, 1, 2))
+        return input_x
+
+    def test_string_act(self, model_input):
+        x_in = get_activation(model_input, activation='relu', name='String')
+        act = x_in._keras_history[0]
+        assert act.name == 'String_relu'
+
+    def test_string_act_unknown(self, model_input):
+        with pytest.raises(ValueError) as einfo:
+            get_activation(model_input, activation='invalid_activation', name='String')
+        assert 'Unknown activation function:invalid_activation' in str(einfo.value)
+
+    def test_layer_act(self, model_input):
+        x_in = get_activation(model_input, activation=keras.layers.advanced_activations.ELU, name='adv_layer')
+        act = x_in._keras_history[0]
+        assert act.name == 'adv_layer'
+
+    def test_layer_act_invalid(self, model_input):
+        with pytest.raises(TypeError):
+            get_activation(model_input, activation=keras.layers.Conv2D, name='adv_layer')
+
+
+class TestFlattenTail:
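+    """Tests for flatten_tail, from the minimal configuration to all options enabled."""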
+
+    @pytest.fixture()
+    def model_input(self):
+        input_x = keras.layers.Input(shape=(7, 1, 2))
+        return input_x
+
+    @staticmethod
+    def step_in(element, depth=1):
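+        """Step backwards through the model graph: return the layer `depth` levels upstream of `element`."""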
+        for _ in range(depth):
+            element = element.input._keras_history[0]
+        return element
+
+    def test_flatten_tail_no_bound_no_regul_no_drop(self, model_input):
+        tail = flatten_tail(input_x=model_input, inner_neurons=64, activation=keras.layers.advanced_activations.ELU,
+                            output_neurons=2, output_activation='linear',
+                            reduction_filter=None,
+                            name='Main_tail',
+                            bound_weight=False,
+                            dropout_rate=None,
+                            kernel_regularizer=None)
+        final_act = tail._keras_history[0]
+        assert final_act.name == 'Main_tail_final_act_linear'
+        final_dense = self.step_in(final_act)
+        assert final_dense.name == 'Main_tail_out_Dense'
+        assert final_dense.units == 2
+        assert final_dense.kernel_regularizer is None
+        inner_act = self.step_in(final_dense)
+        assert inner_act.name == 'Main_tail_act'
+        assert inner_act.__class__.__name__ == 'ELU'
+        inner_dense = self.step_in(inner_act)
+        assert inner_dense.name == 'Main_tail_inner_Dense'
+        assert inner_dense.units == 64
+        assert inner_dense.kernel_regularizer is None
+        flatten = self.step_in(inner_dense)
+        assert flatten.name == 'Main_tail'
+        input_layer = self.step_in(flatten)
+        assert input_layer.input_shape == (None, 7, 1, 2)
+
+    def test_flatten_tail_all_settings(self, model_input):
+        tail = flatten_tail(input_x=model_input, inner_neurons=64, activation=keras.layers.advanced_activations.ELU,
+                            output_neurons=3, output_activation='linear',
+                            reduction_filter=32,
+                            name='Main_tail_all',
+                            bound_weight=True,
+                            dropout_rate=.35,
+                            kernel_regularizer=keras.regularizers.l2())
+
+        final_act = tail._keras_history[0]
+        assert final_act.name == 'Main_tail_all_final_act_linear'
+
+        final_dense = self.step_in(final_act)
+        assert final_dense.name == 'Main_tail_all_out_Dense'
+        assert final_dense.units == 3
+        assert isinstance(final_dense.kernel_regularizer, keras.regularizers.L1L2)
+
+        final_dropout = self.step_in(final_dense)
+        assert final_dropout.name == 'Main_tail_all_Dropout_2'
+        assert final_dropout.rate == 0.35
+
+        inner_act = self.step_in(final_dropout)
+        assert inner_act.get_config() == {'name': 'activation_1', 'trainable': True, 'activation': 'tanh'}
+
+        inner_dense = self.step_in(inner_act)
+        assert inner_dense.units == 64
+        assert isinstance(inner_dense.kernel_regularizer, keras.regularizers.L1L2)
+
+        inner_dropout = self.step_in(inner_dense)
+        assert inner_dropout.get_config() == {'name': 'Main_tail_all_Dropout_1', 'trainable': True, 'rate': 0.35,
+                                              'noise_shape': None, 'seed': None}
+
+        flatten = self.step_in(inner_dropout)
+        assert flatten.get_config() == {'name': 'Main_tail_all', 'trainable': True, 'data_format': 'channels_last'}
+
+        reduc_act = self.step_in(flatten)
+        assert reduc_act.get_config() == {'name': 'Main_tail_all_conv_act', 'trainable': True, 'alpha': 1.0}
+
+        reduc_conv = self.step_in(reduc_act)
+
+        assert reduc_conv.kernel_size == (1, 1)
+        assert reduc_conv.name == 'Main_tail_all_Conv_1x1'
+        assert reduc_conv.filters == 32
+        assert isinstance(reduc_conv.kernel_regularizer, keras.regularizers.L1L2)
+
+        input_layer = self.step_in(reduc_conv)
+        assert input_layer.input_shape == (None, 7, 1, 2)
+