diff --git a/test/test_model_modules/test_inception_model.py b/test/test_model_modules/test_inception_model.py
index 01a58399affe56e8afad0b0c8d1da71506259522..fc1c6bb6aebe5bbd6d365a855fdc4ef872ba6655 100644
--- a/test/test_model_modules/test_inception_model.py
+++ b/test/test_model_modules/test_inception_model.py
@@ -2,37 +2,40 @@ import keras
 import pytest
 
 from src.model_modules.inception_model import InceptionModelBase
+from src.model_modules.advanced_paddings import ReflectionPadding2D, SymmetricPadding2D
 
 
 class TestInceptionModelBase:
 
     @pytest.fixture
     def base(self):
         return InceptionModelBase()
 
     @pytest.fixture
     def input_x(self):
         return keras.Input(shape=(32, 32, 3))
 
     @staticmethod
     def step_in(element, depth=1):
         for _ in range(depth):
             element = element.input._keras_history[0]
         return element
 
     def test_init(self, base):
         assert base.number_of_blocks == 0
         assert base.part_of_block == 0
         assert base.ord_base == 96
         assert base.act_number == 0
 
     def test_block_part_name(self, base):
         assert base.block_part_name() == chr(96)
         base.part_of_block += 1
         assert base.block_part_name() == 'a'
 
     def test_create_conv_tower_3x3(self, base, input_x):
-        opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
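+        # the 'SymPad2D' alias selects SymmetricPadding2D as this tower's padding layer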
+        opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3),
+                'padding': 'SymPad2D'}
         tower = base.create_conv_tower(**opts)
         # check last element of tower (activation)
         assert base.part_of_block == 1
@@ -48,8 +56,13 @@ class TestInceptionModelBase:
         assert conv_layer.kernel_size == (3, 3)
         assert conv_layer.strides == (1, 1)
         assert conv_layer.name == "Block_0a_3x3"
+        # check previous element of tower (padding)
+        pad_layer = self.step_in(conv_layer)
+        assert isinstance(pad_layer, SymmetricPadding2D)
+        assert pad_layer.padding == ((1, 1), (1, 1))
+        assert pad_layer.name == 'Block_0a_Pad'
         # check previous element of tower (activation)
-        act_layer2 = self.step_in(conv_layer)
+        act_layer2 = self.step_in(pad_layer)
         assert isinstance(act_layer2, keras.layers.advanced_activations.ReLU)
         assert act_layer2.name == "Block_0a_act_1"
         # check previous element of tower (conv2D)
@@ -57,11 +70,11 @@
         assert isinstance(conv_layer2, keras.layers.Conv2D)
         assert conv_layer2.filters == 64
         assert conv_layer2.kernel_size == (1, 1)
-        assert conv_layer2.padding == 'same'
+        assert conv_layer2.padding == 'valid'
         assert conv_layer2.name == 'Block_0a_1x1'
         assert conv_layer2.input._keras_shape == (None, 32, 32, 3)
 
     def test_create_conv_tower_3x3_activation(self, base, input_x):
         opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
         # create tower with standard activation function
         tower = base.create_conv_tower(activation='tanh', **opts)
@@ -77,6 +91,6 @@
         assert act_layer.name == "Block_0b_act_2"
 
     def test_create_conv_tower_1x1(self, base, input_x):
         opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (1, 1)}
         tower = base.create_conv_tower(**opts)
         # check last element of tower (activation)
@@ -96,6 +111,6 @@
         assert conv_layer.input._keras_shape == (None, 32, 32, 3)
 
     def test_create_conv_towers(self, base, input_x):
         opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
         _ = base.create_conv_tower(**opts)
         tower = base.create_conv_tower(**opts)
@@ -103,10 +119,10 @@
         assert tower.name == 'Block_0b_act_2_1/Relu:0'
 
     def test_create_pool_tower(self, base, input_x):
         opts = {'input_x': input_x, 'pool_kernel': (3, 3), 'tower_filter': 32}
         tower = base.create_pool_tower(**opts)
         # check last element of tower (activation)
         assert base.part_of_block == 1
         assert tower.name == 'Block_0a_act_1_3/Relu:0'
         act_layer = tower._keras_history[0]
         assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
@@ -124,7 +142,12 @@ class TestInceptionModelBase:
         assert isinstance(pool_layer, keras.layers.pooling.MaxPooling2D)
         assert pool_layer.name == "Block_0a_MaxPool"
         assert pool_layer.pool_size == (3, 3)
-        assert pool_layer.padding == 'same'
+        assert pool_layer.padding == 'valid'
+        # check previous element of tower (padding)
+        pad_layer = self.step_in(pool_layer)
+        assert isinstance(pad_layer, keras.layers.convolutional.ZeroPadding2D)
+        assert pad_layer.name == "Block_0a_Pad"
+        assert pad_layer.padding == ((1, 1), (1, 1))
         # check avg pool tower
         opts = {'input_x': input_x, 'pool_kernel': (3, 3), 'tower_filter': 32}
         tower = base.create_pool_tower(max_pooling=False, **opts)
@@ -132,12 +155,20 @@ class TestInceptionModelBase:
         assert isinstance(pool_layer, keras.layers.pooling.AveragePooling2D)
         assert pool_layer.name == "Block_0b_AvgPool"
         assert pool_layer.pool_size == (3, 3)
-        assert pool_layer.padding == 'same'
+        assert pool_layer.padding == 'valid'
 
     def test_inception_block(self, base, input_x):
-        conv = {'tower_1': {'reduction_filter': 64, 'tower_kernel': (3, 3), 'tower_filter': 64},
-                'tower_2': {'reduction_filter': 64, 'tower_kernel': (5, 5), 'tower_filter': 64, 'activation': 'tanh'}}
-        pool = {'pool_kernel': (3, 3), 'tower_filter': 64}
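+        # padding can be given as an alias string ('SymPad2D') or as a padding layer class (ReflectionPadding2D)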
+        conv = {'tower_1': {'reduction_filter': 64,
+                            'tower_kernel': (3, 3),
+                            'tower_filter': 64},
+                'tower_2': {'reduction_filter': 64,
+                            'tower_kernel': (5, 5),
+                            'tower_filter': 64,
+                            'activation': 'tanh',
+                            'padding': 'SymPad2D'},
+                }
+        pool = {'pool_kernel': (3, 3), 'tower_filter': 64, 'padding': ReflectionPadding2D}
         opts = {'input_x': input_x, 'tower_conv_parts': conv, 'tower_pool_parts': pool}
         block = base.inception_block(**opts)
         assert base.number_of_blocks == 1
@@ -150,8 +181,19 @@ class TestInceptionModelBase:
         assert block_pool2.name == 'Block_1d_act_1/Relu:0'
         assert self.step_in(block_1a._keras_history[0]).name == "Block_1a_3x3"
         assert self.step_in(block_1b._keras_history[0]).name == "Block_1b_5x5"
+        assert self.step_in(block_1a._keras_history[0], depth=2).name == 'Block_1a_Pad'
+        assert isinstance(self.step_in(block_1a._keras_history[0], depth=2), keras.layers.ZeroPadding2D)
+        assert self.step_in(block_1b._keras_history[0], depth=2).name == 'Block_1b_Pad'
+        assert isinstance(self.step_in(block_1b._keras_history[0], depth=2), SymmetricPadding2D)
+        # pooling
         assert isinstance(self.step_in(block_pool1._keras_history[0], depth=2), keras.layers.pooling.MaxPooling2D)
+        assert self.step_in(block_pool1._keras_history[0], depth=3).name == 'Block_1c_Pad'
+        assert isinstance(self.step_in(block_pool1._keras_history[0], depth=3), ReflectionPadding2D)
+
         assert isinstance(self.step_in(block_pool2._keras_history[0], depth=2), keras.layers.pooling.AveragePooling2D)
+        assert self.step_in(block_pool2._keras_history[0], depth=3).name == 'Block_1d_Pad'
+        assert isinstance(self.step_in(block_pool2._keras_history[0], depth=3), ReflectionPadding2D)
+
         # next block
         opts['input_x'] = block
         opts['tower_pool_parts']['max_pooling'] = True
@@ -163,22 +205,65 @@
         assert block_2a.name == 'Block_2a_act_2/Relu:0'
         assert block_2b.name == 'Block_2b_act_2_tanh/Tanh:0'
         assert block_pool.name == 'Block_2c_act_1/Relu:0'
+        # block 2a
         assert self.step_in(block_2a._keras_history[0]).name == "Block_2a_3x3"
+        assert self.step_in(block_2a._keras_history[0], depth=2).name == "Block_2a_Pad"
+        assert isinstance(self.step_in(block_2a._keras_history[0], depth=2), keras.layers.ZeroPadding2D)
+        # block 2b
         assert self.step_in(block_2b._keras_history[0]).name == "Block_2b_5x5"
+        assert self.step_in(block_2b._keras_history[0], depth=2).name == "Block_2b_Pad"
+        assert isinstance(self.step_in(block_2b._keras_history[0], depth=2), SymmetricPadding2D)
+        # block pool
         assert isinstance(self.step_in(block_pool._keras_history[0], depth=2), keras.layers.pooling.MaxPooling2D)
+        assert self.step_in(block_pool._keras_history[0], depth=3).name == 'Block_2c_Pad'
+        assert isinstance(self.step_in(block_pool._keras_history[0], depth=3), ReflectionPadding2D)
 
     def test_batch_normalisation(self, base, input_x):
         base.part_of_block += 1
         bn = base.batch_normalisation(input_x)._keras_history[0]
         assert isinstance(bn, keras.layers.normalization.BatchNormalization)
         assert bn.name == "Block_0a_BN"
 
     def test_padding_layer_zero_padding(self, base, input_x):
-        base.part_of_block += 2
         padding_size = ((1, 1), (0, 0))
         zp = base.padding_layer('ZeroPad2D')
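+        # the alias string, the full class name and the class itself all resolve to ZeroPadding2D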
+        assert zp == keras.layers.convolutional.ZeroPadding2D
+        assert base.padding_layer('ZeroPadding2D') == keras.layers.convolutional.ZeroPadding2D
+        assert base.padding_layer(keras.layers.ZeroPadding2D) == keras.layers.convolutional.ZeroPadding2D
         assert zp.__name__ == 'ZeroPadding2D'
         zp_ap = zp(padding=padding_size)(input_x)
         assert zp_ap._keras_history[0].padding == ((1, 1), (0, 0))
-        print('abc')
+
+    def test_padding_layer_sym_padding(self, base, input_x):
+        padding_size = ((1, 1), (0, 0))
+        zp = base.padding_layer('SymPad2D')
+        assert zp == SymmetricPadding2D
+        assert base.padding_layer('SymmetricPadding2D') == SymmetricPadding2D
+        assert base.padding_layer(SymmetricPadding2D) == SymmetricPadding2D
+        assert zp.__name__ == 'SymmetricPadding2D'
+        zp_ap = zp(padding=padding_size)(input_x)
+        assert zp_ap._keras_history[0].padding == ((1, 1), (0, 0))
+
+    def test_padding_layer_ref_padding(self, base, input_x):
+        padding_size = ((1, 1), (0, 0))
+        zp = base.padding_layer('RefPad2D')
+        assert zp == ReflectionPadding2D
+        assert base.padding_layer('ReflectionPadding2D') == ReflectionPadding2D
+        assert base.padding_layer(ReflectionPadding2D) == ReflectionPadding2D
+        assert zp.__name__ == 'ReflectionPadding2D'
+        zp_ap = zp(padding=padding_size)(input_x)
+        assert zp_ap._keras_history[0].padding == ((1, 1), (0, 0))
+
+    def test_padding_layer_raises(self, base, input_x):
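+        # an unknown padding alias should raise a NotImplementedError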
+        with pytest.raises(NotImplementedError) as einfo:
+            base.padding_layer('FalsePadding2D')
+        assert "`'FalsePadding2D'' is not implemented as padding. " \
+               "Use one of those: i) `RefPad2D', ii) `SymPad2D', iii) `ZeroPad2D'" in str(einfo.value)
+        with pytest.raises(TypeError) as einfo:
+            base.padding_layer(keras.layers.Conv2D)
+        assert "`Conv2D' is not a valid padding layer type. Use one of those: "\
+               "i) ReflectionPadding2D, ii) SymmetricPadding2D, iii) ZeroPadding2D" in str(einfo.value)