diff --git a/src/model_modules/inception_model.py b/src/model_modules/inception_model.py
index 1daddd84ede9131718e4ba4354fba5157270f2dc..1cb7656335495f0261abb434e4a203cb4e63887e 100644
--- a/src/model_modules/inception_model.py
+++ b/src/model_modules/inception_model.py
@@ -5,6 +5,7 @@ import logging
 
 import keras
 import keras.layers as layers
+from src.model_modules.advanced_paddings import PadUtils, ReflectionPadding2D, SymmetricPadding2D
 
 
 class InceptionModelBase:
@@ -53,33 +54,39 @@ class InceptionModelBase:
         regularizer = kwargs.get('regularizer', keras.regularizers.l2(0.01))
         bn_settings = kwargs.get('bn_settings', {})
         act_settings = kwargs.get('act_settings', {})
+        padding = kwargs.get('padding', 'ZeroPad2D')
         logging.debug(f'Inception Block with activation: {activation}')
 
         block_name = f'Block_{self.number_of_blocks}{self.block_part_name()}_{tower_kernel[0]}x{tower_kernel[1]}'
+        padding_size = PadUtils.get_padding_for_same(tower_kernel)
 
         if tower_kernel == (1, 1):
             tower = layers.Conv2D(tower_filter,
                                   tower_kernel,
-                                  padding='same',
+                                  padding='valid',
                                   kernel_regularizer=regularizer,
                                   name=block_name)(input_x)
-            tower = self.act(tower, activation, **act_settings)
+            # tower = self.act(tower, activation, **act_settings)
         else:
             tower = layers.Conv2D(reduction_filter,
                                   (1, 1),
-                                  padding='same',
+                                  padding='valid',
                                   kernel_regularizer=regularizer,
                                   name=f'Block_{self.number_of_blocks}{self.block_part_name()}_1x1')(input_x)
             tower = self.act(tower, activation, **act_settings)
 
+            tower = self.padding_layer(padding)(padding=padding_size,
+                                                name=f'Block_{self.number_of_blocks}{self.block_part_name()}_Pad'
+                                                )(tower)
+
             tower = layers.Conv2D(tower_filter,
                                   tower_kernel,
-                                  padding='same',
+                                  padding='valid',
                                   kernel_regularizer=regularizer,
                                   name=block_name)(tower)
-            if batch_normalisation:
-                tower = self.batch_normalisation(tower, **bn_settings)
-            tower = self.act(tower, activation, **act_settings)
+        if batch_normalisation:
+            tower = self.batch_normalisation(tower, **bn_settings)
+        tower = self.act(tower, activation, **act_settings)
 
         return tower
 
@@ -101,6 +108,29 @@ class InceptionModelBase:
         else:
             return act_name.__name__
 
+    @staticmethod
+    def padding_layer(padding):
+        allowed_paddings = {
+            'RefPad2D': ReflectionPadding2D, 'ReflectionPadding2D': ReflectionPadding2D,
+            'SymPad2D': SymmetricPadding2D, 'SymmetricPadding2D': SymmetricPadding2D,
+            'ZeroPad2D': keras.layers.ZeroPadding2D, 'ZeroPadding2D': keras.layers.ZeroPadding2D
+        }
+        if isinstance(padding, str):
+            try:
+                pad2d = allowed_paddings[padding]
+            except KeyError as einfo:
+                raise NotImplementedError(
+                    f"`{einfo}' is not implemented as padding. "
+                    "Use one of those: i) `RefPad2D', ii) `SymPad2D', iii) `ZeroPad2D'")
+        else:
+            if padding in allowed_paddings.values():
+                pad2d = padding
+            else:
+                raise TypeError(f"`{padding.__name__}' is not a valid padding layer type. "
+                                "Use one of those: "
+                                "i) ReflectionPadding2D, ii) SymmetricPadding2D, iii) ZeroPadding2D")
+        return pad2d
+
     def create_pool_tower(self, input_x, pool_kernel, tower_filter, activation='relu', max_pooling=True, **kwargs):
         """
         This function creates a "MaxPooling tower block"
@@ -114,6 +144,8 @@ class InceptionModelBase:
         self.part_of_block += 1
         self.act_number = 1
         act_settings = kwargs.get('act_settings', {})
+        padding = kwargs.get('padding', 'ZeroPad2D')
+        padding_size = PadUtils.get_padding_for_same(kernel_size=pool_kernel)
 
         # pooling block
         block_name = f"Block_{self.number_of_blocks}{self.block_part_name()}_"
@@ -123,10 +155,12 @@ class InceptionModelBase:
         else:
             block_type = "AvgPool"
             pooling = layers.AveragePooling2D
-        tower = pooling(pool_kernel, strides=(1, 1), padding='same', name=block_name+block_type)(input_x)
+
+        tower = self.padding_layer(padding)(padding=padding_size, name=block_name+'Pad')(input_x)
+        tower = pooling(pool_kernel, strides=(1, 1), padding='valid', name=block_name+block_type)(tower)
 
         # convolution block
-        tower = layers.Conv2D(tower_filter, (1, 1), padding='same', name=block_name+"1x1")(tower)
+        tower = layers.Conv2D(tower_filter, (1, 1), padding='valid', name=block_name+"1x1")(tower)
         tower = self.act(tower, activation, **act_settings)
 
         return tower
@@ -138,16 +172,22 @@ class InceptionModelBase:
         :param tower_conv_parts: dict containing settings for parts of inception block; Example:
                                  tower_conv_parts = {'tower_1': {'reduction_filter': 32,
                                                                  'tower_filter': 64,
-                                                                 'tower_kernel': (3, 1)},
+                                                                 'tower_kernel': (3, 1),
+                                                                 'activation' : 'relu',
+                                                                 'padding' : 'SymPad2D'},
                                                      'tower_2': {'reduction_filter': 32,
                                                                  'tower_filter': 64,
-                                                                 'tower_kernel': (5, 1)},
+                                                                 'tower_kernel': (5, 1),
+                                                                 'activation' : LeakyReLU,
+                                                                 'padding' : keras.layers.ZeroPadding2D},
                                                      'tower_3': {'reduction_filter': 32,
                                                                  'tower_filter': 64,
-                                                                 'tower_kernel': (1, 1)},
+                                                                 'tower_kernel': (1, 1),
+                                                                 'activation' : ELU,
+                                                                 'padding' : src.model_modules.advanced_paddings.ReflectionPadding2D}
                                                     }
         :param tower_pool_parts: dict containing settings for pool part of inception block; Example:
-                                 tower_pool_parts = {'pool_kernel': (3, 1), 'tower_filter': 64}
+                                 tower_pool_parts = {'pool_kernel': (3, 1), 'tower_filter': 64, 'padding': 'RefPad2D'}
         :return:
         """
         self.number_of_blocks += 1
@@ -171,12 +211,41 @@ class InceptionModelBase:
         return block
 
 
+# if __name__ == '__main__':
+#     from keras.models import Model
+#     from keras.layers import Conv2D, Flatten, Dense, Input
+#     import numpy as np
+#
+#
+#     kernel_1 = (3, 3)
+#     kernel_2 = (5, 5)
+#     x = np.array(range(2000)).reshape(-1, 10, 10, 1)
+#     y = x.mean(axis=(1, 2))
+#
+#     x_input = Input(shape=x.shape[1:])
+#     pad1 = PadUtils.get_padding_for_same(kernel_size=kernel_1)
+#     x_out = InceptionModelBase.padding_layer('RefPad2D')(padding=pad1, name="RefPAD1")(x_input)
+#     # x_out = ReflectionPadding2D(padding=pad1, name="RefPAD")(x_input)
+#     x_out = Conv2D(5, kernel_size=kernel_1, activation='relu')(x_out)
+#
+#     pad2 = PadUtils.get_padding_for_same(kernel_size=kernel_2)
+#     x_out = InceptionModelBase.padding_layer(SymmetricPadding2D)(padding=pad2, name="SymPAD1")(x_out)
+#     # x_out = SymmetricPadding2D(padding=pad2, name="SymPAD")(x_out)
+#     x_out = Conv2D(2, kernel_size=kernel_2, activation='relu')(x_out)
+#     x_out = Flatten()(x_out)
+#     x_out = Dense(1, activation='linear')(x_out)
+#
+#     model = Model(inputs=x_input, outputs=x_out)
+#     model.compile('adam', loss='mse')
+#     model.summary()
+#     # model.fit(x, y, epochs=10)
+
 if __name__ == '__main__':
     print(__name__)
     from keras.datasets import cifar10
     from keras.utils import np_utils
     from keras.layers import Input
-    from keras.layers.advanced_activations import LeakyReLU
+    from keras.layers.advanced_activations import LeakyReLU, ELU
     from keras.optimizers import SGD
     from keras.layers import Dense, Flatten, Conv2D, MaxPooling2D
     from keras.models import Model
@@ -185,11 +254,17 @@ if __name__ == '__main__':
     conv_settings_dict = {'tower_1': {'reduction_filter': 64,
                                       'tower_filter': 64,
                                       'tower_kernel': (3, 3),
-                                      'activation': LeakyReLU},
+                                      'activation': LeakyReLU},
                           'tower_2': {'reduction_filter': 64,
                                       'tower_filter': 64,
                                       'tower_kernel': (5, 5),
-                                      'activation': 'relu'}
+                                      'activation': 'relu',
+                                      'padding': 'SymPad2D'},
+                          'tower_3': {'reduction_filter': 64,
+                                      'tower_filter': 64,
+                                      'tower_kernel': (1, 1),
+                                      'activation': ELU,
+                                      'padding': ReflectionPadding2D}
                           }
     pool_settings_dict = {'pool_kernel': (3, 3),
                           'tower_filter': 64,
@@ -207,17 +282,21 @@ if __name__ == '__main__':
 
     # create inception net
     inception_net = InceptionModelBase()
-    output = inception_net.inception_block(input_img, conv_settings_dict, pool_settings_dict)
+    output = inception_net.inception_block(input_img, conv_settings_dict, pool_settings_dict, batch_normalisation=True)
     output = Flatten()(output)
     output = Dense(10, activation='softmax')(output)
     model = Model(inputs=input_img, outputs=output)
     print(model.summary())
 
     # compile
-    epochs = 10
+    epochs = 1
     lrate = 0.01
     decay = lrate/epochs
     sgd = SGD(lr=lrate, momentum=0.9, decay=decay, nesterov=False)
     model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
     print(X_train.shape)
     keras.utils.plot_model(model, to_file='model.pdf', show_shapes=True, show_layer_names=True)
+    # model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test))
+    print('test')
+
+
diff --git a/test/test_model_modules/test_inception_model.py b/test/test_model_modules/test_inception_model.py
index f9d30ea7740cbb0f07497e9898ae3d4f449216ac..9dee30788c34cd8d1a7572947ea2e568ac2006b7 100644
--- a/test/test_model_modules/test_inception_model.py
+++ b/test/test_model_modules/test_inception_model.py
@@ -1,8 +1,8 @@
 import keras
 import pytest
-import re
 
 from src.model_modules.inception_model import InceptionModelBase
+from src.model_modules.advanced_paddings import ReflectionPadding2D, SymmetricPadding2D
 from src.helpers import PyTestRegex
 
 
@@ -34,7 +34,8 @@ class TestInceptionModelBase:
         assert base.block_part_name() == 'a'
 
     def test_create_conv_tower_3x3(self, base, input_x):
-        opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
+        opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3),
+                'padding': 'SymPad2D'}
         tower = base.create_conv_tower(**opts)
         # check last element of tower (activation)
         assert base.part_of_block == 1
@@ -46,12 +47,17 @@ class TestInceptionModelBase:
         conv_layer = self.step_in(act_layer)
         assert isinstance(conv_layer, keras.layers.Conv2D)
         assert conv_layer.filters == 32
-        assert conv_layer.padding == 'same'
+        assert conv_layer.padding == 'valid'
         assert conv_layer.kernel_size == (3, 3)
         assert conv_layer.strides == (1, 1)
         assert conv_layer.name == "Block_0a_3x3"
+        # check previous element of tower (padding)
+        pad_layer = self.step_in(conv_layer)
+        assert isinstance(pad_layer, SymmetricPadding2D)
+        assert pad_layer.padding == ((1, 1), (1, 1))
+        assert pad_layer.name == 'Block_0a_Pad'
         # check previous element of tower (activation)
-        act_layer2 = self.step_in(conv_layer)
+        act_layer2 = self.step_in(pad_layer)
         assert isinstance(act_layer2, keras.layers.advanced_activations.ReLU)
         assert act_layer2.name == "Block_0a_act_1"
         # check previous element of tower (conv2D)
@@ -59,7 +65,49 @@ class TestInceptionModelBase:
         assert isinstance(conv_layer2, keras.layers.Conv2D)
         assert conv_layer2.filters == 64
         assert conv_layer2.kernel_size == (1, 1)
-        assert conv_layer2.padding == 'same'
+        assert conv_layer2.padding == 'valid'
+        assert conv_layer2.name == 'Block_0a_1x1'
+        assert conv_layer2.input._keras_shape == (None, 32, 32, 3)
+
+    def test_create_conv_tower_3x3_batch_norm(self, base, input_x):
+        # import keras
+        opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3),
+                'padding': 'SymPad2D', 'batch_normalisation': True}
+        tower = base.create_conv_tower(**opts)
+        # check last element of tower (activation)
+        assert base.part_of_block == 1
+        # assert tower.name == 'Block_0a_act_2/Relu:0'
+        assert tower.name == 'Block_0a_act_2_1/Relu:0'
+        act_layer = tower._keras_history[0]
+        assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
+        assert act_layer.name == "Block_0a_act_2"
+        # check previous element of tower (batch_normal)
+        batch_layer = self.step_in(act_layer)
+        assert isinstance(batch_layer, keras.layers.BatchNormalization)
+        assert batch_layer.name == 'Block_0a_BN'
+        # check previous element of tower (conv2D)
+        conv_layer = self.step_in(batch_layer)
+        assert isinstance(conv_layer, keras.layers.Conv2D)
+        assert conv_layer.filters == 32
+        assert conv_layer.padding == 'valid'
+        assert conv_layer.kernel_size == (3, 3)
+        assert conv_layer.strides == (1, 1)
+        assert conv_layer.name == "Block_0a_3x3"
+        # check previous element of tower (padding)
+        pad_layer = self.step_in(conv_layer)
+        assert isinstance(pad_layer, SymmetricPadding2D)
+        assert pad_layer.padding == ((1, 1), (1, 1))
+        assert pad_layer.name == 'Block_0a_Pad'
+        # check previous element of tower (activation)
+        act_layer2 = self.step_in(pad_layer)
+        assert isinstance(act_layer2, keras.layers.advanced_activations.ReLU)
+        assert act_layer2.name == "Block_0a_act_1"
+        # check previous element of tower (conv2D)
+        conv_layer2 = self.step_in(act_layer2)
+        assert isinstance(conv_layer2, keras.layers.Conv2D)
+        assert conv_layer2.filters == 64
+        assert conv_layer2.kernel_size == (1, 1)
+        assert conv_layer2.padding == 'valid'
         assert conv_layer2.name == 'Block_0a_1x1'
         assert conv_layer2.input._keras_shape == (None, 32, 32, 3)
 
@@ -83,7 +131,7 @@ class TestInceptionModelBase:
         tower = base.create_conv_tower(**opts)
         # check last element of tower (activation)
         assert base.part_of_block == 1
-        assert tower.name == 'Block_0a_act_1_1/Relu:0'
+        assert tower.name == 'Block_0a_act_1_2/Relu:0'
         act_layer = tower._keras_history[0]
         assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
         assert act_layer.name == "Block_0a_act_1"
@@ -91,7 +139,7 @@ class TestInceptionModelBase:
         conv_layer = self.step_in(act_layer)
         assert isinstance(conv_layer, keras.layers.Conv2D)
         assert conv_layer.filters == 32
-        assert conv_layer.padding == 'same'
+        assert conv_layer.padding == 'valid'
         assert conv_layer.kernel_size == (1, 1)
         assert conv_layer.strides == (1, 1)
         assert conv_layer.name == "Block_0a_1x1"
@@ -109,7 +157,7 @@ class TestInceptionModelBase:
         tower = base.create_pool_tower(**opts)
         # check last element of tower (activation)
         assert base.part_of_block == 1
-        assert tower.name == 'Block_0a_act_1_3/Relu:0'
+        assert tower.name == 'Block_0a_act_1_4/Relu:0'
         act_layer = tower._keras_history[0]
         assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
         assert act_layer.name == "Block_0a_act_1"
@@ -117,7 +165,7 @@ class TestInceptionModelBase:
         conv_layer = self.step_in(act_layer)
         assert isinstance(conv_layer, keras.layers.Conv2D)
         assert conv_layer.filters == 32
-        assert conv_layer.padding == 'same'
+        assert conv_layer.padding == 'valid'
         assert conv_layer.kernel_size == (1, 1)
         assert conv_layer.strides == (1, 1)
         assert conv_layer.name == "Block_0a_1x1"
@@ -126,7 +174,12 @@ class TestInceptionModelBase:
         assert isinstance(pool_layer, keras.layers.pooling.MaxPooling2D)
         assert pool_layer.name == "Block_0a_MaxPool"
         assert pool_layer.pool_size == (3, 3)
-        assert pool_layer.padding == 'same'
+        assert pool_layer.padding == 'valid'
+        # check previous element of tower(padding)
+        pad_layer = self.step_in(pool_layer)
+        assert isinstance(pad_layer, keras.layers.convolutional.ZeroPadding2D)
+        assert pad_layer.name == "Block_0a_Pad"
+        assert pad_layer.padding == ((1, 1), (1, 1))
         # check avg pool tower
         opts = {'input_x': input_x, 'pool_kernel': (3, 3), 'tower_filter': 32}
         tower = base.create_pool_tower(max_pooling=False, **opts)
@@ -134,12 +187,19 @@ class TestInceptionModelBase:
         assert isinstance(pool_layer, keras.layers.pooling.AveragePooling2D)
         assert pool_layer.name == "Block_0b_AvgPool"
         assert pool_layer.pool_size == (3, 3)
-        assert pool_layer.padding == 'same'
+        assert pool_layer.padding == 'valid'
 
     def test_inception_block(self, base, input_x):
-        conv = {'tower_1': {'reduction_filter': 64, 'tower_kernel': (3, 3), 'tower_filter': 64},
-                'tower_2': {'reduction_filter': 64, 'tower_kernel': (5, 5), 'tower_filter': 64, 'activation': 'tanh'}}
-        pool = {'pool_kernel': (3, 3), 'tower_filter': 64}
+        conv = {'tower_1': {'reduction_filter': 64,
+                            'tower_kernel': (3, 3),
+                            'tower_filter': 64, },
+                'tower_2': {'reduction_filter': 64,
+                            'tower_kernel': (5, 5),
+                            'tower_filter': 64,
+                            'activation': 'tanh',
+                            'padding': 'SymPad2D', },
+                }
+        pool = {'pool_kernel': (3, 3), 'tower_filter': 64, 'padding': ReflectionPadding2D}
         opts = {'input_x': input_x, 'tower_conv_parts': conv, 'tower_pool_parts': pool}
         block = base.inception_block(**opts)
         assert base.number_of_blocks == 1
@@ -153,8 +213,18 @@ class TestInceptionModelBase:
         assert block_pool2.name == PyTestRegex(r'Block_1d_act_1(_\d*)?/Relu:0')
         assert self.step_in(block_1a._keras_history[0]).name == "Block_1a_3x3"
         assert self.step_in(block_1b._keras_history[0]).name == "Block_1b_5x5"
+        assert self.step_in(block_1a._keras_history[0], depth=2).name == 'Block_1a_Pad'
+        assert isinstance(self.step_in(block_1a._keras_history[0], depth=2), keras.layers.ZeroPadding2D)
+        assert self.step_in(block_1b._keras_history[0], depth=2).name == 'Block_1b_Pad'
+        assert isinstance(self.step_in(block_1b._keras_history[0], depth=2), SymmetricPadding2D)
+        # pooling
         assert isinstance(self.step_in(block_pool1._keras_history[0], depth=2), keras.layers.pooling.MaxPooling2D)
+        assert self.step_in(block_pool1._keras_history[0], depth=3).name == 'Block_1c_Pad'
+        assert isinstance(self.step_in(block_pool1._keras_history[0], depth=3), ReflectionPadding2D)
+
         assert isinstance(self.step_in(block_pool2._keras_history[0], depth=2), keras.layers.pooling.AveragePooling2D)
+        assert self.step_in(block_pool2._keras_history[0], depth=3).name == 'Block_1d_Pad'
+        assert isinstance(self.step_in(block_pool2._keras_history[0], depth=3), ReflectionPadding2D)
         # check naming of concat layer
         assert block.name == PyTestRegex('Block_1_Co(_\d*)?/concat:0')
         assert block._keras_history[0].name == 'Block_1_Co'
@@ -171,15 +241,84 @@ class TestInceptionModelBase:
         assert block_2b.name == PyTestRegex(r'Block_2b_act_2_tanh(_\d*)?/Tanh:0')
         assert block_pool.name == PyTestRegex(r'Block_2c_act_1(_\d*)?/Relu:0')
         assert self.step_in(block_2a._keras_history[0]).name == "Block_2a_3x3"
+        assert self.step_in(block_2a._keras_history[0], depth=2).name == "Block_2a_Pad"
+        assert isinstance(self.step_in(block_2a._keras_history[0], depth=2), keras.layers.ZeroPadding2D)
+        # block 2b
         assert self.step_in(block_2b._keras_history[0]).name == "Block_2b_5x5"
+        assert self.step_in(block_2b._keras_history[0], depth=2).name == "Block_2b_Pad"
+        assert isinstance(self.step_in(block_2b._keras_history[0], depth=2), SymmetricPadding2D)
+        # block pool
         assert isinstance(self.step_in(block_pool._keras_history[0], depth=2), keras.layers.pooling.MaxPooling2D)
+        assert self.step_in(block_pool._keras_history[0], depth=3).name == 'Block_2c_Pad'
+        assert isinstance(self.step_in(block_pool._keras_history[0], depth=3), ReflectionPadding2D)
         # check naming of concat layer
         assert block.name == PyTestRegex(r'Block_2_Co(_\d*)?/concat:0')
         assert block._keras_history[0].name == 'Block_2_Co'
         assert isinstance(block._keras_history[0], keras.layers.merge.Concatenate)
 
+    def test_inception_block_invalid_batchnorm(self, base, input_x):
+        conv = {'tower_1': {'reduction_filter': 64,
+                            'tower_kernel': (3, 3),
+                            'tower_filter': 64, },
+                'tower_2': {'reduction_filter': 64,
+                            'tower_kernel': (5, 5),
+                            'tower_filter': 64,
+                            'activation': 'tanh',
+                            'padding': 'SymPad2D', },
+                }
+        pool = {'pool_kernel': (3, 3), 'tower_filter': 64, 'padding': ReflectionPadding2D, 'max_pooling': 'yes'}
+        opts = {'input_x': input_x, 'tower_conv_parts': conv, 'tower_pool_parts': pool, }
+        with pytest.raises(AttributeError) as einfo:
+            block = base.inception_block(**opts)
+        assert "max_pooling has to be either a bool or empty. Given was: yes" in str(einfo.value)
+
     def test_batch_normalisation(self, base, input_x):
         base.part_of_block += 1
         bn = base.batch_normalisation(input_x)._keras_history[0]
         assert isinstance(bn, keras.layers.normalization.BatchNormalization)
         assert bn.name == "Block_0a_BN"
+
+    def test_padding_layer_zero_padding(self, base, input_x):
+        padding_size = ((1, 1), (0, 0))
+        zp = base.padding_layer('ZeroPad2D')
+        assert zp == keras.layers.convolutional.ZeroPadding2D
+        assert base.padding_layer('ZeroPadding2D') == keras.layers.convolutional.ZeroPadding2D
+        assert base.padding_layer(keras.layers.ZeroPadding2D) == keras.layers.convolutional.ZeroPadding2D
+        assert zp.__name__ == 'ZeroPadding2D'
+        zp_ap = zp(padding=padding_size)(input_x)
+        assert zp_ap._keras_history[0].padding == ((1, 1), (0, 0))
+
+    def test_padding_layer_sym_padding(self, base, input_x):
+        padding_size = ((1, 1), (0, 0))
+        zp = base.padding_layer('SymPad2D')
+        assert zp == SymmetricPadding2D
+        assert base.padding_layer('SymmetricPadding2D') == SymmetricPadding2D
+        assert base.padding_layer(SymmetricPadding2D) == SymmetricPadding2D
+        assert zp.__name__ == 'SymmetricPadding2D'
+        zp_ap = zp(padding=padding_size)(input_x)
+        assert zp_ap._keras_history[0].padding == ((1, 1), (0, 0))
+
+    def test_padding_layer_ref_padding(self, base, input_x):
+        padding_size = ((1, 1), (0, 0))
+        zp = base.padding_layer('RefPad2D')
+        assert zp == ReflectionPadding2D
+        assert base.padding_layer('ReflectionPadding2D') == ReflectionPadding2D
+        assert base.padding_layer(ReflectionPadding2D) == ReflectionPadding2D
+        assert zp.__name__ == 'ReflectionPadding2D'
+        zp_ap = zp(padding=padding_size)(input_x)
+        assert zp_ap._keras_history[0].padding == ((1, 1), (0, 0))
+
+    def test_padding_layer_raises(self, base, input_x):
+        with pytest.raises(NotImplementedError) as einfo:
+            base.padding_layer('FalsePadding2D')
+        assert "`'FalsePadding2D'' is not implemented as padding. " \
+               "Use one of those: i) `RefPad2D', ii) `SymPad2D', iii) `ZeroPad2D'" in str(einfo.value)
+        with pytest.raises(TypeError) as einfo:
+            base.padding_layer(keras.layers.Conv2D)
+        assert "`Conv2D' is not a valid padding layer type. Use one of those: "\
+               "i) ReflectionPadding2D, ii) SymmetricPadding2D, iii) ZeroPadding2D" in str(einfo.value)
+
+
+
+
+