diff --git a/src/model_modules/inception_model.py b/src/model_modules/inception_model.py
index c369885349a4208fd579d735a4b2da6a59542a0b..c3fe27476272b18b4e4630e641f43ab03b555f9b 100644
--- a/src/model_modules/inception_model.py
+++ b/src/model_modules/inception_model.py
@@ -5,6 +5,7 @@ import logging
 
 import keras
 import keras.layers as layers
+from src.model_modules.advanced_paddings import PadUtils, ReflectionPadding2D, SymmetricPadding2D
 
 
 class InceptionModelBase:
@@ -56,25 +57,30 @@ class InceptionModelBase:
         logging.debug(f'Inception Block with activation: {activation}')
 
         block_name = f'Block_{self.number_of_blocks}{self.block_part_name()}_{tower_kernel[0]}x{tower_kernel[1]}'
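+        # explicit padding size that reproduces keras' padding='same' behaviour for this kernel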
+        padding_size = PadUtils.get_padding_for_same(tower_kernel)
 
         if tower_kernel == (1, 1):
             tower = layers.Conv2D(tower_filter,
                                   tower_kernel,
-                                  padding='same',
+                                  padding='valid',
                                   kernel_regularizer=regularizer,
                                   name=block_name)(input_x)
             # tower = self.act(tower, activation, **act_settings)
         else:
             tower = layers.Conv2D(reduction_filter,
                                   (1, 1),
-                                  padding='same',
+                                  padding='valid',
                                   kernel_regularizer=regularizer,
                                   name=f'Block_{self.number_of_blocks}{self.block_part_name()}_1x1')(input_x)
             tower = self.act(tower, activation, **act_settings)
 
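+            # pad explicitly, then convolve with padding='valid' so the output keeps its spatial size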
+            tower = self.padding_layer('SymPad2D')(padding=padding_size,
+                                                   name=f'Block_{self.number_of_blocks}{self.block_part_name()}_Pad'
+                                                   )(tower)
+
             tower = layers.Conv2D(tower_filter,
                                   tower_kernel,
-                                  padding='same',
+                                  padding='valid',
                                   kernel_regularizer=regularizer,
                                   name=block_name)(tower)
         if batch_normalisation:
@@ -101,6 +107,29 @@ class InceptionModelBase:
         else:
             return act_name.__name__
 
+    @staticmethod
+    def padding_layer(padding):
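+        """
+        Return the 2D padding layer class that corresponds to ``padding``.
+
+        ``padding`` may be one of the names 'RefPad2D'/'ReflectionPadding2D',
+        'SymPad2D'/'SymmetricPadding2D', 'ZeroPad2D'/'ZeroPadding2D', or one of the classes
+        ReflectionPadding2D, SymmetricPadding2D and keras.layers.ZeroPadding2D itself.
+        """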
+        allowed_paddings = {
+            'RefPad2D': ReflectionPadding2D, 'ReflectionPadding2D': ReflectionPadding2D,
+            'SymPad2D': SymmetricPadding2D, 'SymmetricPadding2D': SymmetricPadding2D,
+            'ZeroPad2D': keras.layers.ZeroPadding2D, 'ZeroPadding2D': keras.layers.ZeroPadding2D
+        }
+        if isinstance(padding, str):
+            try:
+                pad2D = allowed_paddings[padding]
+            except KeyError as einfo:
+                raise NotImplementedError(
+                    f"`{einfo}' is not implemented as padding. "
+                    "Use one of the following: i) `RefPad2D', ii) `SymPad2D', iii) `ZeroPad2D'")
+        else:
+            if padding in allowed_paddings.values():
+                pad2D = padding
+            else:
+                raise ValueError(f"`{padding.__name__}' is not a valid padding layer type. "
+                                 "Use one of the following: "
+                                 "i) `ReflectionPadding2D', ii) `SymmetricPadding2D', iii) `ZeroPadding2D'")
+        return pad2D
+
     def create_pool_tower(self, input_x, pool_kernel, tower_filter, activation='relu', max_pooling=True, **kwargs):
         """
         This function creates a "MaxPooling tower block"
@@ -114,6 +143,7 @@ class InceptionModelBase:
         self.part_of_block += 1
         self.act_number = 1
         act_settings = kwargs.get('act_settings', {})
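+        # explicit padding size so the pooling layer below can run with padding='valid'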
+        padding_size = PadUtils.get_padding_for_same(kernel_size=pool_kernel)
 
         # pooling block
         block_name = f"Block_{self.number_of_blocks}{self.block_part_name()}_"
@@ -123,7 +153,9 @@ class InceptionModelBase:
         else:
             block_type = "AvgPool"
             pooling = layers.AveragePooling2D
-        tower = pooling(pool_kernel, strides=(1, 1), padding='same', name=block_name+block_type)(input_x)
+
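+        # replace the former padding='same' pooling with explicit symmetric padding followed by 'valid' pooling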
+        tower = self.padding_layer('SymPad2D')(padding=padding_size, name=block_name+'Pad')(input_x)
+        tower = pooling(pool_kernel, strides=(1, 1), padding='valid', name=block_name+block_type)(tower)
 
         # convolution block
         tower = layers.Conv2D(tower_filter, (1, 1), padding='same', name=block_name+"1x1")(tower)
@@ -169,6 +201,35 @@ class InceptionModelBase:
         return block
 
 
+if __name__ == '__main__':
+    from keras.models import Model
+    from keras.layers import Conv2D, Flatten, Dense, Input
+    import numpy as np
+
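+    # minimal smoke test for the custom padding layers: build a small model that uses
+    # ReflectionPadding2D and SymmetricPadding2D via padding_layer() and print its summary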
+    kernel_1 = (3, 3)
+    kernel_2 = (5, 5)
+    x = np.array(range(2000)).reshape(-1, 10, 10, 1)
+    y = x.mean(axis=(1, 2))
+
+    x_input = Input(shape=x.shape[1:])
+    pad1 = PadUtils.get_padding_for_same(kernel_size=kernel_1)
+    x_out = InceptionModelBase.padding_layer('RefPad2D')(padding=pad1, name="RefPAD1")(x_input)
+    # x_out = ReflectionPadding2D(padding=pad1, name="RefPAD")(x_input)
+    x_out = Conv2D(5, kernel_size=kernel_1, activation='relu')(x_out)
+
+    pad2 = PadUtils.get_padding_for_same(kernel_size=kernel_2)
+    x_out = InceptionModelBase.padding_layer(SymmetricPadding2D)(padding=pad2, name="SymPAD1")(x_out)
+    # x_out = SymmetricPadding2D(padding=pad2, name="SymPAD")(x_out)
+    x_out = Conv2D(2, kernel_size=kernel_2, activation='relu')(x_out)
+    x_out = Flatten()(x_out)
+    x_out = Dense(1, activation='linear')(x_out)
+
+    model = Model(inputs=x_input, outputs=x_out)
+    model.compile('adam', loss='mse')
+    model.summary()
+    # model.fit(x, y, epochs=10)
+
 if __name__ == '__main__':
     print(__name__)
     from keras.datasets import cifar10
@@ -212,10 +273,13 @@ if __name__ == '__main__':
     print(model.summary())
 
     # compile
-    epochs = 10
+    epochs = 1
     lrate = 0.01
     decay = lrate/epochs
     sgd = SGD(lr=lrate, momentum=0.9, decay=decay, nesterov=False)
     model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
     print(X_train.shape)
     keras.utils.plot_model(model, to_file='model.pdf', show_shapes=True, show_layer_names=True)
+    # model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test))