diff --git a/src/model_modules/advanced_paddings.py b/src/model_modules/advanced_paddings.py
index 97fcadb833ce8e4dc45044f6319f4d3d0ead97d1..f5a386fb69f0e51b134a96789223d1c0af65557b 100644
--- a/src/model_modules/advanced_paddings.py
+++ b/src/model_modules/advanced_paddings.py
@@ -12,7 +12,7 @@ from keras.utils.generic_utils import transpose_shape
 from keras.backend.common import normalize_data_format
 
 
-class pad_utils:
+class PadUtils:
     """
     Helper class for advanced paddings
     """
@@ -76,8 +76,16 @@ class pad_utils:
                                  'Found: ' + str(padding))
             height_padding = conv_utils.normalize_tuple(padding[0], 2,
                                                         '1st entry of padding')
+            if not all(k >= 0 for k in height_padding):
+                raise ValueError('The `1st entry of padding` argument must '
+                                 f'be >= 0. Received: {padding[0]} of type '
+                                 f'{type(padding[0])}')
             width_padding = conv_utils.normalize_tuple(padding[1], 2,
                                                        '2nd entry of padding')
+            if not all(k >= 0 for k in width_padding):
+                raise ValueError('The `2nd entry of padding` argument must '
+                                 f'be >= 0. Received: {padding[1]} of type '
+                                 f'{type(padding[1])}')
             normalized_padding = (height_padding, width_padding)
         else:
             raise ValueError('`padding` should be either an int, '
@@ -85,7 +93,7 @@ class pad_utils:
                              '(symmetric_height_pad, symmetric_width_pad), '
                              'or a tuple of 2 tuples of 2 ints '
                              '((top_pad, bottom_pad), (left_pad, right_pad)). '
-                             'Found: ' + str(padding))
+                             f'Found: {padding} of type {type(padding)}')
         return normalized_padding
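
Reviewer note: a minimal sketch of how the tightened validation behaves, assuming the module is importable as `src.model_modules.advanced_paddings` (adjust to the project layout); return values follow the `((top, bottom), (left, right))` normalization used throughout this class.

```python
# Sketch only: the import path is an assumption, adjust to your layout.
from src.model_modules.advanced_paddings import PadUtils

# Accepted forms are normalized to ((top, bottom), (left, right)):
PadUtils.check_padding_format(padding=(2, 3))            # ((2, 2), (3, 3))
PadUtils.check_padding_format(padding=((1, 2), (3, 4)))  # ((1, 2), (3, 4))

# Negative entries now fail fast instead of reaching tf.pad:
try:
    PadUtils.check_padding_format(padding=(-1, 2))
except ValueError as err:
    print(err)  # The `1st entry of padding` argument must be >= 0. ...
```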
 
 
@@ -151,13 +159,13 @@ class ReflectionPadding2D(_ZeroPadding):
                  padding=(1, 1),
                  data_format=None,
                  **kwargs):
-        normalized_padding = pad_utils.check_padding_format(padding=padding)
+        normalized_padding = PadUtils.check_padding_format(padding=padding)
         super(ReflectionPadding2D, self).__init__(normalized_padding,
                                                   data_format,
                                                   **kwargs)
 
     def call(self, inputs, mask=None):
-        pattern = pad_utils.spatial_2d_padding(padding=self.padding, data_format=self.data_format)
+        pattern = PadUtils.spatial_2d_padding(padding=self.padding, data_format=self.data_format)
         return tf.pad(inputs, pattern, 'REFLECT')
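
Reviewer note: for anyone verifying the `'REFLECT'` mode this layer delegates to, a self-contained sketch of the `tf.pad` call (TF 2.x eager execution assumed); `'REFLECT'` mirrors around the border values without repeating them.

```python
import tensorflow as tf

x = tf.reshape(tf.range(1., 10.), (1, 3, 3, 1))  # NHWC input, values 1..9
pattern = [[0, 0], [1, 1], [1, 1], [0, 0]]       # pad H and W by 1 per side
print(tf.pad(x, pattern, 'REFLECT')[0, :, :, 0])
# [[5. 4. 5. 6. 5.]
#  [2. 1. 2. 3. 2.]
#  [5. 4. 5. 6. 5.]
#  [8. 7. 8. 9. 8.]
#  [5. 4. 5. 6. 5.]]
```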
 
 
@@ -222,13 +230,13 @@ class SymmetricPadding2D(_ZeroPadding):
                  padding=(1, 1),
                  data_format=None,
                  **kwargs):
-        normalized_padding = pad_utils.check_padding_format(padding=padding)
+        normalized_padding = PadUtils.check_padding_format(padding=padding)
         super(SymmetricPadding2D, self).__init__(normalized_padding,
                                                  data_format,
                                                  **kwargs)
 
     def call(self, inputs, mask=None):
-        pattern = pad_utils.spatial_2d_padding(padding=self.padding, data_format=self.data_format)
+        pattern = PadUtils.spatial_2d_padding(padding=self.padding, data_format=self.data_format)
         return tf.pad(inputs, pattern, 'SYMMETRIC')
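
Reviewer note: the only behavioural difference from `ReflectionPadding2D` is the mode string passed to `tf.pad`: `'SYMMETRIC'` repeats the border value, `'REFLECT'` does not. A one-line contrast (TF 2.x eager execution assumed):

```python
import tensorflow as tf

x = tf.constant([[1., 2., 3.]])
print(tf.pad(x, [[0, 0], [2, 2]], 'REFLECT'))    # [[3. 2. 1. 2. 3. 2. 1.]]
print(tf.pad(x, [[0, 0], [2, 2]], 'SYMMETRIC'))  # [[2. 1. 1. 2. 3. 3. 2.]]
```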
 
 
@@ -241,11 +249,11 @@ if __name__ == '__main__':
     y = x.mean(axis=(1, 2))
 
     x_input = Input(shape=x.shape[1:])
-    pad1 = pad_utils.get_padding_for_same(kernel_1)
+    pad1 = PadUtils.get_padding_for_same(kernel_1)
     x_out = ReflectionPadding2D(padding=pad1, name="RefPAD")(x_input)
-    x_out = Conv2D(1, kernel_size=kernel_1, activation='relu')(x_out)
+    x_out = Conv2D(5, kernel_size=kernel_1, activation='relu')(x_out)
 
-    pad2 = pad_utils.get_padding_for_same(kernel_2)
+    pad2 = PadUtils.get_padding_for_same(kernel_2)
     x_out = SymmetricPadding2D(padding=pad2, name="SymPAD")(x_out)
     x_out = Conv2D(2, kernel_size=kernel_2, activation='relu')(x_out)
     x_out = Flatten()(x_out)
@@ -254,4 +262,3 @@ if __name__ == '__main__':
     model = Model(inputs=x_input, outputs=x_out)
     model.compile('adam', loss='mse')
     model.summary()
-    # hist = model.fit(x, y, epochs=10)
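
Reviewer note: the demo pads with `PadUtils.get_padding_for_same` before each `'valid'` convolution so spatial dims are preserved. A hypothetical stand-in for the arithmetic it presumably implements for stride-1, odd kernels (the actual `kernel_1`/`kernel_2` values are defined above this hunk and not shown here):

```python
# Hypothetical helper, not the repository's implementation (stride 1 assumed):
def padding_for_same(kernel_size):
    # A 'valid' conv yields input + 2 * pad - (kernel - 1) per dimension, so
    # pad = (kernel - 1) // 2 keeps the size unchanged for odd kernels.
    return tuple((k - 1) // 2 for k in kernel_size)

assert padding_for_same((3, 3)) == (1, 1)
assert padding_for_same((5, 1)) == (2, 0)
```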