diff --git a/mlair/model_modules/advanced_paddings.py b/mlair/model_modules/advanced_paddings.py
index b0f6c9c25dfd4eacb65e5cb59a267296a7f1ab72..7a16c2b6a0799fd67a7026e14ab046c434a54731 100644
--- a/mlair/model_modules/advanced_paddings.py
+++ b/mlair/model_modules/advanced_paddings.py
@@ -8,6 +8,7 @@ from typing import Union, Tuple
 
 import numpy as np
 import tensorflow as tf
+from tensorflow.keras import backend as K
 # from tensorflow.keras.backend.common import normalize_data_format
 from tensorflow.keras.layers import ZeroPadding2D
 # from tensorflow.keras.layers.convolutional import _ZeroPadding
@@ -194,6 +195,49 @@ class PadUtils:
         return normalized_padding
 
 
+class InputSpec(object):
+    """Specifies the ndim, dtype and shape of every input to a layer.
+    Every layer should expose (if appropriate) an `input_spec` attribute:
+    a list of instances of InputSpec (one per input tensor).
+    A None entry in a shape is compatible with any dimension;
+    a None shape is compatible with any shape.
+    # Arguments
+        dtype: Expected datatype of the input.
+        shape: Shape tuple, expected shape of the input
+            (may include None for unchecked axes).
+        ndim: Integer, expected rank of the input.
+        max_ndim: Integer, maximum rank of the input.
+        min_ndim: Integer, minimum rank of the input.
+        axes: Dictionary mapping integer axes to
+            a specific dimension value.
+    """
+
+    def __init__(self, dtype=None,
+                 shape=None,
+                 ndim=None,
+                 max_ndim=None,
+                 min_ndim=None,
+                 axes=None):
+        self.dtype = dtype
+        self.shape = shape
+        if shape is not None:
+            self.ndim = len(shape)
+        else:
+            self.ndim = ndim
+        self.max_ndim = max_ndim
+        self.min_ndim = min_ndim
+        self.axes = axes or {}
+
+    def __repr__(self):
+        spec = [('dtype=' + str(self.dtype)) if self.dtype else '',
+                ('shape=' + str(self.shape)) if self.shape else '',
+                ('ndim=' + str(self.ndim)) if self.ndim else '',
+                ('max_ndim=' + str(self.max_ndim)) if self.max_ndim else '',
+                ('min_ndim=' + str(self.min_ndim)) if self.min_ndim else '',
+                ('axes=' + str(self.axes)) if self.axes else '']
+        return 'InputSpec(%s)' % ', '.join(x for x in spec if x)
+
+
 class _ZeroPadding(Layer):
     """Abstract nD ZeroPadding layer (private, used as implementation base).
     # Arguments
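For orientation, a minimal usage sketch of the vendored InputSpec added above (not part of the diff). It only exercises the constructor and __repr__ shown in the hunk; the import path assumes the mlair package is importable once this change lands, and check_rank is a hypothetical helper illustrating the kind of validation a layer could perform against such a spec.

```python
import numpy as np

# Assumption: mlair is installed so the module is importable after this change.
from mlair.model_modules.advanced_paddings import InputSpec

# Passing shape fills in ndim automatically (ndim = len(shape) = 4).
spec = InputSpec(dtype="float32", shape=(None, 32, 32, 3))
print(spec)  # InputSpec(dtype=float32, shape=(None, 32, 32, 3), ndim=4)

# Rank-only spec, e.g. what a 2D padding layer would declare for its input.
rank_spec = InputSpec(ndim=4)


def check_rank(x: np.ndarray, spec: InputSpec) -> None:
    """Hypothetical check a layer could run against its input spec."""
    if spec.ndim is not None and x.ndim != spec.ndim:
        raise ValueError(f"expected rank {spec.ndim}, got rank {x.ndim}")


check_rank(np.zeros((1, 32, 32, 3)), rank_spec)  # passes silently
```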
diff --git a/mlair/model_modules/model_class.py b/mlair/model_modules/model_class.py
index be4f4b22715d8a8e75cd52b9f819cb391c15b354..1165999ebea04618e136268bd7440d4d5989233e 100644
--- a/mlair/model_modules/model_class.py
+++ b/mlair/model_modules/model_class.py
@@ -123,9 +123,9 @@ __date__ = '2020-05-12'
 import tensorflow.keras as keras
 
 from mlair.model_modules import AbstractModelClass
-#from mlair.model_modules.inception_model import InceptionModelBase
+from mlair.model_modules.inception_model import InceptionModelBase
 from mlair.model_modules.flatten import flatten_tail
-#from mlair.model_modules.advanced_paddings import PadUtils, Padding2D, SymmetricPadding2D
+from mlair.model_modules.advanced_paddings import PadUtils, Padding2D, SymmetricPadding2D
 from mlair.model_modules.loss import l_p_loss