diff --git a/test/test_model_modules/test_inception_model.py b/test/test_model_modules/test_inception_model.py
index 7ab88f41d049ab10d96c4562e9c7543fb5538ad3..0ed975d054841d9d4cfb8b4c964fa0cd2d4e2667 100644
--- a/test/test_model_modules/test_inception_model.py
+++ b/test/test_model_modules/test_inception_model.py
@@ -5,6 +5,8 @@ from mlair.helpers import PyTestRegex
 from mlair.model_modules.advanced_paddings import ReflectionPadding2D, SymmetricPadding2D
 from mlair.model_modules.inception_model import InceptionModelBase
 
+from tensorflow.keras.layers import ELU, ReLU, LeakyReLU
+
 
 class TestInceptionModelBase:
 
@@ -41,7 +43,7 @@ class TestInceptionModelBase:
         assert base.part_of_block == 1
         assert tower.name == 'Block_0a_act_2/Relu:0'
         act_layer = tower._keras_history[0]
-        assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
+        assert isinstance(act_layer, ReLU)
         assert act_layer.name == "Block_0a_act_2"
         # check previous element of tower (conv2D)
         conv_layer = self.step_in(act_layer)
@@ -58,7 +60,7 @@ class TestInceptionModelBase:
         assert pad_layer.name == 'Block_0a_Pad'
         # check previous element of tower (activation)
         act_layer2 = self.step_in(pad_layer)
-        assert isinstance(act_layer2, keras.layers.advanced_activations.ReLU)
+        assert isinstance(act_layer2, ReLU)
         assert act_layer2.name == "Block_0a_act_1"
         # check previous element of tower (conv2D)
         conv_layer2 = self.step_in(act_layer2)
@@ -67,19 +69,18 @@ class TestInceptionModelBase:
         assert conv_layer2.kernel_size == (1, 1)
         assert conv_layer2.padding == 'valid'
         assert conv_layer2.name == 'Block_0a_1x1'
-        assert conv_layer2.input._keras_shape == (None, 32, 32, 3)
+        assert conv_layer2.input_shape == (None, 32, 32, 3)
 
     def test_create_conv_tower_3x3_batch_norm(self, base, input_x):
-        # import keras
         opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3),
                 'padding': 'SymPad2D', 'batch_normalisation': True}
         tower = base.create_conv_tower(**opts)
         # check last element of tower (activation)
         assert base.part_of_block == 1
         # assert tower.name == 'Block_0a_act_2/Relu:0'
-        assert tower.name == 'Block_0a_act_2_1/Relu:0'
+        assert tower.name == 'Block_0a_act_2/Relu:0'
         act_layer = tower._keras_history[0]
-        assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
+        assert isinstance(act_layer, ReLU)
         assert act_layer.name == "Block_0a_act_2"
         # check previous element of tower (batch_normal)
         batch_layer = self.step_in(act_layer)
@@ -100,7 +101,7 @@ class TestInceptionModelBase:
         assert pad_layer.name == 'Block_0a_Pad'
         # check previous element of tower (activation)
         act_layer2 = self.step_in(pad_layer)
-        assert isinstance(act_layer2, keras.layers.advanced_activations.ReLU)
+        assert isinstance(act_layer2, ReLU)
         assert act_layer2.name == "Block_0a_act_1"
         # check previous element of tower (conv2D)
         conv_layer2 = self.step_in(act_layer2)
@@ -109,7 +110,7 @@ class TestInceptionModelBase:
         assert conv_layer2.kernel_size == (1, 1)
         assert conv_layer2.padding == 'valid'
         assert conv_layer2.name == 'Block_0a_1x1'
-        assert conv_layer2.input._keras_shape == (None, 32, 32, 3)
+        assert conv_layer2.input_shape == (None, 32, 32, 3)
 
     def test_create_conv_tower_3x3_activation(self, base, input_x):
         opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
@@ -117,13 +118,13 @@ class TestInceptionModelBase:
         tower = base.create_conv_tower(activation='tanh', **opts)
         assert tower.name == 'Block_0a_act_2_tanh/Tanh:0'
         act_layer = tower._keras_history[0]
-        assert isinstance(act_layer, keras.layers.core.Activation)
+        assert isinstance(act_layer, keras.layers.Activation)
         assert act_layer.name == "Block_0a_act_2_tanh"
         # create tower with activation function class
         tower = base.create_conv_tower(activation=keras.layers.LeakyReLU, **opts)
         assert tower.name == 'Block_0b_act_2/LeakyRelu:0'
         act_layer = tower._keras_history[0]
-        assert isinstance(act_layer, keras.layers.advanced_activations.LeakyReLU)
+        assert isinstance(act_layer, LeakyReLU)
         assert act_layer.name == "Block_0b_act_2"
 
     def test_create_conv_tower_1x1(self, base, input_x):
@@ -131,9 +132,9 @@ class TestInceptionModelBase:
         tower = base.create_conv_tower(**opts)
         # check last element of tower (activation)
         assert base.part_of_block == 1
-        assert tower.name == 'Block_0a_act_1_2/Relu:0'
+        assert tower.name == 'Block_0a_act_1/Relu:0'
         act_layer = tower._keras_history[0]
-        assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
+        assert isinstance(act_layer, ReLU)
         assert act_layer.name == "Block_0a_act_1"
         # check previous element of tower (conv2D)
         conv_layer = self.step_in(act_layer)
@@ -143,23 +144,23 @@ class TestInceptionModelBase:
         assert conv_layer.kernel_size == (1, 1)
         assert conv_layer.strides == (1, 1)
         assert conv_layer.name == "Block_0a_1x1"
-        assert conv_layer.input._keras_shape == (None, 32, 32, 3)
+        assert conv_layer.input_shape == (None, 32, 32, 3)
 
     def test_create_conv_towers(self, base, input_x):
         opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
         _ = base.create_conv_tower(**opts)
         tower = base.create_conv_tower(**opts)
         assert base.part_of_block == 2
-        assert tower.name == 'Block_0b_act_2_1/Relu:0'
+        assert tower.name == 'Block_0b_act_2/Relu:0'
 
     def test_create_pool_tower(self, base, input_x):
         opts = {'input_x': input_x, 'pool_kernel': (3, 3), 'tower_filter': 32}
         tower = base.create_pool_tower(**opts)
         # check last element of tower (activation)
         assert base.part_of_block == 1
-        assert tower.name == 'Block_0a_act_1_4/Relu:0'
+        assert tower.name == 'Block_0a_act_1/Relu:0'
         act_layer = tower._keras_history[0]
-        assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
+        assert isinstance(act_layer, ReLU)
         assert act_layer.name == "Block_0a_act_1"
         # check previous element of tower (conv2D)
         conv_layer = self.step_in(act_layer)
@@ -171,20 +172,20 @@ class TestInceptionModelBase:
         assert conv_layer.name == "Block_0a_1x1"
         # check previous element of tower (maxpool)
         pool_layer = self.step_in(conv_layer)
-        assert isinstance(pool_layer, keras.layers.pooling.MaxPooling2D)
+        assert isinstance(pool_layer, keras.layers.MaxPooling2D)
         assert pool_layer.name == "Block_0a_MaxPool"
         assert pool_layer.pool_size == (3, 3)
         assert pool_layer.padding == 'valid'
         # check previous element of tower(padding)
         pad_layer = self.step_in(pool_layer)
-        assert isinstance(pad_layer, keras.layers.convolutional.ZeroPadding2D)
+        assert isinstance(pad_layer, keras.layers.ZeroPadding2D)
         assert pad_layer.name == "Block_0a_Pad"
         assert pad_layer.padding == ((1, 1), (1, 1))
         # check avg pool tower
         opts = {'input_x': input_x, 'pool_kernel': (3, 3), 'tower_filter': 32}
         tower = base.create_pool_tower(max_pooling=False, **opts)
         pool_layer = self.step_in(tower._keras_history[0], depth=2)
-        assert isinstance(pool_layer, keras.layers.pooling.AveragePooling2D)
+        assert isinstance(pool_layer, keras.layers.AveragePooling2D)
         assert pool_layer.name == "Block_0b_AvgPool"
         assert pool_layer.pool_size == (3, 3)
         assert pool_layer.padding == 'valid'
@@ -218,17 +219,17 @@ class TestInceptionModelBase:
         assert self.step_in(block_1b._keras_history[0], depth=2).name == 'Block_1b_Pad'
         assert isinstance(self.step_in(block_1b._keras_history[0], depth=2), SymmetricPadding2D)
         # pooling
-        assert isinstance(self.step_in(block_pool1._keras_history[0], depth=2), keras.layers.pooling.MaxPooling2D)
+        assert isinstance(self.step_in(block_pool1._keras_history[0], depth=2), keras.layers.MaxPooling2D)
         assert self.step_in(block_pool1._keras_history[0], depth=3).name == 'Block_1c_Pad'
         assert isinstance(self.step_in(block_pool1._keras_history[0], depth=3), ReflectionPadding2D)
 
-        assert isinstance(self.step_in(block_pool2._keras_history[0], depth=2), keras.layers.pooling.AveragePooling2D)
+        assert isinstance(self.step_in(block_pool2._keras_history[0], depth=2), keras.layers.AveragePooling2D)
         assert self.step_in(block_pool2._keras_history[0], depth=3).name == 'Block_1d_Pad'
         assert isinstance(self.step_in(block_pool2._keras_history[0], depth=3), ReflectionPadding2D)
         # check naming of concat layer
         assert block.name == PyTestRegex(r'Block_1_Co(_\d*)?/concat:0')
         assert block._keras_history[0].name == 'Block_1_Co'
-        assert isinstance(block._keras_history[0], keras.layers.merge.Concatenate)
+        assert isinstance(block._keras_history[0], keras.layers.Concatenate)
         # next block
         opts['input_x'] = block
         opts['tower_pool_parts']['max_pooling'] = True
@@ -248,13 +249,13 @@ class TestInceptionModelBase:
         assert self.step_in(block_2b._keras_history[0], depth=2).name == "Block_2b_Pad"
         assert isinstance(self.step_in(block_2b._keras_history[0], depth=2), SymmetricPadding2D)
         # block pool
-        assert isinstance(self.step_in(block_pool._keras_history[0], depth=2), keras.layers.pooling.MaxPooling2D)
+        assert isinstance(self.step_in(block_pool._keras_history[0], depth=2), keras.layers.MaxPooling2D)
         assert self.step_in(block_pool._keras_history[0], depth=3).name == 'Block_2c_Pad'
         assert isinstance(self.step_in(block_pool._keras_history[0], depth=3), ReflectionPadding2D)
         # check naming of concat layer
         assert block.name == PyTestRegex(r'Block_2_Co(_\d*)?/concat:0')
         assert block._keras_history[0].name == 'Block_2_Co'
-        assert isinstance(block._keras_history[0], keras.layers.merge.Concatenate)
+        assert isinstance(block._keras_history[0], keras.layers.Concatenate)
 
     def test_inception_block_invalid_batchnorm(self, base, input_x):
         conv = {'tower_1': {'reduction_filter': 64,
@@ -275,5 +276,5 @@ class TestInceptionModelBase:
     def test_batch_normalisation(self, base, input_x):
         base.part_of_block += 1
         bn = base.batch_normalisation(input_x)._keras_history[0]
-        assert isinstance(bn, keras.layers.normalization.BatchNormalization)
+        assert isinstance(bn, keras.layers.BatchNormalization)
         assert bn.name == "Block_0a_BN"
diff --git a/test/test_model_modules/test_model_class.py b/test/test_model_modules/test_model_class.py
index b3c6f7d9c6ff2d09c3e7de85c8a68ce19b3de557..f171fb8e899e728ce9747ae9dd9dfdc366ad7fa1 100644
--- a/test/test_model_modules/test_model_class.py
+++ b/test/test_model_modules/test_model_class.py
@@ -21,7 +21,7 @@ class TestIntelliO3_ts_architecture:
 
     def test_set_model(self, mpm):
         assert isinstance(mpm.model, keras.Model)
-        assert mpm.model.layers[0].output_shape == (None, 7, 1, 9)
+        assert mpm.model.layers[0].output_shape == [(None, 7, 1, 9)]
         # check output dimensions
         if isinstance(mpm.model.output_shape, tuple):
             assert mpm.model.output_shape == (None, 4)