diff --git a/test/test_model_modules/test_inception_model.py b/test/test_model_modules/test_inception_model.py
index fc1c6bb6aebe5bbd6d365a855fdc4ef872ba6655..6847b24f738428550f4b59faf4c00f962b90208e 100644
--- a/test/test_model_modules/test_inception_model.py
+++ b/test/test_model_modules/test_inception_model.py
@@ -44,6 +44,7 @@ class TestInceptionModelBase:
         tower = base.create_conv_tower(**opts)
         # check last element of tower (activation)
         assert base.part_of_block == 1
+        # assert tower.name == 'Block_0a_act_2_1/Relu:0'
         assert tower.name == 'Block_0a_act_2/Relu:0'
         act_layer = tower._keras_history[0]
         assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
@@ -74,6 +75,48 @@ class TestInceptionModelBase:
         assert conv_layer2.name == 'Block_0a_1x1'
         assert conv_layer2.input._keras_shape == (None, 32, 32, 3)
 
+    def test_create_conv_tower_3x3_batch_norm(self, base, input_x):
+        # import keras
+        opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3),
+                'padding': 'SymPad2D', 'batch_normalisation': True}
+        tower = base.create_conv_tower(**opts)
+        # check last element of tower (activation)
+        assert base.part_of_block == 1
+        # assert tower.name == 'Block_0a_act_2/Relu:0'
+        assert tower.name == 'Block_0a_act_2_1/Relu:0'
+        act_layer = tower._keras_history[0]
+        assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
+        assert act_layer.name == "Block_0a_act_2"
+        # check previous element of tower (batch_normal)
+        batch_layer = self.step_in(act_layer)
+        assert isinstance(batch_layer, keras.layers.BatchNormalization)
+        assert batch_layer.name == 'Block_0a_BN'
+        # check previous element of tower (conv2D)
+        conv_layer = self.step_in(batch_layer)
+        assert isinstance(conv_layer, keras.layers.Conv2D)
+        assert conv_layer.filters == 32
+        assert conv_layer.padding == 'valid'
+        assert conv_layer.kernel_size == (3, 3)
+        assert conv_layer.strides == (1, 1)
+        assert conv_layer.name == "Block_0a_3x3"
+        # check previous element of tower (padding)
+        pad_layer = self.step_in(conv_layer)
+        assert isinstance(pad_layer, SymmetricPadding2D)
+        assert pad_layer.padding == ((1, 1), (1, 1))
+        assert pad_layer.name == 'Block_0a_Pad'
+        # check previous element of tower (activation)
+        act_layer2 = self.step_in(pad_layer)
+        assert isinstance(act_layer2, keras.layers.advanced_activations.ReLU)
+        assert act_layer2.name == "Block_0a_act_1"
+        # check previous element of tower (conv2D)
+        conv_layer2 = self.step_in(act_layer2)
+        assert isinstance(conv_layer2, keras.layers.Conv2D)
+        assert conv_layer2.filters == 64
+        assert conv_layer2.kernel_size == (1, 1)
+        assert conv_layer2.padding == 'valid'
+        assert conv_layer2.name == 'Block_0a_1x1'
+        assert conv_layer2.input._keras_shape == (None, 32, 32, 3)
+
     def test_create_conv_tower_3x3_activation(self, base, input_x):
         # import keras
         opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
@@ -96,7 +139,8 @@ class TestInceptionModelBase:
         tower = base.create_conv_tower(**opts)
         # check last element of tower (activation)
         assert base.part_of_block == 1
-        assert tower.name == 'Block_0a_act_1_1/Relu:0'
+        assert tower.name == 'Block_0a_act_1_2/Relu:0'
+        # assert tower.name == 'Block_0a_act_1_1/Relu:0'
         act_layer = tower._keras_history[0]
         assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
         assert act_layer.name == "Block_0a_act_1"
@@ -125,7 +169,7 @@ class TestInceptionModelBase:
         # check last element of tower (activation)
         assert base.part_of_block == 1
         # assert tower.name == 'Block_0a_act_1/Relu:0'
-        assert tower.name == 'Block_0a_act_1_3/Relu:0'
+        assert tower.name == 'Block_0a_act_1_4/Relu:0'
         act_layer = tower._keras_history[0]
         assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
         assert act_layer.name == "Block_0a_act_1"
@@ -218,6 +262,22 @@ class TestInceptionModelBase:
         assert self.step_in(block_pool._keras_history[0], depth=3).name == 'Block_2c_Pad'
         assert isinstance(self.step_in(block_pool._keras_history[0], depth=3), ReflectionPadding2D)
 
+    def test_inception_block_invalid_batchnorm(self, base, input_x):
+        conv = {'tower_1': {'reduction_filter': 64,
+                            'tower_kernel': (3, 3),
+                            'tower_filter': 64, },
+                'tower_2': {'reduction_filter': 64,
+                            'tower_kernel': (5, 5),
+                            'tower_filter': 64,
+                            'activation': 'tanh',
+                            'padding': 'SymPad2D', },
+                }
+        pool = {'pool_kernel': (3, 3), 'tower_filter': 64, 'padding': ReflectionPadding2D, 'max_pooling': 'yes'}
+        opts = {'input_x': input_x, 'tower_conv_parts': conv, 'tower_pool_parts': pool, }
+        with pytest.raises(AttributeError) as einfo:
+            block = base.inception_block(**opts)
+        assert "max_pooling has to be either a bool or empty. Given was: yes" in str(einfo.value)
+
     def test_batch_normalisation(self, base, input_x):
         # import keras
         base.part_of_block += 1