Skip to content
Snippets Groups Projects
Commit 573b3e19 authored by Felix Kleinert's avatar Felix Kleinert
Browse files

update tests: check for BN layers; some standard Keras layer names seem to have some inconsistencies. #62

parent 73a5fdd5
Branches
Tags
2 merge requests!59Develop,!53Felix issue062 apply advanced pooling in inception block feat
Pipeline #31047 passed
...@@ -44,6 +44,7 @@ class TestInceptionModelBase: ...@@ -44,6 +44,7 @@ class TestInceptionModelBase:
tower = base.create_conv_tower(**opts) tower = base.create_conv_tower(**opts)
# check last element of tower (activation) # check last element of tower (activation)
assert base.part_of_block == 1 assert base.part_of_block == 1
# assert tower.name == 'Block_0a_act_2_1/Relu:0'
assert tower.name == 'Block_0a_act_2/Relu:0' assert tower.name == 'Block_0a_act_2/Relu:0'
act_layer = tower._keras_history[0] act_layer = tower._keras_history[0]
assert isinstance(act_layer, keras.layers.advanced_activations.ReLU) assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
...@@ -74,6 +75,48 @@ class TestInceptionModelBase: ...@@ -74,6 +75,48 @@ class TestInceptionModelBase:
assert conv_layer2.name == 'Block_0a_1x1' assert conv_layer2.name == 'Block_0a_1x1'
assert conv_layer2.input._keras_shape == (None, 32, 32, 3) assert conv_layer2.input._keras_shape == (None, 32, 32, 3)
def test_create_conv_tower_3x3_batch_norm(self, base, input_x):
    """Check that a 3x3 conv tower built with ``batch_normalisation=True``
    inserts a BatchNormalization layer between the conv and its activation.

    Walks the tower back to front via ``step_in``:
    ReLU <- BN <- Conv2D(3x3) <- SymmetricPadding2D <- ReLU <- Conv2D(1x1).
    """
    opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3),
            'padding': 'SymPad2D', 'batch_normalisation': True}
    tower = base.create_conv_tower(**opts)
    assert base.part_of_block == 1
    # NOTE(review): the trailing "_1" comes from keras' global layer-name
    # counter, so this assertion depends on how many identically named layers
    # earlier tests created — fragile; confirm against test ordering. #62
    assert tower.name == 'Block_0a_act_2_1/Relu:0'
    # last element of the tower: the final activation
    act_layer = tower._keras_history[0]
    assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
    assert act_layer.name == "Block_0a_act_2"
    # previous element: the batch normalisation layer under test
    batch_layer = self.step_in(act_layer)
    assert isinstance(batch_layer, keras.layers.BatchNormalization)
    assert batch_layer.name == 'Block_0a_BN'
    # previous element: the 3x3 convolution
    conv_layer = self.step_in(batch_layer)
    assert isinstance(conv_layer, keras.layers.Conv2D)
    assert conv_layer.filters == 32
    assert conv_layer.padding == 'valid'
    assert conv_layer.kernel_size == (3, 3)
    assert conv_layer.strides == (1, 1)
    assert conv_layer.name == "Block_0a_3x3"
    # previous element: symmetric padding in front of the 3x3 conv
    pad_layer = self.step_in(conv_layer)
    assert isinstance(pad_layer, SymmetricPadding2D)
    assert pad_layer.padding == ((1, 1), (1, 1))
    assert pad_layer.name == 'Block_0a_Pad'
    # previous element: activation of the 1x1 reduction conv
    act_layer2 = self.step_in(pad_layer)
    assert isinstance(act_layer2, keras.layers.advanced_activations.ReLU)
    assert act_layer2.name == "Block_0a_act_1"
    # first element: the 1x1 reduction convolution on the raw input
    conv_layer2 = self.step_in(act_layer2)
    assert isinstance(conv_layer2, keras.layers.Conv2D)
    assert conv_layer2.filters == 64
    assert conv_layer2.kernel_size == (1, 1)
    assert conv_layer2.padding == 'valid'
    assert conv_layer2.name == 'Block_0a_1x1'
    assert conv_layer2.input._keras_shape == (None, 32, 32, 3)
def test_create_conv_tower_3x3_activation(self, base, input_x): def test_create_conv_tower_3x3_activation(self, base, input_x):
# import keras # import keras
opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)} opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
...@@ -96,7 +139,8 @@ class TestInceptionModelBase: ...@@ -96,7 +139,8 @@ class TestInceptionModelBase:
tower = base.create_conv_tower(**opts) tower = base.create_conv_tower(**opts)
# check last element of tower (activation) # check last element of tower (activation)
assert base.part_of_block == 1 assert base.part_of_block == 1
assert tower.name == 'Block_0a_act_1_1/Relu:0' assert tower.name == 'Block_0a_act_1_2/Relu:0'
# assert tower.name == 'Block_0a_act_1_1/Relu:0'
act_layer = tower._keras_history[0] act_layer = tower._keras_history[0]
assert isinstance(act_layer, keras.layers.advanced_activations.ReLU) assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
assert act_layer.name == "Block_0a_act_1" assert act_layer.name == "Block_0a_act_1"
...@@ -125,7 +169,7 @@ class TestInceptionModelBase: ...@@ -125,7 +169,7 @@ class TestInceptionModelBase:
# check last element of tower (activation) # check last element of tower (activation)
assert base.part_of_block == 1 assert base.part_of_block == 1
# assert tower.name == 'Block_0a_act_1/Relu:0' # assert tower.name == 'Block_0a_act_1/Relu:0'
assert tower.name == 'Block_0a_act_1_3/Relu:0' assert tower.name == 'Block_0a_act_1_4/Relu:0'
act_layer = tower._keras_history[0] act_layer = tower._keras_history[0]
assert isinstance(act_layer, keras.layers.advanced_activations.ReLU) assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
assert act_layer.name == "Block_0a_act_1" assert act_layer.name == "Block_0a_act_1"
...@@ -218,6 +262,22 @@ class TestInceptionModelBase: ...@@ -218,6 +262,22 @@ class TestInceptionModelBase:
assert self.step_in(block_pool._keras_history[0], depth=3).name == 'Block_2c_Pad' assert self.step_in(block_pool._keras_history[0], depth=3).name == 'Block_2c_Pad'
assert isinstance(self.step_in(block_pool._keras_history[0], depth=3), ReflectionPadding2D) assert isinstance(self.step_in(block_pool._keras_history[0], depth=3), ReflectionPadding2D)
def test_inception_block_invalid_batchnorm(self, base, input_x):
    """An invalid (non-bool, non-empty) ``max_pooling`` value in the pooling
    part must make ``inception_block`` raise an AttributeError.

    NOTE(review): the method name mentions batchnorm, but the invalid option
    exercised here is ``max_pooling='yes'`` — presumably a copy/paste leftover;
    confirm the intended name. #62
    """
    conv = {'tower_1': {'reduction_filter': 64,
                        'tower_kernel': (3, 3),
                        'tower_filter': 64, },
            'tower_2': {'reduction_filter': 64,
                        'tower_kernel': (5, 5),
                        'tower_filter': 64,
                        'activation': 'tanh',
                        'padding': 'SymPad2D', },
            }
    # 'yes' is deliberately not a bool -> must be rejected
    pool = {'pool_kernel': (3, 3), 'tower_filter': 64, 'padding': ReflectionPadding2D, 'max_pooling': 'yes'}
    opts = {'input_x': input_x, 'tower_conv_parts': conv, 'tower_pool_parts': pool, }
    with pytest.raises(AttributeError) as einfo:
        base.inception_block(**opts)  # return value irrelevant; dropped unused local
    assert "max_pooling has to be either a bool or empty. Given was: yes" in str(einfo.value)
def test_batch_normalisation(self, base, input_x): def test_batch_normalisation(self, base, input_x):
# import keras # import keras
base.part_of_block += 1 base.part_of_block += 1
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment