Skip to content
Snippets Groups Projects
Commit 1c4d0a1f authored by leufen1's avatar leufen1
Browse files

updated tests for inception model

parent 755ea62e
Branches
Tags
2 merge requests: !6 "updated inception model and data prep class", !3 "updated inception model"
......@@ -14,6 +14,12 @@ class TestInceptionModelBase:
def input_x(self):
return keras.Input(shape=(32, 32, 3))
@staticmethod
def step_in(element, depth=1):
for _ in range(depth):
element = element.input._keras_history[0]
return element
def test_init(self, base):
assert base.number_of_blocks == 0
assert base.part_of_block == 0
......@@ -28,89 +34,141 @@ class TestInceptionModelBase:
def test_create_conv_tower_3x3(self, base, input_x):
opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
tower = base.create_conv_tower(**opts)
# check second element of tower
# check last element of tower (activation)
assert base.part_of_block == 1
assert isinstance(tower, tf.Tensor)
assert tower.name == 'Block_0a_3x3/Relu:0'
assert isinstance(tower._keras_history[0], keras.layers.Conv2D)
assert tower._keras_history[0].filters == 32
assert isinstance(tower._keras_history[0].input, tf.Tensor)
assert tower._keras_history[0].padding == 'same'
assert tower._keras_history[0].kernel_size == (3, 3)
assert tower._keras_history[0].strides == (1, 1)
# check first element of tower
assert isinstance(tower._keras_history[0].input._keras_history[0], keras.layers.Conv2D)
assert tower._keras_history[0].input._keras_history[0].filters == 64
assert tower._keras_history[0].input._keras_history[0].kernel_size == (1, 1)
assert tower._keras_history[0].input._keras_history[0].padding == 'same'
assert tower._keras_history[0].input._keras_history[0].name == 'Block_0a_1x1'
assert tower._keras_history[0].input._keras_history[0].input._keras_shape == (None, 32, 32, 3)
assert tower.name == 'Block_0a_act_2/Relu:0'
act_layer = tower._keras_history[0]
assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
assert act_layer.name == "Block_0a_act_2"
# check previous element of tower (conv2D)
conv_layer = self.step_in(act_layer)
assert isinstance(conv_layer, keras.layers.Conv2D)
assert conv_layer.filters == 32
assert conv_layer.padding == 'same'
assert conv_layer.kernel_size == (3, 3)
assert conv_layer.strides == (1, 1)
assert conv_layer.name == "Block_0a_3x3"
# check previous element of tower (activation)
act_layer2 = self.step_in(conv_layer)
assert isinstance(act_layer2, keras.layers.advanced_activations.ReLU)
assert act_layer2.name == "Block_0a_act_1"
# check previous element of tower (conv2D)
conv_layer2 = self.step_in(act_layer2)
assert isinstance(conv_layer2, keras.layers.Conv2D)
assert conv_layer2.filters == 64
assert conv_layer2.kernel_size == (1, 1)
assert conv_layer2.padding == 'same'
assert conv_layer2.name == 'Block_0a_1x1'
assert conv_layer2.input._keras_shape == (None, 32, 32, 3)
def test_create_conv_tower_3x3_activation(self, base, input_x):
opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
# create tower with standard activation function
tower = base.create_conv_tower(activation='tanh', **opts)
assert tower.name == 'Block_0a_act_2_tanh/Tanh:0'
act_layer = tower._keras_history[0]
assert isinstance(act_layer, keras.layers.core.Activation)
assert act_layer.name == "Block_0a_act_2_tanh"
# create tower with activation function class
tower = base.create_conv_tower(activation=keras.layers.LeakyReLU, **opts)
assert tower.name == 'Block_0b_act_2/LeakyRelu:0'
act_layer = tower._keras_history[0]
assert isinstance(act_layer, keras.layers.advanced_activations.LeakyReLU)
assert act_layer.name == "Block_0b_act_2"
def test_create_conv_tower_1x1(self, base, input_x):
opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (1, 1)}
tower = base.create_conv_tower(**opts)
# check second element of tower
# check last element of tower (activation)
assert base.part_of_block == 1
assert isinstance(tower, tf.Tensor)
assert tower.name == 'Block_0a_1x1_1/Relu:0'
assert isinstance(tower._keras_history[0], keras.layers.Conv2D)
assert tower._keras_history[0].filters == 32
assert isinstance(tower._keras_history[0].input, tf.Tensor)
assert tower._keras_history[0].input._keras_shape == (None, 32, 32, 3)
assert tower._keras_history[0].padding == 'same'
assert tower._keras_history[0].kernel_size == (1, 1)
assert tower._keras_history[0].strides == (1, 1)
assert tower.name == 'Block_0a_act_1_1/Relu:0'
act_layer = tower._keras_history[0]
assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
assert act_layer.name == "Block_0a_act_1"
# check previous element of tower (conv2D)
conv_layer = self.step_in(act_layer)
assert isinstance(conv_layer, keras.layers.Conv2D)
assert conv_layer.filters == 32
assert conv_layer.padding == 'same'
assert conv_layer.kernel_size == (1, 1)
assert conv_layer.strides == (1, 1)
assert conv_layer.name == "Block_0a_1x1"
assert conv_layer.input._keras_shape == (None, 32, 32, 3)
def test_create_conv_towers(self, base, input_x):
opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
_ = base.create_conv_tower(**opts)
tower = base.create_conv_tower(**opts)
assert base.part_of_block == 2
assert tower.name == 'Block_0b_3x3/Relu:0'
assert tower.name == 'Block_0b_act_2_1/Relu:0'
def test_create_pool_tower(self, base, input_x):
opts = {'input_x': input_x, 'pool_kernel': (3, 3), 'tower_filter': 32}
tower = base.create_pool_tower(**opts)
# check second element of tower
assert base.part_of_block == 0
assert isinstance(tower, tf.Tensor)
assert tower.name == 'conv2d_1/Relu:0'
assert isinstance(tower._keras_history[0], keras.layers.Conv2D)
assert tower._keras_history[0].filters == 32
assert tower._keras_history[0].padding == 'same'
assert tower._keras_history[0].kernel_size == (1, 1)
assert tower._keras_history[0].strides == (1, 1)
# check first element of tower
assert isinstance(tower._keras_history[0].input, tf.Tensor)
assert tower._keras_history[0].input._keras_history[0].pool_size == (3, 3)
assert tower._keras_history[0].input._keras_history[0].padding == 'same'
assert tower._keras_history[0].input._keras_history[0].name == 'max_pooling2d_1'
# check last element of tower (activation)
assert base.part_of_block == 1
assert tower.name == 'Block_0a_act_1_3/Relu:0'
act_layer = tower._keras_history[0]
assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
assert act_layer.name == "Block_0a_act_1"
# check previous element of tower (conv2D)
conv_layer = self.step_in(act_layer)
assert isinstance(conv_layer, keras.layers.Conv2D)
assert conv_layer.filters == 32
assert conv_layer.padding == 'same'
assert conv_layer.kernel_size == (1, 1)
assert conv_layer.strides == (1, 1)
assert conv_layer.name == "Block_0a_1x1"
# check previous element of tower (maxpool)
pool_layer = self.step_in(conv_layer)
assert isinstance(pool_layer, keras.layers.pooling.MaxPooling2D)
assert pool_layer.name == "Block_0a_MaxPool"
assert pool_layer.pool_size == (3, 3)
assert pool_layer.padding == 'same'
# check avg pool tower
opts = {'input_x': input_x, 'pool_kernel': (3, 3), 'tower_filter': 32}
tower = base.create_pool_tower(max_pooling=False, **opts)
pool_layer = self.step_in(tower._keras_history[0], depth=2)
assert isinstance(pool_layer, keras.layers.pooling.AveragePooling2D)
assert pool_layer.name == "Block_0b_AvgPool"
assert pool_layer.pool_size == (3, 3)
assert pool_layer.padding == 'same'
def test_inception_block(self, base, input_x):
conv = {'tower_1': {'reduction_filter': 64, 'tower_kernel': (3, 3), 'tower_filter': 64},
'tower_2': {'reduction_filter': 64, 'tower_kernel': (5, 5), 'tower_filter': 64}}
'tower_2': {'reduction_filter': 64, 'tower_kernel': (5, 5), 'tower_filter': 64, 'activation': 'tanh'}}
pool = {'pool_kernel': (3, 3), 'tower_filter': 64}
opts = {'input_x': input_x, 'tower_conv_parts': conv, 'tower_pool_parts': pool}
block = base.inception_block(**opts)
assert base.number_of_blocks == 1
concatenated = block._keras_history[0].input
assert len(concatenated) == 3
block_1a, block_1b, block_pool = concatenated
assert block_1a.name == 'Block_1a_3x3/Relu:0'
assert block_1b.name == 'Block_1b_5x5/Relu:0'
assert block_pool.name == 'conv2d_1/Relu:0'
assert len(concatenated) == 4
block_1a, block_1b, block_pool1, block_pool2 = concatenated
assert block_1a.name == 'Block_1a_act_2/Relu:0'
assert block_1b.name == 'Block_1b_act_2_tanh/Tanh:0'
assert block_pool1.name == 'Block_1c_act_1/Relu:0'
assert block_pool2.name == 'Block_1d_act_1/Relu:0'
assert self.step_in(block_1a._keras_history[0]).name == "Block_1a_3x3"
assert self.step_in(block_1b._keras_history[0]).name == "Block_1b_5x5"
assert isinstance(self.step_in(block_pool1._keras_history[0], depth=2), keras.layers.pooling.MaxPooling2D)
assert isinstance(self.step_in(block_pool2._keras_history[0], depth=2), keras.layers.pooling.AveragePooling2D)
# next block
opts['input_x'] = block
opts['tower_pool_parts']['max_pooling'] = True
block = base.inception_block(**opts)
assert base.number_of_blocks == 2
concatenated = block._keras_history[0].input
assert len(concatenated) == 3
block_1a, block_1b, block_pool = concatenated
assert block_1a.name == 'Block_2a_3x3/Relu:0'
assert block_1b.name == 'Block_2b_5x5/Relu:0'
assert block_pool.name == 'conv2d_2/Relu:0'
m = keras.models.Model(input=input_x, output=block)
keras.utils.plot_model(m, to_file='model.pdf', show_shapes=True, show_layer_names=True)
block_2a, block_2b, block_pool = concatenated
assert block_2a.name == 'Block_2a_act_2/Relu:0'
assert block_2b.name == 'Block_2b_act_2_tanh/Tanh:0'
assert block_pool.name == 'Block_2c_act_1/Relu:0'
assert self.step_in(block_2a._keras_history[0]).name == "Block_2a_3x3"
assert self.step_in(block_2b._keras_history[0]).name == "Block_2b_5x5"
assert isinstance(self.step_in(block_pool._keras_history[0], depth=2), keras.layers.pooling.MaxPooling2D)
def test_batch_normalisation(self):
pass
def test_batch_normalisation(self, base, input_x):
base.part_of_block += 1
bn = base.batch_normalisation(input_x)._keras_history[0]
assert isinstance(bn, keras.layers.normalization.BatchNormalization)
assert bn.name == "Block_0a_BN"
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment