test_inception_model.py

    import keras
    import pytest
    
    from src.model_modules.inception_model import InceptionModelBase
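    # note: these assertions rely on multi-backend Keras 2.x internals on the TF 1.x
    # graph backend (`_keras_history`, `_keras_shape`, tensor names such as '.../Relu:0')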
    
    
    class TestInceptionModelBase:
    
        @pytest.fixture
        def base(self):
            return InceptionModelBase()
    
        @pytest.fixture
        def input_x(self):
            return keras.Input(shape=(32, 32, 3))
    
        @staticmethod
        def step_in(element, depth=1):
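            """Walk `depth` layers back through the functional graph via the
            input tensor's `_keras_history` and return the layer found there."""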
            for _ in range(depth):
                element = element.input._keras_history[0]
            return element
    
        def test_init(self, base):
            assert base.number_of_blocks == 0
            assert base.part_of_block == 0
            assert base.ord_base == 96
            assert base.act_number == 0
    
        def test_block_part_name(self, base):
            assert base.block_part_name() == chr(96)
            base.part_of_block += 1
            assert base.block_part_name() == 'a'
    
        def test_create_conv_tower_3x3(self, base, input_x):
            opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
            tower = base.create_conv_tower(**opts)
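            # expected tower, front to back: input -> 1x1 reduction conv -> ReLU -> 3x3 conv -> ReLU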
            # check last element of tower (activation)
            assert base.part_of_block == 1
            assert tower.name == 'Block_0a_act_2/Relu:0'
            act_layer = tower._keras_history[0]
            assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
            assert act_layer.name == "Block_0a_act_2"
            # check previous element of tower (conv2D)
            conv_layer = self.step_in(act_layer)
            assert isinstance(conv_layer, keras.layers.Conv2D)
            assert conv_layer.filters == 32
            assert conv_layer.padding == 'same'
            assert conv_layer.kernel_size == (3, 3)
            assert conv_layer.strides == (1, 1)
            assert conv_layer.name == "Block_0a_3x3"
            # check previous element of tower (activation)
            act_layer2 = self.step_in(conv_layer)
            assert isinstance(act_layer2, keras.layers.advanced_activations.ReLU)
            assert act_layer2.name == "Block_0a_act_1"
            # check previous element of tower (conv2D)
            conv_layer2 = self.step_in(act_layer2)
            assert isinstance(conv_layer2, keras.layers.Conv2D)
            assert conv_layer2.filters == 64
            assert conv_layer2.kernel_size == (1, 1)
            assert conv_layer2.padding == 'same'
            assert conv_layer2.name == 'Block_0a_1x1'
            assert conv_layer2.input._keras_shape == (None, 32, 32, 3)
    
        def test_create_conv_tower_3x3_activation(self, base, input_x):
            opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
            # create tower with standard activation function
            tower = base.create_conv_tower(activation='tanh', **opts)
            assert tower.name == 'Block_0a_act_2_tanh/Tanh:0'
            act_layer = tower._keras_history[0]
            assert isinstance(act_layer, keras.layers.core.Activation)
            assert act_layer.name == "Block_0a_act_2_tanh"
            # create tower with activation function class
            tower = base.create_conv_tower(activation=keras.layers.LeakyReLU, **opts)
            assert tower.name == 'Block_0b_act_2/LeakyRelu:0'
            act_layer = tower._keras_history[0]
            assert isinstance(act_layer, keras.layers.advanced_activations.LeakyReLU)
            assert act_layer.name == "Block_0b_act_2"
    
        def test_create_conv_tower_1x1(self, base, input_x):
            opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (1, 1)}
            tower = base.create_conv_tower(**opts)
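            # with a 1x1 tower kernel no separate reduction conv is built: input -> 1x1 conv -> ReLU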
            # check last element of tower (activation)
            assert base.part_of_block == 1
            assert tower.name == 'Block_0a_act_1_1/Relu:0'
            act_layer = tower._keras_history[0]
            assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
            assert act_layer.name == "Block_0a_act_1"
            # check previous element of tower (conv2D)
            conv_layer = self.step_in(act_layer)
            assert isinstance(conv_layer, keras.layers.Conv2D)
            assert conv_layer.filters == 32
            assert conv_layer.padding == 'same'
            assert conv_layer.kernel_size == (1, 1)
            assert conv_layer.strides == (1, 1)
            assert conv_layer.name == "Block_0a_1x1"
            assert conv_layer.input._keras_shape == (None, 32, 32, 3)
    
        def test_create_conv_towers(self, base, input_x):
            opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32, 'tower_kernel': (3, 3)}
            _ = base.create_conv_tower(**opts)
            tower = base.create_conv_tower(**opts)
            assert base.part_of_block == 2
            assert tower.name == 'Block_0b_act_2_1/Relu:0'
    
        def test_create_pool_tower(self, base, input_x):
            opts = {'input_x': input_x, 'pool_kernel': (3, 3), 'tower_filter': 32}
            tower = base.create_pool_tower(**opts)
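            # expected tower, front to back: input -> 3x3 max pooling -> 1x1 conv -> ReLU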
            # check last element of tower (activation)
            assert base.part_of_block == 1
            assert tower.name == 'Block_0a_act_1_3/Relu:0'
            act_layer = tower._keras_history[0]
            assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
            assert act_layer.name == "Block_0a_act_1"
            # check previous element of tower (conv2D)
            conv_layer = self.step_in(act_layer)
            assert isinstance(conv_layer, keras.layers.Conv2D)
            assert conv_layer.filters == 32
            assert conv_layer.padding == 'same'
            assert conv_layer.kernel_size == (1, 1)
            assert conv_layer.strides == (1, 1)
            assert conv_layer.name == "Block_0a_1x1"
            # check previous element of tower (maxpool)
            pool_layer = self.step_in(conv_layer)
            assert isinstance(pool_layer, keras.layers.pooling.MaxPooling2D)
            assert pool_layer.name == "Block_0a_MaxPool"
            assert pool_layer.pool_size == (3, 3)
            assert pool_layer.padding == 'same'
            # check avg pool tower
            opts = {'input_x': input_x, 'pool_kernel': (3, 3), 'tower_filter': 32}
            tower = base.create_pool_tower(max_pooling=False, **opts)
            pool_layer = self.step_in(tower._keras_history[0], depth=2)
            assert isinstance(pool_layer, keras.layers.pooling.AveragePooling2D)
            assert pool_layer.name == "Block_0b_AvgPool"
            assert pool_layer.pool_size == (3, 3)
            assert pool_layer.padding == 'same'
    
        def test_inception_block(self, base, input_x):
            conv = {'tower_1': {'reduction_filter': 64, 'tower_kernel': (3, 3), 'tower_filter': 64},
                    'tower_2': {'reduction_filter': 64, 'tower_kernel': (5, 5), 'tower_filter': 64, 'activation': 'tanh'}}
            pool = {'pool_kernel': (3, 3), 'tower_filter': 64}
            opts = {'input_x': input_x, 'tower_conv_parts': conv, 'tower_pool_parts': pool}
            block = base.inception_block(**opts)
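            # the block should concatenate four towers: two conv towers plus a max- and an average-pooling tower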
            assert base.number_of_blocks == 1
            concatenated = block._keras_history[0].input
            assert len(concatenated) == 4
            block_1a, block_1b, block_pool1, block_pool2 = concatenated
            assert block_1a.name == 'Block_1a_act_2/Relu:0'  # <- keras/TF may append a numeric suffix here if the op name already exists in the graph
            assert block_1b.name == 'Block_1b_act_2_tanh/Tanh:0'
            assert block_pool1.name == 'Block_1c_act_1/Relu:0'
            assert block_pool2.name == 'Block_1d_act_1/Relu:0'
            assert self.step_in(block_1a._keras_history[0]).name == "Block_1a_3x3"
            assert self.step_in(block_1b._keras_history[0]).name == "Block_1b_5x5"
            assert isinstance(self.step_in(block_pool1._keras_history[0], depth=2), keras.layers.pooling.MaxPooling2D)
            assert isinstance(self.step_in(block_pool2._keras_history[0], depth=2), keras.layers.pooling.AveragePooling2D)
            # next block
            opts['input_x'] = block
            opts['tower_pool_parts']['max_pooling'] = True
            block = base.inception_block(**opts)
            assert base.number_of_blocks == 2
            concatenated = block._keras_history[0].input
            assert len(concatenated) == 3
            block_2a, block_2b, block_pool = concatenated
            assert block_2a.name == 'Block_2a_act_2/Relu:0'
            assert block_2b.name == 'Block_2b_act_2_tanh/Tanh:0'
            assert block_pool.name == 'Block_2c_act_1/Relu:0'
            assert self.step_in(block_2a._keras_history[0]).name == "Block_2a_3x3"
            assert self.step_in(block_2b._keras_history[0]).name == "Block_2b_5x5"
            assert isinstance(self.step_in(block_pool._keras_history[0], depth=2), keras.layers.pooling.MaxPooling2D)
    
        def test_batch_normalisation(self, base, input_x):
            base.part_of_block += 1
            bn = base.batch_normalisation(input_x)._keras_history[0]
            assert isinstance(bn, keras.layers.normalization.BatchNormalization)
            assert bn.name == "Block_0a_BN"
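
A minimal usage sketch (not part of the test file), assuming only the interface exercised by the tests above: `inception_block` takes a Keras tensor plus the tower settings and returns a Keras tensor. The classification head below (GlobalAveragePooling2D, Dense) is illustrative and not taken from the project.

    import keras

    from src.model_modules.inception_model import InceptionModelBase

    base = InceptionModelBase()
    conv_settings = {'tower_1': {'reduction_filter': 64, 'tower_kernel': (3, 3), 'tower_filter': 64},
                     'tower_2': {'reduction_filter': 64, 'tower_kernel': (5, 5), 'tower_filter': 64}}
    pool_settings = {'pool_kernel': (3, 3), 'tower_filter': 64}

    inputs = keras.Input(shape=(32, 32, 3))
    # one inception block: two conv towers plus the pooling towers, concatenated on the channel axis
    block = base.inception_block(input_x=inputs, tower_conv_parts=conv_settings, tower_pool_parts=pool_settings)
    # illustrative classification head, not part of InceptionModelBase
    x = keras.layers.GlobalAveragePooling2D()(block)
    outputs = keras.layers.Dense(10, activation='softmax')(x)
    model = keras.models.Model(inputs=inputs, outputs=outputs)
    model.compile(optimizer='adam', loss='categorical_crossentropy')

Since the block output is the concatenation of its towers, the first block in this configuration should carry 64 + 64 + 64 + 64 = 256 channels.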