Commit 72bc0b23 authored by Felix Kleinert

include call of padding layers within an inception block #62

parent 60c5244a
2 merge requests: !59 Develop, !53 Felix issue062 apply advanced pooling in inception block feat
@@ -54,6 +54,7 @@ class InceptionModelBase:
regularizer = kwargs.get('regularizer', keras.regularizers.l2(0.01))
bn_settings = kwargs.get('bn_settings', {})
act_settings = kwargs.get('act_settings', {})
padding = kwargs.get('padding', 'ZeroPad2D')
logging.debug(f'Inception Block with activation: {activation}')
block_name = f'Block_{self.number_of_blocks}{self.block_part_name()}_{tower_kernel[0]}x{tower_kernel[1]}'
@@ -74,7 +75,7 @@ class InceptionModelBase:
name=f'Block_{self.number_of_blocks}{self.block_part_name()}_1x1')(input_x)
tower = self.act(tower, activation, **act_settings)
tower = self.padding_layer('SymPad2D')(padding=padding_size,
tower = self.padding_layer(padding)(padding=padding_size,
name=f'Block_{self.number_of_blocks}{self.block_part_name()}_Pad'
)(tower)
@@ -116,19 +117,19 @@ class InceptionModelBase:
}
if isinstance(padding, str):
try:
pad2D = allowed_paddings[padding]
pad2d = allowed_paddings[padding]
except KeyError as einfo:
raise NotImplementedError(
f"`{einfo}' is not implemented as padding. "
"Use one of those: i) `RefPad2D', ii) `SymPad2D', iii) `ZeroPad2D'")
else:
if padding in allowed_paddings.values():
pad2D = padding
pad2d = padding
else:
raise ValueError(f"`{padding.__name__}' is not a valid padding padding. "
raise TypeError(f"`{padding.__name__}' is not a valid padding layer type. "
"Use one of those: "
"i) ReflectionPadding2D, ii) `SymmetricPadding2D', iii) `ZeroPadding2D'")
return pad2D
"i) ReflectionPadding2D, ii) SymmetricPadding2D, iii) ZeroPadding2D")
return pad2d
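A minimal usage sketch of the resolver above, mirroring the demo at the bottom of this file (the string aliases follow the allowed_paddings mapping; input_x is assumed to be a 4D Keras tensor, e.g. from keras.layers.Input):
pad_cls = InceptionModelBase.padding_layer('SymPad2D')                  # string alias resolves to SymmetricPadding2D
pad_cls = InceptionModelBase.padding_layer(keras.layers.ZeroPadding2D)  # a padding layer class is returned unchanged
tower = pad_cls(padding=(1, 1), name='ExamplePad')(input_x)             # apply the resolved layer like any Keras layer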
def create_pool_tower(self, input_x, pool_kernel, tower_filter, activation='relu', max_pooling=True, **kwargs):
"""
@@ -143,6 +144,7 @@ class InceptionModelBase:
self.part_of_block += 1
self.act_number = 1
act_settings = kwargs.get('act_settings', {})
padding = kwargs.get('padding', 'ZeroPad2D')
padding_size = PadUtils.get_padding_for_same(kernel_size=pool_kernel)
# pooling block
@@ -154,11 +156,11 @@ class InceptionModelBase:
block_type = "AvgPool"
pooling = layers.AveragePooling2D
tower = self.padding_layer('SymPad2D')(padding=padding_size, name=block_name+'Pad')(input_x)
tower = self.padding_layer(padding)(padding=padding_size, name=block_name+'Pad')(input_x)
tower = pooling(pool_kernel, strides=(1, 1), padding='valid', name=block_name+block_type)(tower)
# convolution block
tower = layers.Conv2D(tower_filter, (1, 1), padding='same', name=block_name+"1x1")(tower)
tower = layers.Conv2D(tower_filter, (1, 1), padding='valid', name=block_name+"1x1")(tower)
tower = self.act(tower, activation, **act_settings)
return tower
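Note that with a (1, 1) kernel the switch from padding='same' to padding='valid' in the 1x1 convolution above does not change output shapes, since a 1x1 convolution needs no padding. A brief sketch of calling the pool tower with the new padding option (input_x is again an assumed 4D Keras tensor):
tower = InceptionModelBase().create_pool_tower(input_x, pool_kernel=(3, 1),
                                               tower_filter=64, padding='RefPad2D')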
@@ -170,16 +172,22 @@ class InceptionModelBase:
:param tower_conv_parts: dict containing settings for parts of inception block; Example:
tower_conv_parts = {'tower_1': {'reduction_filter': 32,
'tower_filter': 64,
'tower_kernel': (3, 1)},
'tower_kernel': (3, 1),
'activation': 'relu',
'padding': 'SymPad2D'},
'tower_2': {'reduction_filter': 32,
'tower_filter': 64,
'tower_kernel': (5, 1)},
'tower_kernel': (5, 1),
'activation': LeakyReLU,
'padding': keras.layers.ZeroPadding2D},
'tower_3': {'reduction_filter': 32,
'tower_filter': 64,
'tower_kernel': (1, 1)},
'tower_kernel': (1, 1),
'activation': ELU,
'padding': src.model_modules.advanced_paddings.ReflectionPadding2D}
}
:param tower_pool_parts: dict containing settings for pool part of inception block; Example:
tower_pool_parts = {'pool_kernel': (3, 1), 'tower_filter': 64}
tower_pool_parts = {'pool_kernel': (3, 1), 'tower_filter': 64, 'padding': 'RefPad2D'}
:return:
"""
self.number_of_blocks += 1
@@ -201,41 +209,41 @@ class InceptionModelBase:
return block
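A compact restatement of the docstring example above, showing how the per-tower padding option is passed through inception_block (a sketch under the assumption that input_x is a 4D Keras tensor; aliases and classes follow the padding_layer mapping):
conv_parts = {'tower_1': {'reduction_filter': 32, 'tower_filter': 64,
                          'tower_kernel': (3, 1), 'activation': 'relu',
                          'padding': 'SymPad2D'},
              'tower_2': {'reduction_filter': 32, 'tower_filter': 64,
                          'tower_kernel': (5, 1), 'activation': 'relu',
                          'padding': keras.layers.ZeroPadding2D}}
pool_parts = {'pool_kernel': (3, 1), 'tower_filter': 64, 'padding': 'RefPad2D'}
block = InceptionModelBase().inception_block(input_x, conv_parts, pool_parts)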
if __name__ == '__main__':
from keras.models import Model
from keras.layers import Conv2D, Flatten, Dense, Input
import numpy as np
kernel_1 = (3, 3)
kernel_2 = (5, 5)
x = np.array(range(2000)).reshape(-1, 10, 10, 1)
y = x.mean(axis=(1, 2))
x_input = Input(shape=x.shape[1:])
pad1 = PadUtils.get_padding_for_same(kernel_size=kernel_1)
x_out = InceptionModelBase.padding_layer('RefPad2D')(padding=pad1, name="RefPAD1")(x_input)
# x_out = ReflectionPadding2D(padding=pad1, name="RefPAD")(x_input)
x_out = Conv2D(5, kernel_size=kernel_1, activation='relu')(x_out)
pad2 = PadUtils.get_padding_for_same(kernel_size=kernel_2)
x_out = InceptionModelBase.padding_layer(SymmetricPadding2D)(padding=pad2, name="SymPAD1")(x_out)
# x_out = SymmetricPadding2D(padding=pad2, name="SymPAD")(x_out)
x_out = Conv2D(2, kernel_size=kernel_2, activation='relu')(x_out)
x_out = Flatten()(x_out)
x_out = Dense(1, activation='linear')(x_out)
model = Model(inputs=x_input, outputs=x_out)
model.compile('adam', loss='mse')
model.summary()
# model.fit(x, y, epochs=10)
# if __name__ == '__main__':
# from keras.models import Model
# from keras.layers import Conv2D, Flatten, Dense, Input
# import numpy as np
#
#
# kernel_1 = (3, 3)
# kernel_2 = (5, 5)
# x = np.array(range(2000)).reshape(-1, 10, 10, 1)
# y = x.mean(axis=(1, 2))
#
# x_input = Input(shape=x.shape[1:])
# pad1 = PadUtils.get_padding_for_same(kernel_size=kernel_1)
# x_out = InceptionModelBase.padding_layer('RefPad2D')(padding=pad1, name="RefPAD1")(x_input)
# # x_out = ReflectionPadding2D(padding=pad1, name="RefPAD")(x_input)
# x_out = Conv2D(5, kernel_size=kernel_1, activation='relu')(x_out)
#
# pad2 = PadUtils.get_padding_for_same(kernel_size=kernel_2)
# x_out = InceptionModelBase.padding_layer(SymmetricPadding2D)(padding=pad2, name="SymPAD1")(x_out)
# # x_out = SymmetricPadding2D(padding=pad2, name="SymPAD")(x_out)
# x_out = Conv2D(2, kernel_size=kernel_2, activation='relu')(x_out)
# x_out = Flatten()(x_out)
# x_out = Dense(1, activation='linear')(x_out)
#
# model = Model(inputs=x_input, outputs=x_out)
# model.compile('adam', loss='mse')
# model.summary()
# # model.fit(x, y, epochs=10)
if __name__ == '__main__':
print(__name__)
from keras.datasets import cifar10
from keras.utils import np_utils
from keras.layers import Input
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.advanced_activations import LeakyReLU, ELU
from keras.optimizers import SGD
from keras.layers import Dense, Flatten, Conv2D, MaxPooling2D
from keras.models import Model
@@ -244,11 +252,17 @@ if __name__ == '__main__':
conv_settings_dict = {'tower_1': {'reduction_filter': 64,
'tower_filter': 64,
'tower_kernel': (3, 3),
'activation': LeakyReLU},
'activation': LeakyReLU,},
'tower_2': {'reduction_filter': 64,
'tower_filter': 64,
'tower_kernel': (5, 5),
'activation': 'relu'}
'activation': 'relu',
'padding': 'SymPad2D'},
'tower_3': {'reduction_filter': 64,
'tower_filter': 64,
'tower_kernel': (1, 1),
'activation': ELU,
'padding': ReflectionPadding2D}
}
pool_settings_dict = {'pool_kernel': (3, 3),
'tower_filter': 64,
@@ -266,7 +280,7 @@ if __name__ == '__main__':
# create inception net
inception_net = InceptionModelBase()
output = inception_net.inception_block(input_img, conv_settings_dict, pool_settings_dict)
output = inception_net.inception_block(input_img, conv_settings_dict, pool_settings_dict, batch_normalisation=True)
output = Flatten()(output)
output = Dense(10, activation='softmax')(output)
model = Model(inputs=input_img, outputs=output)
@@ -281,5 +295,6 @@ if __name__ == '__main__':
print(X_train.shape)
keras.utils.plot_model(model, to_file='model.pdf', show_shapes=True, show_layer_names=True)
# model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test))
print('test')