Skip to content
Snippets Groups Projects
Commit e0d362f0 authored by Felix Kleinert's avatar Felix Kleinert
Browse files

integrate advanced paddings into inception blocks #62

parent b2ddff8c
No related branches found
No related tags found
2 merge requests!59Develop,!53Felix issue062 apply advanced pooling in inception block feat
...@@ -5,6 +5,7 @@ import logging ...@@ -5,6 +5,7 @@ import logging
import keras import keras
import keras.layers as layers import keras.layers as layers
from src.model_modules.advanced_paddings import PadUtils, ReflectionPadding2D, SymmetricPadding2D
class InceptionModelBase: class InceptionModelBase:
...@@ -56,25 +57,30 @@ class InceptionModelBase: ...@@ -56,25 +57,30 @@ class InceptionModelBase:
logging.debug(f'Inception Block with activation: {activation}') logging.debug(f'Inception Block with activation: {activation}')
block_name = f'Block_{self.number_of_blocks}{self.block_part_name()}_{tower_kernel[0]}x{tower_kernel[1]}' block_name = f'Block_{self.number_of_blocks}{self.block_part_name()}_{tower_kernel[0]}x{tower_kernel[1]}'
padding_size = PadUtils.get_padding_for_same(tower_kernel)
if tower_kernel == (1, 1): if tower_kernel == (1, 1):
tower = layers.Conv2D(tower_filter, tower = layers.Conv2D(tower_filter,
tower_kernel, tower_kernel,
padding='same', padding='valid',
kernel_regularizer=regularizer, kernel_regularizer=regularizer,
name=block_name)(input_x) name=block_name)(input_x)
# tower = self.act(tower, activation, **act_settings) # tower = self.act(tower, activation, **act_settings)
else: else:
tower = layers.Conv2D(reduction_filter, tower = layers.Conv2D(reduction_filter,
(1, 1), (1, 1),
padding='same', padding='valid',
kernel_regularizer=regularizer, kernel_regularizer=regularizer,
name=f'Block_{self.number_of_blocks}{self.block_part_name()}_1x1')(input_x) name=f'Block_{self.number_of_blocks}{self.block_part_name()}_1x1')(input_x)
tower = self.act(tower, activation, **act_settings) tower = self.act(tower, activation, **act_settings)
tower = self.padding_layer('SymPad2D')(padding=padding_size,
name=f'Block_{self.number_of_blocks}{self.block_part_name()}_Pad'
)(tower)
tower = layers.Conv2D(tower_filter, tower = layers.Conv2D(tower_filter,
tower_kernel, tower_kernel,
padding='same', padding='valid',
kernel_regularizer=regularizer, kernel_regularizer=regularizer,
name=block_name)(tower) name=block_name)(tower)
if batch_normalisation: if batch_normalisation:
...@@ -101,6 +107,29 @@ class InceptionModelBase: ...@@ -101,6 +107,29 @@ class InceptionModelBase:
else: else:
return act_name.__name__ return act_name.__name__
@staticmethod
def padding_layer(padding):
    """
    Resolve *padding* to a keras 2D padding-layer class.

    Accepts either a name (short or full: 'RefPad2D'/'ReflectionPadding2D',
    'SymPad2D'/'SymmetricPadding2D', 'ZeroPad2D'/'ZeroPadding2D') or one of
    the allowed padding layer classes itself.

    :param padding: padding identifier (str) or an allowed padding layer class
    :return: the matching padding layer class (a class, not an instance)
    :raises NotImplementedError: if a string name is not one of the allowed names
    :raises ValueError: if a non-string argument is not an allowed padding class
    """
    allowed_paddings = {
        'RefPad2D': ReflectionPadding2D, 'ReflectionPadding2D': ReflectionPadding2D,
        'SymPad2D': SymmetricPadding2D, 'SymmetricPadding2D': SymmetricPadding2D,
        'ZeroPad2D': keras.layers.ZeroPadding2D, 'ZeroPadding2D': keras.layers.ZeroPadding2D
    }
    if isinstance(padding, str):
        try:
            pad2d = allowed_paddings[padding]
        except KeyError as einfo:
            raise NotImplementedError(
                f"`{einfo}' is not implemented as padding. "
                "Use one of those: i) `RefPad2D', ii) `SymPad2D', iii) `ZeroPad2D'")
    else:
        if padding in allowed_paddings.values():
            pad2d = padding
        else:
            # getattr-fallback: keep raising ValueError (not AttributeError)
            # even when the argument has no __name__ (e.g. a layer instance).
            bad_name = getattr(padding, '__name__', repr(padding))
            # fixed message: was "is not a valid padding padding"
            raise ValueError(f"`{bad_name}' is not a valid padding. "
                             "Use one of those: "
                             "i) ReflectionPadding2D, ii) `SymmetricPadding2D', iii) `ZeroPadding2D'")
    return pad2d
def create_pool_tower(self, input_x, pool_kernel, tower_filter, activation='relu', max_pooling=True, **kwargs): def create_pool_tower(self, input_x, pool_kernel, tower_filter, activation='relu', max_pooling=True, **kwargs):
""" """
This function creates a "MaxPooling tower block" This function creates a "MaxPooling tower block"
...@@ -114,6 +143,7 @@ class InceptionModelBase: ...@@ -114,6 +143,7 @@ class InceptionModelBase:
self.part_of_block += 1 self.part_of_block += 1
self.act_number = 1 self.act_number = 1
act_settings = kwargs.get('act_settings', {}) act_settings = kwargs.get('act_settings', {})
padding_size = PadUtils.get_padding_for_same(kernel_size=pool_kernel)
# pooling block # pooling block
block_name = f"Block_{self.number_of_blocks}{self.block_part_name()}_" block_name = f"Block_{self.number_of_blocks}{self.block_part_name()}_"
...@@ -123,7 +153,9 @@ class InceptionModelBase: ...@@ -123,7 +153,9 @@ class InceptionModelBase:
else: else:
block_type = "AvgPool" block_type = "AvgPool"
pooling = layers.AveragePooling2D pooling = layers.AveragePooling2D
tower = pooling(pool_kernel, strides=(1, 1), padding='same', name=block_name+block_type)(input_x)
tower = self.padding_layer('SymPad2D')(padding=padding_size, name=block_name+'Pad')(input_x)
tower = pooling(pool_kernel, strides=(1, 1), padding='valid', name=block_name+block_type)(tower)
# convolution block # convolution block
tower = layers.Conv2D(tower_filter, (1, 1), padding='same', name=block_name+"1x1")(tower) tower = layers.Conv2D(tower_filter, (1, 1), padding='same', name=block_name+"1x1")(tower)
...@@ -169,6 +201,35 @@ class InceptionModelBase: ...@@ -169,6 +201,35 @@ class InceptionModelBase:
return block return block
if __name__ == '__main__':
    from keras.models import Model
    from keras.layers import Conv2D, Flatten, Dense, Input
    import numpy as np

    # Smoke test: build a tiny conv net on synthetic 10x10 single-channel
    # inputs, exercising padding_layer() with both a name and a class.
    small_kernel = (3, 3)
    large_kernel = (5, 5)
    data = np.arange(2000).reshape(-1, 10, 10, 1)
    target = data.mean(axis=(1, 2))

    net_in = Input(shape=data.shape[1:])

    # Reflection padding resolved from its short name.
    ref_pad = PadUtils.get_padding_for_same(kernel_size=small_kernel)
    net = InceptionModelBase.padding_layer('RefPad2D')(padding=ref_pad, name="RefPAD1")(net_in)
    net = Conv2D(5, kernel_size=small_kernel, activation='relu')(net)

    # Symmetric padding resolved from the class itself.
    sym_pad = PadUtils.get_padding_for_same(kernel_size=large_kernel)
    net = InceptionModelBase.padding_layer(SymmetricPadding2D)(padding=sym_pad, name="SymPAD1")(net)
    net = Conv2D(2, kernel_size=large_kernel, activation='relu')(net)

    net = Flatten()(net)
    net = Dense(1, activation='linear')(net)

    model = Model(inputs=net_in, outputs=net)
    model.compile('adam', loss='mse')
    model.summary()
    # model.fit(data, target, epochs=10)
if __name__ == '__main__': if __name__ == '__main__':
print(__name__) print(__name__)
from keras.datasets import cifar10 from keras.datasets import cifar10
...@@ -212,10 +273,13 @@ if __name__ == '__main__': ...@@ -212,10 +273,13 @@ if __name__ == '__main__':
print(model.summary()) print(model.summary())
# compile # compile
epochs = 10 epochs = 1
lrate = 0.01 lrate = 0.01
decay = lrate/epochs decay = lrate/epochs
sgd = SGD(lr=lrate, momentum=0.9, decay=decay, nesterov=False) sgd = SGD(lr=lrate, momentum=0.9, decay=decay, nesterov=False)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy']) model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
print(X_train.shape) print(X_train.shape)
keras.utils.plot_model(model, to_file='model.pdf', show_shapes=True, show_layer_names=True) keras.utils.plot_model(model, to_file='model.pdf', show_shapes=True, show_layer_names=True)
# model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test))
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment