Resolve "release v1.4.0"

Merged Ghost User requested to merge release_v1.4.0 into master
1 file changed  +113  −0
__author__ = "Lukas Leufen"
__date__ = '2021-02-'

from functools import reduce, partial

import keras

from mlair.model_modules import AbstractModelClass
from mlair.helpers import select_from_dict
from mlair.model_modules.loss import var_loss, custom_loss
from mlair.model_modules.advanced_paddings import PadUtils, Padding2D, SymmetricPadding2D


class CNN(AbstractModelClass):
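    """
    A shallow convolutional model implementing the MLAir ``AbstractModelClass`` interface.

    The network stacks 1x1 convolutions with symmetric padding, flattens, and ends with two dense
    layers plus an output layer. Activation, output activation, optimizer and kernel regularizer
    are selected by name; their hyperparameters are forwarded through ``**kwargs``.
    """
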
    _activation = {"relu": keras.layers.ReLU, "tanh": partial(keras.layers.Activation, "tanh"),
                   "sigmoid": partial(keras.layers.Activation, "sigmoid"),
                   "linear": partial(keras.layers.Activation, "linear"),
                   "selu": partial(keras.layers.Activation, "selu")}
    _initializer = {"selu": keras.initializers.lecun_normal()}
    _optimizer = {"adam": keras.optimizers.adam}
    _regularizer = {"l1": keras.regularizers.l1, "l2": keras.regularizers.l2, "l1_l2": keras.regularizers.l1_l2}
    _requirements = ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad"]

    def __init__(self, input_shape: list, output_shape: list, activation="relu", activation_output="linear",
                 optimizer="adam", regularizer=None, **kwargs):

        assert len(input_shape) == 1
        assert len(output_shape) == 1
        super().__init__(input_shape[0], output_shape[0])

        # settings
        self.activation = self._set_activation(activation)
        self.activation_name = activation
        self.activation_output = self._set_activation(activation_output)
        self.activation_output_name = activation_output
        self.kernel_initializer = self._initializer.get(activation, "glorot_uniform")
        self.kernel_regularizer = self._set_regularizer(regularizer, **kwargs)
        self.optimizer = self._set_optimizer(optimizer, **kwargs)

        # apply to model
        self.set_model()
        self.set_compile_options()
        self.set_custom_objects(loss=custom_loss([keras.losses.mean_squared_error, var_loss]), var_loss=var_loss)

    def _set_activation(self, activation):
        """Return the activation layer class registered under the given name."""
        try:
            # use item access (not .get) so that unknown names actually raise a KeyError
            return self._activation[activation.lower()]
        except KeyError:
            raise AttributeError(f"Given activation {activation} is not supported in this model class.")

    def _set_optimizer(self, optimizer, **kwargs):
        """Create the optimizer instance and forward any matching hyperparameters from kwargs."""
        try:
            opt_name = optimizer.lower()
            opt = self._optimizer[opt_name]  # item access so that unknown names raise a KeyError
            opt_kwargs = {}
            if opt_name == "adam":
                opt_kwargs = select_from_dict(kwargs, ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad"])
            return opt(**opt_kwargs)
        except KeyError:
            raise AttributeError(f"Given optimizer {optimizer} is not supported in this model class.")

    def _set_regularizer(self, regularizer, **kwargs):
        """Create the kernel regularizer (or return None) and forward its weights from kwargs."""
        if regularizer is None or (isinstance(regularizer, str) and regularizer.lower() == "none"):
            return None
        try:
            reg_name = regularizer.lower()
            reg = self._regularizer[reg_name]  # item access so that unknown names raise a KeyError
            reg_kwargs = {}
            if reg_name in ["l1", "l2"]:
                reg_kwargs = select_from_dict(kwargs, reg_name, remove_none=True)
                if reg_name in reg_kwargs:
                    reg_kwargs["l"] = reg_kwargs.pop(reg_name)
            elif reg_name == "l1_l2":
                reg_kwargs = select_from_dict(kwargs, ["l1", "l2"], remove_none=True)
            return reg(**reg_kwargs)
        except KeyError:
            raise AttributeError(f"Given regularizer {regularizer} is not supported in this model class.")

    def set_model(self):
        """
        Build the model.

        Three 1x1 convolution blocks (with symmetric padding before the first and third convolution),
        followed by flattening, two dense layers and the output layer.
        """
        x_input = keras.layers.Input(shape=self._input_shape)
        kernel = (1, 1)
        pad_size = PadUtils.get_padding_for_same(kernel)
        x_in = Padding2D("SymPad2D")(padding=pad_size, name="SymPad1")(x_input)
        x_in = keras.layers.Conv2D(filters=16, kernel_size=kernel,
                                   kernel_initializer=self.kernel_initializer,
                                   kernel_regularizer=self.kernel_regularizer)(x_in)
        x_in = self.activation()(x_in)
        x_in = keras.layers.Conv2D(filters=32, kernel_size=kernel,
                                   kernel_initializer=self.kernel_initializer,
                                   kernel_regularizer=self.kernel_regularizer)(x_in)
        x_in = self.activation()(x_in)
        # second padding layer needs a unique name, otherwise keras rejects the duplicate "SymPad"
        x_in = Padding2D("SymPad2D")(padding=pad_size, name="SymPad2")(x_in)
        x_in = keras.layers.Conv2D(filters=64, kernel_size=kernel,
                                   kernel_initializer=self.kernel_initializer,
                                   kernel_regularizer=self.kernel_regularizer)(x_in)
        x_in = self.activation()(x_in)
        x_in = keras.layers.Flatten()(x_in)
        x_in = keras.layers.Dense(64, kernel_initializer=self.kernel_initializer,
                                  kernel_regularizer=self.kernel_regularizer)(x_in)
        x_in = self.activation()(x_in)
        x_in = keras.layers.Dense(16, kernel_initializer=self.kernel_initializer,
                                  kernel_regularizer=self.kernel_regularizer)(x_in)
        x_in = self.activation()(x_in)
        x_in = keras.layers.Dense(self._output_shape)(x_in)
        out = self.activation_output(name=f"{self.activation_output_name}_output")(x_in)
        self.model = keras.Model(inputs=x_input, outputs=[out])

    def set_compile_options(self):
        self.compile_options = {"loss": [custom_loss([keras.losses.mean_squared_error, var_loss])],
                                "metrics": ["mse", "mae", var_loss]}