Commit 72f271c6 authored by leufen1

new FCN module, model classes can now use external parameters if provided

parent 55ecf226
Pipeline #61081 passed
__author__ = "Lukas Leufen"
__date__ = '2021-02-'

from mlair.model_modules import AbstractModelClass
from mlair.helpers import select_from_dict

import keras

class FCN_64_32_16(AbstractModelClass):
    """
    A customised model with 4 Dense layers (64, 32, 16, window_lead_time), where the last layer is the output
    layer whose size depends on the window_lead_time parameter.
    """

    def __init__(self, input_shape: list, output_shape: list):
        """
        Sets model and loss depending on the given arguments.

        :param input_shape: list of input shapes (expect len=1 with shape=(window_hist, station, variables))
        :param output_shape: list of output shapes (expect len=1 with shape=(window_forecast))
        """
        assert len(input_shape) == 1
        assert len(output_shape) == 1
        super().__init__(input_shape[0], output_shape[0])

        # settings
        self.dropout_rate = 0.1
        self.regularizer = keras.regularizers.l2(0.1)
        self.activation = keras.layers.PReLU

        # apply to model
        self.set_model()
        self.set_compile_options()
        self.set_custom_objects(loss=self.compile_options['loss'])

    def set_model(self):
        """
        Build the model.
        """
        x_input = keras.layers.Input(shape=self._input_shape)
        x_in = keras.layers.Flatten(name='{}'.format("major"))(x_input)
        x_in = keras.layers.Dense(64, name='{}_Dense_64'.format("major"))(x_in)
        x_in = self.activation()(x_in)
        x_in = keras.layers.Dense(32, name='{}_Dense_32'.format("major"))(x_in)
        x_in = self.activation()(x_in)
        x_in = keras.layers.Dense(16, name='{}_Dense_16'.format("major"))(x_in)
        x_in = self.activation()(x_in)
        x_in = keras.layers.Dense(self._output_shape, name='{}_Dense'.format("major"))(x_in)
        out_main = self.activation()(x_in)
        self.model = keras.Model(inputs=x_input, outputs=[out_main])

    def set_compile_options(self):
        self.initial_lr = 1e-2
        self.optimizer = keras.optimizers.adam(lr=self.initial_lr)
        self.compile_options = {"loss": [keras.losses.mean_squared_error], "metrics": ["mse", "mae"]}
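
A minimal usage sketch (not part of this commit; the shapes are illustrative assumptions, e.g. a 7-step history of 9 variables at one station mapped onto a 4-step forecast):

# illustrative only: shape values are assumptions
fcn_fixed = FCN_64_32_16(input_shape=[(7, 1, 9)], output_shape=[(4,)])
fcn_fixed.model.summary()  # the keras architecture built by set_model is stored in self.model
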
class FCN(AbstractModelClass):
    """
    A customised fully connected network (FCN) with a configurable number of hidden Dense layers and neurons per
    layer. The last Dense layer is the output layer whose size depends on the window_lead_time parameter.
    """

    _activation = {"relu": keras.layers.ReLU(), "tanh": keras.layers.Activation("tanh"),
                   "sigmoid": keras.layers.Activation("sigmoid")}
    _optimizer = {"adam": keras.optimizers.adam, "sgd": keras.optimizers.SGD}
    _requirements = ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad", "momentum", "nesterov"]

    def __init__(self, input_shape: list, output_shape: list, activation="relu", optimizer="adam",
                 layers=1, neurons=10, **kwargs):
        """
        Sets model and loss depending on the given arguments.

        :param input_shape: list of input shapes (expect len=1 with shape=(window_hist, station, variables))
        :param output_shape: list of output shapes (expect len=1 with shape=(window_forecast))
        :param activation: name of the activation function to apply after each Dense layer ("relu", "tanh", or
            "sigmoid", default "relu")
        :param optimizer: name of the optimizer to use ("adam" or "sgd", default "adam")
        :param layers: number of hidden Dense layers (default 1)
        :param neurons: number of neurons per hidden layer (default 10)
        :param kwargs: optimizer settings such as lr, beta_1, beta_2, epsilon, decay, amsgrad (adam) or lr,
            momentum, decay, nesterov (sgd); only the keys relevant for the chosen optimizer are used
        """
        assert len(input_shape) == 1
        assert len(output_shape) == 1
        super().__init__(input_shape[0], output_shape[0])

        # settings
        self.activation = self._set_activation(activation)
        self.optimizer = self._set_optimizer(optimizer, **kwargs)
        self.layer_configuration = (layers, neurons)

        # apply to model
        self.set_model()
        self.set_compile_options()
        # self.set_custom_objects(loss=self.compile_options['loss'])

    def _set_activation(self, activation):
        try:
            # use item access so an unsupported name raises KeyError and is reported below
            return self._activation[activation.lower()]
        except KeyError:
            raise AttributeError(f"Given activation {activation} is not supported in this model class.")

    def _set_optimizer(self, optimizer, **kwargs):
        try:
            opt_name = optimizer.lower()
            # use item access so an unsupported name raises KeyError and is reported below
            opt = self._optimizer[opt_name]
            opt_kwargs = {}
            if opt_name == "adam":
                opt_kwargs = select_from_dict(kwargs, ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad"])
            elif opt_name == "sgd":
                opt_kwargs = select_from_dict(kwargs, ["lr", "momentum", "decay", "nesterov"])
            return opt(**opt_kwargs)
        except KeyError:
            raise AttributeError(f"Given optimizer {optimizer} is not supported in this model class.")

    def set_model(self):
        """
        Build the model.
        """
        x_input = keras.layers.Input(shape=self._input_shape)
        x_in = keras.layers.Flatten()(x_input)
        n_layer, n_hidden = self.layer_configuration
        for layer in range(n_layer):
            x_in = keras.layers.Dense(n_hidden)(x_in)
            x_in = self.activation(x_in)
        x_in = keras.layers.Dense(self._output_shape)(x_in)
        out = self.activation(x_in)
        self.model = keras.Model(inputs=x_input, outputs=[out])

    def set_compile_options(self):
        self.compile_options = {"loss": [keras.losses.mean_squared_error], "metrics": ["mse", "mae"]}
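
A minimal sketch of instantiating FCN with external parameters (not part of this commit; all values are illustrative assumptions): keyword arguments are passed through to _set_optimizer, which keeps only the keys relevant for the selected optimizer.

# illustrative only: shape and parameter values are assumptions
fcn = FCN(input_shape=[(7, 1, 9)], output_shape=[(4,)],
          activation="tanh", optimizer="sgd", layers=2, neurons=32,
          lr=1e-3, momentum=0.9)  # sgd only picks lr, momentum, decay, nesterov from kwargs
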
@@ -56,7 +56,6 @@ class ModelSetup(RunEnvironment):
         """Initialise and run model setup."""
         super().__init__()
         self.model = None
-        # path = self.data_store.get("experiment_path")
         exp_name = self.data_store.get("experiment_name")
         path = self.data_store.get("model_path")
         self.scope = "model"
@@ -138,9 +137,10 @@ class ModelSetup(RunEnvironment):
     def build_model(self):
         """Build model using input and output shapes from data store."""
-        args_list = ["input_shape", "output_shape"]
-        args = self.data_store.create_args_dict(args_list, self.scope)
+        # args_list = ["input_shape", "output_shape"]
         model = self.data_store.get("model_class")
+        args_list = model.requirements()
+        args = self.data_store.create_args_dict(args_list, self.scope)
         self.model = model(**args)
         self.get_model_settings()
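
This is the hook that lets external parameters reach the model class: build_model now asks the model class itself (via requirements()) which arguments it needs and collects exactly those from the data store. A rough sketch of that flow, with a plain dict standing in for the data store and a hand-written argument list standing in for requirements() (both are assumptions, not MLAir's actual implementation):

# sketch only: a plain dict replaces the data store; requirements() is assumed to
# report the model's __init__ parameters, here written out by hand
data_store = {"input_shape": [(7, 1, 9)], "output_shape": [(4,)],
              "activation": "relu", "optimizer": "adam", "lr": 1e-3, "layers": 2}
args_list = ["input_shape", "output_shape", "activation", "optimizer", "lr", "layers"]
args = {k: data_store[k] for k in args_list if k in data_store}  # ~ create_args_dict(args_list, scope)
model = FCN(**args)
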