From 7a9b091d071a8a327d0adc169ecd82d97a906f50 Mon Sep 17 00:00:00 2001
From: lukas leufen <l.leufen@fz-juelich.de>
Date: Mon, 20 Jan 2020 10:19:58 +0100
Subject: [PATCH] build new model MyBranchedModel with 3 output branches for
 better workflow testing

---
 src/model_modules/model_class.py | 85 ++++++++++++++++++++++++++++++++
 1 file changed, 85 insertions(+)

diff --git a/src/model_modules/model_class.py b/src/model_modules/model_class.py
index 1a8f7c4c..0e31cd66 100644
--- a/src/model_modules/model_class.py
+++ b/src/model_modules/model_class.py
@@ -154,3 +154,88 @@ class MyLittleModel(AbstractModelClass):
         """
 
         self.loss = keras.losses.mean_squared_error
+
+
+class MyBranchedModel(AbstractModelClass):
+
+    """
+    A customised model with a 1x1 Conv and a main branch of Dense layers (64, 32, 16,
+    window_lead_time), plus two minor output branches: each is a Dense layer of size
+    window_lead_time, branching off after the 64-unit and 32-unit Dense layers
+    respectively. The model therefore has three outputs in total, each sized by the
+    window_lead_time parameter. Dropout is used between the Convolution and the first
+    Dense layer.
+    """
+
+    def __init__(self, window_history_size, window_lead_time, channels):
+
+        """
+        Sets model and loss depending on the given arguments. Dropout rate, regularizer,
+        learning rate, optimizer, epochs, batch size and activation are fixed internally
+        rather than being passed as arguments.
+
+        :param window_history_size: number of historical time steps included in the input data
+        :param window_lead_time: number of time steps to forecast in each output layer
+        :param channels: number of variables used in input data
+        """
+
+        super().__init__()
+
+        # settings
+        self.window_history_size = window_history_size
+        self.window_lead_time = window_lead_time
+        self.channels = channels
+        self.dropout_rate = 0.1
+        self.regularizer = keras.regularizers.l2(0.1)
+        self.initial_lr = 1e-2
+        self.optimizer = keras.optimizers.SGD(lr=self.initial_lr, momentum=0.9)
+        self.lr_decay = helpers.LearningRateDecay(base_lr=self.initial_lr, drop=.94, epochs_drop=10)
+        self.epochs = 2
+        self.batch_size = int(256)
+        self.activation = keras.layers.PReLU
+
+        # apply to model
+        self.set_model()
+        self.set_loss()
+
+    def set_model(self):
+
+        """
+        Build the branched model and store it in self.model.
+
+        Uses the instance attributes set in __init__ (window_history_size, channels,
+        window_lead_time, dropout_rate, activation). The resulting keras model has one
+        input and three outputs: two minor branches taken after the 64-unit and 32-unit
+        Dense layers, and one main branch ending in a window_lead_time-sized layer.
+        :return: None; the built keras model is assigned to self.model
+        """
+
+        # add 1 to window_size to include current time step t0
+        x_input = keras.layers.Input(shape=(self.window_history_size + 1, 1, self.channels))
+        x_in = keras.layers.Conv2D(32, (1, 1), padding='same', name='{}_Conv_1x1'.format("major"))(x_input)
+        x_in = self.activation(name='{}_conv_act'.format("major"))(x_in)
+        x_in = keras.layers.Flatten(name='{}'.format("major"))(x_in)
+        x_in = keras.layers.Dropout(self.dropout_rate, name='{}_Dropout_1'.format("major"))(x_in)
+        x_in = keras.layers.Dense(64, name='{}_Dense_64'.format("major"))(x_in)
+        x_in = self.activation()(x_in)
+        out_minor_1 = keras.layers.Dense(self.window_lead_time, name='{}_Dense'.format("minor_1"))(x_in)
+        out_minor_1 = self.activation()(out_minor_1)
+        x_in = keras.layers.Dense(32, name='{}_Dense_32'.format("major"))(x_in)
+        x_in = self.activation()(x_in)
+        out_minor_2 = keras.layers.Dense(self.window_lead_time, name='{}_Dense'.format("minor_2"))(x_in)
+        out_minor_2 = self.activation()(out_minor_2)
+        x_in = keras.layers.Dense(16, name='{}_Dense_16'.format("major"))(x_in)
+        x_in = self.activation()(x_in)
+        x_in = keras.layers.Dense(self.window_lead_time, name='{}_Dense'.format("major"))(x_in)
+        out_main = self.activation()(x_in)
+        self.model = keras.Model(inputs=x_input, outputs=[out_minor_1, out_minor_2, out_main])
+
+    def set_loss(self):
+
+        """
+        Set one loss per output branch (MAE for the first minor branch, MSE for the
+        second minor branch and for the main branch), stored as a list in self.loss.
+        :return: None; the list of loss functions is assigned to self.loss
+        """
+
+        self.loss = [keras.losses.mean_absolute_error] + [keras.losses.mean_squared_error] + \
+                    [keras.losses.mean_squared_error]
-- 
GitLab