Skip to content
Snippets Groups Projects
Commit 91feab0c authored by leufen1's avatar leufen1
Browse files

MyLittleModel now consists only of Dense layers, included bug fix /close #169

parent ed0ae581
No related branches found
No related tags found
4 merge requests!146Develop,!145Resolve "new release v0.12.0",!141refac simplify mylittlemodel,!138Resolve "Advanced Documentation"
Pipeline #45965 passed
......@@ -351,9 +351,8 @@ class AbstractModelClass(ABC):
class MyLittleModel(AbstractModelClass):
"""
A customised model with a 1x1 Conv, and 4 Dense layers (64, 32, 16, window_lead_time), where the last layer is the
output layer depending on the window_lead_time parameter. Dropout is used between the Convolution and the first
Dense layer.
A customised model 4 Dense layers (64, 32, 16, window_lead_time), where the last layer is the output layer depending
on the window_lead_time parameter.
"""
def __init__(self, shape_inputs: list, shape_outputs: list):
......@@ -382,13 +381,8 @@ class MyLittleModel(AbstractModelClass):
"""
Build the model.
"""
# add 1 to window_size to include current time step t0
x_input = keras.layers.Input(shape=self.shape_inputs)
x_in = keras.layers.Conv2D(32, (1, 1), padding='same', name='{}_Conv_1x1'.format("major"))(x_input)
x_in = self.activation(name='{}_conv_act'.format("major"))(x_in)
x_in = keras.layers.Flatten(name='{}'.format("major"))(x_in)
x_in = keras.layers.Dropout(self.dropout_rate, name='{}_Dropout_1'.format("major"))(x_in)
x_in = keras.layers.Flatten(name='{}'.format("major"))(x_input)
x_in = keras.layers.Dense(64, name='{}_Dense_64'.format("major"))(x_in)
x_in = self.activation()(x_in)
x_in = keras.layers.Dense(32, name='{}_Dense_32'.format("major"))(x_in)
......
......@@ -81,16 +81,12 @@ class PostProcessing(RunEnvironment):
def _run(self):
# ols model
with TimeTracking():
self.train_ols_model()
logging.info("take a look on the next reported time measure. If this increases a lot, one should think to "
"skip train_ols_model() whenever it is possible to save time.")
# forecasts
with TimeTracking():
self.make_prediction()
logging.info("take a look on the next reported time measure. If this increases a lot, one should think to "
"skip make_prediction() whenever it is possible to save time.")
# skill scores on test data
self.calculate_test_score()
# bootstraps
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment