diff --git a/mlair/model_modules/model_class.py b/mlair/model_modules/model_class.py
index bba1d8bd7b95b6aba1a6390a6b0eba384e6780a7..0e69d22012a592b30c6ffdf9ed6082c47a291f90 100644
--- a/mlair/model_modules/model_class.py
+++ b/mlair/model_modules/model_class.py
@@ -351,9 +351,8 @@ class AbstractModelClass(ABC):
 
 class MyLittleModel(AbstractModelClass):
     """
-    A customised model with a 1x1 Conv, and 4 Dense layers (64, 32, 16, window_lead_time), where the last layer is the
-    output layer depending on the window_lead_time parameter. Dropout is used between the Convolution and the first
-    Dense layer.
+    A customised model with 4 Dense layers (64, 32, 16, window_lead_time), where the last layer is the output layer
+    depending on the window_lead_time parameter.
     """
 
     def __init__(self, shape_inputs: list, shape_outputs: list):
@@ -382,13 +381,8 @@ class MyLittleModel(AbstractModelClass):
         """
         Build the model.
         """
-
-        # add 1 to window_size to include current time step t0
         x_input = keras.layers.Input(shape=self.shape_inputs)
-        x_in = keras.layers.Conv2D(32, (1, 1), padding='same', name='{}_Conv_1x1'.format("major"))(x_input)
-        x_in = self.activation(name='{}_conv_act'.format("major"))(x_in)
-        x_in = keras.layers.Flatten(name='{}'.format("major"))(x_in)
-        x_in = keras.layers.Dropout(self.dropout_rate, name='{}_Dropout_1'.format("major"))(x_in)
+        x_in = keras.layers.Flatten(name='{}'.format("major"))(x_input)
         x_in = keras.layers.Dense(64, name='{}_Dense_64'.format("major"))(x_in)
         x_in = self.activation()(x_in)
         x_in = keras.layers.Dense(32, name='{}_Dense_32'.format("major"))(x_in)
diff --git a/mlair/run_modules/post_processing.py b/mlair/run_modules/post_processing.py
index d4f409ec503ba0ae37bdd1d1bec4b0207eec453c..b4af7a754335e8da6d29870b1a0c4152d7dc9af5 100644
--- a/mlair/run_modules/post_processing.py
+++ b/mlair/run_modules/post_processing.py
@@ -81,16 +81,12 @@ class PostProcessing(RunEnvironment):
 
     def _run(self):
         # ols model
-        with TimeTracking():
-            self.train_ols_model()
-            logging.info("take a look on the next reported time measure. If this increases a lot, one should think to "
-                         "skip train_ols_model() whenever it is possible to save time.")
+        self.train_ols_model()
 
         # forecasts
-        with TimeTracking():
-            self.make_prediction()
-            logging.info("take a look on the next reported time measure. If this increases a lot, one should think to "
-                         "skip make_prediction() whenever it is possible to save time.")
+        self.make_prediction()
+
+        # skill scores on test data
         self.calculate_test_score()
 
         # bootstraps