Skip to content
Snippets Groups Projects
Commit e4d4c48c authored by Felix Kleinert's avatar Felix Kleinert
Browse files

update sectorial test models

parent f200a4d3
No related branches found
No related tags found
1 merge request!259Draft: Resolve "WRF-Datahandler should inherit from SingleStationDatahandler"
Pipeline #74864 failed
......@@ -20,7 +20,7 @@ class PadUtils:
"""Helper class for advanced padding."""
@staticmethod
def get_padding_for_same(kernel_size: Tuple[int], strides: int = 1) -> Tuple[int]:
def get_padding_for_same(kernel_size: Tuple[int, int], strides: int = 1) -> Tuple[int]:
"""
Calculate padding size to keep input and output dimensions equal for a given kernel size.
......
......@@ -476,9 +476,9 @@ class MyLSTMModel(AbstractModelClass):
super().__init__(input_shape[0], output_shape[0])
# settings
self.dropout_rate = 0.4
self.dropout_rate = 0.1
self.stateful = False
self.initial_lr = 1e-5
self.initial_lr = 1e-4
# apply to model
self.set_model()
......@@ -487,9 +487,9 @@ class MyLSTMModel(AbstractModelClass):
def set_model(self):
x_input = keras.layers.Input(shape=self._input_shape)
x_in = keras.layers.LSTM(16, return_sequences=True, name="First_LSTM", dropout=self.dropout_rate, stateful=self.stateful)(x_input)
x_in = keras.layers.Dropout(self.dropout_rate)(x_in)
x_in = keras.layers.LSTM(16, name="Second_LSTM", dropout=self.dropout_rate, stateful=self.stateful)(x_in)
x_in = keras.layers.LSTM(16*2*2*2, return_sequences=True, name="First_LSTM", dropout=self.dropout_rate, stateful=self.stateful)(x_input)
# x_in = keras.layers.Dropout(self.dropout_rate)(x_in)
x_in = keras.layers.LSTM(16*2*2*2, name="Second_LSTM", dropout=self.dropout_rate, stateful=self.stateful)(x_in)
out_main = keras.layers.Dense(self._output_shape, name='Output_Dense')(x_in)
self.model = keras.Model(inputs=x_input, outputs=[out_main])
......@@ -612,7 +612,7 @@ class MyCNNModelSect(AbstractModelClass):
first_filters = 16
pad_size1 = PadUtils.get_padding_for_same(first_kernel)
pool_kernel = (3,1)
pool_kernel = (3, 1)
pad_size_pool = PadUtils.get_padding_for_same(pool_kernel)
second_kernel = (3, 1)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment