Commit 2d42ebc4 authored by Felix Kleinert

prepare for submission

parent 561634f5
@@ -589,13 +589,13 @@ class MyUnet(AbstractModelClass):
def __init__(self, input_shape: list, output_shape: list):
super().__init__(input_shape[0], output_shape[0])
self.first_filter_size = 16*2#self._input_shape[-1] # 16
self.lstm_units = 64 * 2
self.first_filter_size = 16 #16*2#self._input_shape[-1] # 16
self.lstm_units = 64 * 2 #* 2
self.kernel_size = (3, 1) # (3,1)
self.activation = "elu"
self.pool_size = (2, 1)
# self.dropout = .25
self.dropout = .15 #.2
self.kernel_regularizer = keras.regularizers.l1_l2(l1=0.01, l2=0.01)
self.bias_regularizer = keras.regularizers.l1_l2(l1=0.01, l2=0.01)
@@ -617,7 +617,7 @@ class MyUnet(AbstractModelClass):
c1 = keras.layers.Conv2D(self.first_filter_size, self.kernel_size, activation=self.activation,
kernel_initializer=self.kernel_initializer, kernel_regularizer=self.kernel_regularizer,
bias_regularizer=self.bias_regularizer)(c1)
c1 = keras.layers.Dropout(0.1)(c1)
c1 = keras.layers.Dropout(self.dropout)(c1)
c1 = Padding2D("SymPad2D")(padding=pad_size)(c1)
c1 = keras.layers.Conv2D(self.first_filter_size, self.kernel_size, activation=self.activation,
kernel_initializer=self.kernel_initializer, name='c1',
@@ -630,7 +630,7 @@ class MyUnet(AbstractModelClass):
c2 = keras.layers.Conv2D(self.first_filter_size * 2, self.kernel_size, activation=self.activation,
kernel_initializer=self.kernel_initializer, kernel_regularizer=self.kernel_regularizer,
bias_regularizer=self.bias_regularizer)(c2)
c2 = keras.layers.Dropout(0.1)(c2)
c2 = keras.layers.Dropout(self.dropout)(c2)
c2 = Padding2D("SymPad2D")(padding=pad_size)(c2)
c2 = keras.layers.Conv2D(self.first_filter_size * 2, self.kernel_size, activation=self.activation,
kernel_initializer=self.kernel_initializer, name='c2',
@@ -643,7 +643,7 @@ class MyUnet(AbstractModelClass):
c3 = keras.layers.Conv2D(self.first_filter_size * 4, self.kernel_size, activation=self.activation,
kernel_initializer=self.kernel_initializer, kernel_regularizer=self.kernel_regularizer,
bias_regularizer=self.bias_regularizer)(c3)
c3 = keras.layers.Dropout(0.2)(c3)
c3 = keras.layers.Dropout(self.dropout*2)(c3)
c3 = Padding2D("SymPad2D")(padding=pad_size)(c3)
c3 = keras.layers.Conv2D(self.first_filter_size * 4, self.kernel_size, activation=self.activation,
kernel_initializer=self.kernel_initializer, name='c3',
@@ -674,7 +674,7 @@ class MyUnet(AbstractModelClass):
kernel_initializer=self.kernel_initializer, kernel_regularizer=self.kernel_regularizer,
bias_regularizer=self.bias_regularizer)(c7)
c7 = keras.layers.concatenate([c7, c4_2], name="Concat_2nd_LSTM")
c7 = keras.layers.Dropout(0.2)(c7)
c7 = keras.layers.Dropout(self.dropout*2)(c7)
c7 = Padding2D("SymPad2D")(padding=pad_size)(c7)
c7 = keras.layers.Conv2D(self.first_filter_size * 4, self.kernel_size, activation=self.activation,
kernel_initializer=self.kernel_initializer, name='c7_to_u8',
@@ -690,7 +690,7 @@ class MyUnet(AbstractModelClass):
c8 = keras.layers.Conv2D(self.first_filter_size * 2, self.kernel_size, activation=self.activation,
kernel_initializer=self.kernel_initializer, kernel_regularizer=self.kernel_regularizer,
bias_regularizer=self.bias_regularizer)(c8)
c8 = keras.layers.Dropout(0.1)(c8)
c8 = keras.layers.Dropout(self.dropout)(c8)
c8 = Padding2D("SymPad2D")(padding=pad_size)(c8)
c8 = keras.layers.Conv2D(self.first_filter_size * 2, self.kernel_size, activation=self.activation,
kernel_initializer=self.kernel_initializer, name='c8_to_u9',
@@ -705,7 +705,7 @@ class MyUnet(AbstractModelClass):
c9 = keras.layers.Conv2D(self.first_filter_size, self.kernel_size, activation=self.activation,
kernel_initializer=self.kernel_initializer, kernel_regularizer=self.kernel_regularizer,
bias_regularizer=self.bias_regularizer)(c9)
c9 = keras.layers.Dropout(0.1)(c9)
c9 = keras.layers.Dropout(self.dropout)(c9)
c9 = Padding2D("SymPad2D")(padding=pad_size)(c9)
c9 = keras.layers.Conv2D(self.first_filter_size, self.kernel_size, activation=self.activation,
kernel_initializer=self.kernel_initializer, name='c9',
@@ -714,6 +714,7 @@ class MyUnet(AbstractModelClass):
# outputs = keras.layers.Conv2D(1, (1, 1), activation='sigmoid')(c9)
dl = keras.layers.Flatten()(c9)
dl = keras.layers.Dropout(self.dropout)(dl)
outputs = keras.layers.Dense(units=self._output_shape)(dl)
self.model = keras.Model(inputs=[input_train], outputs=[outputs])
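
The MyUnet hunks above mostly replace hard-coded Dropout rates (0.1 and 0.2) with the shared self.dropout attribute, so the rate can be tuned in one place and scaled (self.dropout*2) in the deeper blocks. Below is a minimal, self-contained sketch of that pattern, not the actual class: it uses plain tensorflow.keras layers with "same" padding and made-up shapes (a 7x1x9 input and 4 outputs), whereas the real model uses the custom Padding2D("SymPad2D") layers, regularizers and shapes defined above.

from tensorflow import keras  # assumption: the repository may import keras differently

def conv_block(x, filters, dropout, kernel_size=(3, 1), activation="elu"):
    """Conv -> Dropout -> Conv, with every Dropout layer driven by one shared rate."""
    x = keras.layers.Conv2D(filters, kernel_size, activation=activation, padding="same")(x)
    x = keras.layers.Dropout(dropout)(x)  # previously hard-coded as 0.1 / 0.2
    x = keras.layers.Conv2D(filters, kernel_size, activation=activation, padding="same")(x)
    return x

dropout = 0.15                                # mirrors self.dropout = .15
inputs = keras.layers.Input(shape=(7, 1, 9))  # hypothetical (history steps, 1, variables)
c1 = conv_block(inputs, filters=16, dropout=dropout)
c3 = conv_block(c1, filters=64, dropout=dropout * 2)          # deeper block, doubled rate
outputs = keras.layers.Dense(4)(keras.layers.Flatten()(c3))   # hypothetical 4 lead times
model = keras.Model(inputs=inputs, outputs=outputs)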
@@ -113,6 +113,10 @@ class PostProcessing(RunEnvironment):
self.make_prediction(self.test_data)
self.make_prediction(self.train_val_data)
# forecasts on train and val data
self.make_prediction(self.train_data)
self.make_prediction(self.val_data)
# calculate error metrics on test data
self.calculate_test_score()
@@ -8,7 +8,7 @@ from mlair.workflows import DefaultWorkflow
from mlair.helpers import remove_items
from mlair.configuration.defaults import DEFAULT_PLOT_LIST
from mlair.model_modules.model_class import IntelliO3TsArchitecture, MyLSTMModel, MyCNNModel, MyCNNModelSect, MyLuongAttentionLSTMModel
from mlair.model_modules.model_class import IntelliO3TsArchitecture, MyLSTMModel, MyCNNModel, MyCNNModelSect, MyLuongAttentionLSTMModel, MyUnet
import os
@@ -16,7 +16,6 @@ import os
def load_stations():
import json
try:
filename = 'supplement/station_list_north_german_plain_rural.json'
filename = 'supplement/WRF_coord_list_from_IntelliO3.json'
with open(filename, 'r') as jfile:
stations = json.load(jfile)
@@ -26,40 +25,24 @@ def load_stations():
def main(parser_args):
plots = remove_items(DEFAULT_PLOT_LIST, "PlotConditionalQuantiles")
do_not_plot = ["PlotDataHistogram", "PlotAvailability"]
plots = remove_items(DEFAULT_PLOT_LIST, do_not_plot)
workflow = DefaultWorkflow( stations=load_stations(),
# stations=["DEBW087","DEBW013", "DEBW107", "DEBW076"],
lazy_preprocessing=False,
train_model=False, create_new_model=True, network="UBA",
evaluate_bootstraps=False, # plot_list=["PlotCompetitiveSkillScore"],
# competitors=["test_model", "test_model2"],
# competitor_path=os.path.join(os.getcwd(), "data", "comp_test"),
competitors=["baseline", "sector_baseline"],
competitor_path="/p/scratch/deepacf/kleinert1/IASS_proc_monthyl/competitors/o3",
evaluate_feature_importance=False,
feature_importance_bootstrap_type="group_of_variables",
feature_importance_create_new_bootstraps=False,
feature_importance_bootstrap_method="zero_mean",
plot_list=plots,
#competitors=["NNb", "NN1s"],
#competitor_path="/p/scratch/deepacf/kleinert1/IASS_proc_monthyl/competitors/o3",
uncertainty_estimate_block_length="7d",
train_min_length=1, val_min_length=1, test_min_length=1,
# data_handler=DataHandlerSingleStation,
# data_handler=DataHandlerSingleGridColumn,
epochs=100,
epochs=300,
window_lead_time=4,
window_history_size=6,
# stations=["coords__48_8479__10_0963", "coords__51_8376__14_1417",
# "coords__50_7536__7_0827", "coords__51_4070__6_9656",
# "coords__49_8421__7_8662", "coords__49_7410__7_1935",
# "coords__51_1566__11_8182", "coords__51_4065__6_9660",
# "coords__50_7333__7_1000", "coords__50_0000__8_0000",
# "coords__48_7444__7_6000", "coords__51_0000__11_0000",
# "coords__52_7555__8_1000", "coords__50_0000__2_0000",
# "coords__51_7666__8_6000", "coords__50_0000__3_0000",
# "coords__45_7777__9_1000", "coords__50_0000__4_0000",
# ],
# data_handler=DataHandlerWRF,
data_handler=DataHandlerMainSectWRF, #,
# data_path="/p/scratch/deepacf/kleinert1/IASS_proc_monthyl",
#data_path="/p/scratch/deepacf/kleinert1/IASS_proc",
#data_path="/p/project/deepacf/intelliaq/kleinert1/DATA/WRF_CHEM_soft_ln_small_test",
# data_path="/media/felix/INTENSO/WRF_CHEM/hourly/cdo_output_test/jan_test",
# data_path="/p/scratch/deepacf/intelliaq/kleinert1/IASS_proc_monthly/monthly2009",
# data_path="/p/scratch/deepacf/intelliaq/kleinert1/IASS_proc_monthly/monthly_count_test",
data_path = "/p/scratch/deepacf/intelliaq/kleinert1/IASS_proc_monthly/monthly2009_2010-03",
#data_path="/p/scratch/deepacf/intelliaq/kleinert1/IASS_proc_monthly/monthly_01-03",
common_file_starter="wrfout_d01",
@@ -83,13 +66,15 @@ def main(parser_args):
# 'CLDFRA': {"method": "min_max", "min": 0., "max": 1.},
},
# variables=['T2', 'o3', 'wdir10ll', 'wspd10ll', 'no', 'no2', 'co', 'PSFC', 'PBLH', 'CLDFRA'],
variables=['T2', 'o3', 'wdir10ll', 'wspd10ll', 'no', 'no2', 'co', 'PSFC', 'PBLH'],
variables=['T2', 'o3', 'wdir10ll', 'wspd10ll', 'no', 'no2', 'co', 'PSFC', 'PBLH', 'Q2'],
target_var='o3',
target_var_unit="ppb",
vars_for_unit_conv={'o3': 'ppbv'},
# statistics_per_var={'T2': None, 'o3': None, 'wdir10ll': None, 'wspd10ll': None,
# 'no': None, 'no2': None, 'co': None, 'PSFC': None, 'PBLH': None, 'CLDFRA': None, },
statistics_per_var={'T2': "average_values", 'o3': "dma8eu", 'wdir10ll': "average_values",
'wspd10ll': "average_values", 'no': "dma8eu", 'no2': "dma8eu", 'co': "dma8eu",
'PSFC': "average_values", 'PBLH': "average_values",
'PSFC': "average_values", 'PBLH': "average_values", 'Q2':"average_values",
# 'CLDFRA': "average_values",
},
# variables=['T2', 'Q2', 'PBLH', 'U10ll', 'V10ll', 'wdir10ll', 'wspd10ll'],
@@ -141,11 +126,8 @@ def main(parser_args):
batch_size=64*2*2,
interpolation_limit=0,
as_image_like_data_format=False,
# model=MyLSTMModel,
model=MyLuongAttentionLSTMModel,
# model=MyCNNModelSect,
# model=MyCNNModel,
as_image_like_data_format=True,
model=MyUnet,
**parser_args.__dict__)
workflow.run()
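
For context, the run script is driven by parser_args, which main() forwards to DefaultWorkflow via **parser_args.__dict__, and the batch script below supplies --experiment_date=${timestamp}_WRF_sector3. The argument parsing itself lies outside the hunks shown here, so the following wiring is an assumption rather than a quote from the repository:

import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--experiment_date", type=str, default=None,
                        help="Tag used to name the experiment run.")
    args = parser.parse_args()
    main(args)  # main() unpacks args.__dict__ into the DefaultWorkflow call above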
@@ -3,7 +3,7 @@
#SBATCH --nodes=1
#SBATCH --output=HPC_logging/mlt-out.%j
#SBATCH --error=HPC_logging/mlt-err.%j
#SBATCH --time=06:00:00
#SBATCH --time=24:00:00
#SBATCH --gres=gpu:4
#SBATCH --mail-type=ALL
#SBATCH --mail-user=f.kleinert@fz-juelich.de
@@ -13,6 +13,6 @@ source venv_hdfml/bin/activate
timestamp=`date +"%Y-%m-%d_%H%M-%S"`
export PYTHONPATH=${PWD}/venv_hdfml/lib/python3.6/site-packages:${PYTHONPATH}
export PYTHONPATH=${PWD}/venv_hdfml/lib/python3.8/site-packages:${PYTHONPATH}
srun --cpu-bind=none python run_wrf_dh_sector3.py --experiment_date=${timestamp}_WRF_sector
srun --cpu-bind=none python run_wrf_dh_sector3.py --experiment_date=${timestamp}_WRF_sector3