Skip to content
Snippets Groups Projects
Commit 33c89180 authored by lukas leufen's avatar lukas leufen
Browse files

Merge branch 'develop' into 'lukas_issue448_feat_load-model-from-path'

Develop

See merge request !511
parents e60168f4 be4ae8c2
Branches
Tags
4 merge requests!522filter can now combine obs, forecast, and apriori for first iteration. Further...,!521Resolve "release v2.4.0",!512Lukas issue448 feat load model from path,!511Develop
Pipeline #139207 failed
...@@ -64,6 +64,7 @@ class PreProcessing(RunEnvironment): ...@@ -64,6 +64,7 @@ class PreProcessing(RunEnvironment):
if snapshot_load_path is None: if snapshot_load_path is None:
stations = self.data_store.get("stations") stations = self.data_store.get("stations")
data_handler = self.data_store.get("data_handler") data_handler = self.data_store.get("data_handler")
self._load_apriori()
_, valid_stations = self.validate_station(data_handler, stations, _, valid_stations = self.validate_station(data_handler, stations,
"preprocessing") # , store_processed_data=False) "preprocessing") # , store_processed_data=False)
if len(valid_stations) == 0: if len(valid_stations) == 0:
...@@ -318,6 +319,30 @@ class PreProcessing(RunEnvironment): ...@@ -318,6 +319,30 @@ class PreProcessing(RunEnvironment):
attrs[k] = dict(attrs.get(k, {}), **{station: v}) attrs[k] = dict(attrs.get(k, {}), **{station: v})
for k, v in attrs.items(): for k, v in attrs.items():
self.data_store.set(k, v) self.data_store.set(k, v)
self._store_apriori()
def _store_apriori(self):
    """Persist the "apriori" entry of the data store to disk for later reuse.

    If apriori data is present in the data store, it is dumped with dill
    (pickle protocol 4) to ``<experiment_path>/data/apriori/apriori.pickle``
    so a later run can restore it via ``_load_apriori`` instead of
    recalculating it from data. Does nothing when no apriori data is set.
    """
    apriori = self.data_store.get_default("apriori", default=None)
    # Explicit None check: the previous truthiness test (`if apriori:`)
    # silently skipped storing an apriori object that evaluates falsy
    # (e.g. an empty dict), which is inconsistent with the `is None`
    # check used in _load_apriori.
    if apriori is not None:
        experiment_path = self.data_store.get("experiment_path")
        path = os.path.join(experiment_path, "data", "apriori")
        store_file = os.path.join(path, "apriori.pickle")
        # presumably check_path_and_create creates missing parent dirs;
        # the exists() pre-check just avoids a redundant helper call.
        if not os.path.exists(path):
            path_config.check_path_and_create(path)
        with open(store_file, "wb") as f:
            # protocol 4 keeps the pickle readable on Python >= 3.4
            dill.dump(apriori, f, protocol=4)
        logging.debug(f"Store apriori options locally for later use at: {store_file}")
def _load_apriori(self):
    """Restore apriori data from a user-provided pickle file, if any.

    Loads the file referenced by the "apriori_file" parameter into the
    data store under "apriori". The load is skipped when apriori data is
    already present or no file parameter is set; a missing file falls
    back to fresh calculation from data (logged at info level).
    """
    if self.data_store.get_default("apriori", default=None) is not None:
        return  # apriori data already available, nothing to restore
    apriori_file = self.data_store.get_default("apriori_file", None)
    if apriori_file is None:
        return  # no apriori file configured
    if not os.path.exists(apriori_file):
        logging.info(f"cannot load apriori file: {apriori_file}. Use fresh calculation from data.")
        return
    logging.info(f"use apriori data from given file: {apriori_file}")
    with open(apriori_file, "rb") as pickle_file:
        self.data_store.set("apriori", dill.load(pickle_file))
def transformation(self, data_handler: AbstractDataHandler, stations): def transformation(self, data_handler: AbstractDataHandler, stations):
calculate_fresh_transformation = self.data_store.get_default("calculate_fresh_transformation", True) calculate_fresh_transformation = self.data_store.get_default("calculate_fresh_transformation", True)
...@@ -435,7 +460,7 @@ class PreProcessing(RunEnvironment): ...@@ -435,7 +460,7 @@ class PreProcessing(RunEnvironment):
"neighbors", "plot_list", "plot_path", "regularizer", "restore_best_model_weights", "neighbors", "plot_list", "plot_path", "regularizer", "restore_best_model_weights",
"snapshot_load_path", "snapshot_path", "stations", "tmp_path", "train_model", "snapshot_load_path", "snapshot_path", "stations", "tmp_path", "train_model",
"transformation", "use_multiprocessing", "cams_data_path", "cams_interp_method", "transformation", "use_multiprocessing", "cams_data_path", "cams_interp_method",
"do_bias_free_evaluation"] "do_bias_free_evaluation", "apriori_file", "model_path"]
data_handler = self.data_store.get("data_handler") data_handler = self.data_store.get("data_handler")
model_class = self.data_store.get("model_class") model_class = self.data_store.get("model_class")
excluded_params = list(set(excluded_params + data_handler.store_attributes() + model_class.requirements())) excluded_params = list(set(excluded_params + data_handler.store_attributes() + model_class.requirements()))
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment