Skip to content
Snippets Groups Projects
Commit 8414d618 authored by leufen1's avatar leufen1
Browse files

applied corrections to load data correctly

parent 369a19a2
No related branches found
No related tags found
7 merge requests!468first implementation of toar-data-v2, can load data (but cannot process these...,!467Resolve "release v2.2.0",!455update for reqs,!448Resolve "update HPC environment",!446first implementation of toar-data-v2, can load data (but cannot process these...,!445Draft: Resolve "update HPC environment",!437Resolve "era5 data"
Pipeline #103457 passed
...@@ -60,7 +60,7 @@ class DataHandlerMixedSamplingSingleStation(DataHandlerSingleStation): ...@@ -60,7 +60,7 @@ class DataHandlerMixedSamplingSingleStation(DataHandlerSingleStation):
self.set_inputs_and_targets() self.set_inputs_and_targets()
def load_and_interpolate(self, ind) -> [xr.DataArray, pd.DataFrame]: def load_and_interpolate(self, ind) -> [xr.DataArray, pd.DataFrame]:
vars = [self.variables, self.target_var][ind] vars = [self.variables, self.target_var]
stats_per_var = helpers.select_from_dict(self.statistics_per_var, vars[ind]) stats_per_var = helpers.select_from_dict(self.statistics_per_var, vars[ind])
data, self.meta = self.load_data(self.path[ind], self.station, stats_per_var, self.sampling[ind], data, self.meta = self.load_data(self.path[ind], self.station, stats_per_var, self.sampling[ind],
self.station_type, self.network, self.store_data_locally, self.data_origin, self.station_type, self.network, self.store_data_locally, self.data_origin,
...@@ -115,7 +115,7 @@ class DataHandlerMixedSamplingWithFilterSingleStation(DataHandlerMixedSamplingSi ...@@ -115,7 +115,7 @@ class DataHandlerMixedSamplingWithFilterSingleStation(DataHandlerMixedSamplingSi
def make_input_target(self): def make_input_target(self):
""" """
A FIR filter is applied on the input data that has hourly resolution. Lables Y are provided as aggregated values A FIR filter is applied on the input data that has hourly resolution. Labels Y are provided as aggregated values
with daily resolution. with daily resolution.
""" """
self._data = tuple(map(self.load_and_interpolate, [0, 1])) # load input (0) and target (1) data self._data = tuple(map(self.load_and_interpolate, [0, 1])) # load input (0) and target (1) data
...@@ -143,7 +143,7 @@ class DataHandlerMixedSamplingWithFilterSingleStation(DataHandlerMixedSamplingSi ...@@ -143,7 +143,7 @@ class DataHandlerMixedSamplingWithFilterSingleStation(DataHandlerMixedSamplingSi
def load_and_interpolate(self, ind) -> [xr.DataArray, pd.DataFrame]: def load_and_interpolate(self, ind) -> [xr.DataArray, pd.DataFrame]:
start, end = self.update_start_end(ind) start, end = self.update_start_end(ind)
vars = [self.variables, self.target_var][ind] vars = [self.variables, self.target_var]
stats_per_var = helpers.select_from_dict(self.statistics_per_var, vars[ind]) stats_per_var = helpers.select_from_dict(self.statistics_per_var, vars[ind])
data, self.meta = self.load_data(self.path[ind], self.station, stats_per_var, self.sampling[ind], data, self.meta = self.load_data(self.path[ind], self.station, stats_per_var, self.sampling[ind],
...@@ -353,6 +353,7 @@ class DataHandlerMixedSamplingWithClimateAndFirFilter(DataHandlerMixedSamplingWi ...@@ -353,6 +353,7 @@ class DataHandlerMixedSamplingWithClimateAndFirFilter(DataHandlerMixedSamplingWi
sp_keys = {k: copy.deepcopy(kwargs[k]) for k in cls.data_handler_unfiltered.requirements() if k in kwargs} sp_keys = {k: copy.deepcopy(kwargs[k]) for k in cls.data_handler_unfiltered.requirements() if k in kwargs}
sp_keys = cls.build_update_transformation(sp_keys, dh_type="unfiltered_chem") sp_keys = cls.build_update_transformation(sp_keys, dh_type="unfiltered_chem")
cls.prepare_build(sp_keys, chem_vars, cls.chem_indicator) cls.prepare_build(sp_keys, chem_vars, cls.chem_indicator)
cls.correct_overwrite_option(sp_keys)
sp_chem_unfiltered = cls.data_handler_unfiltered(station, **sp_keys) sp_chem_unfiltered = cls.data_handler_unfiltered(station, **sp_keys)
if len(meteo_vars) > 0: if len(meteo_vars) > 0:
cls.set_data_handler_fir_pos(**kwargs) cls.set_data_handler_fir_pos(**kwargs)
...@@ -364,11 +365,18 @@ class DataHandlerMixedSamplingWithClimateAndFirFilter(DataHandlerMixedSamplingWi ...@@ -364,11 +365,18 @@ class DataHandlerMixedSamplingWithClimateAndFirFilter(DataHandlerMixedSamplingWi
sp_keys = {k: copy.deepcopy(kwargs[k]) for k in cls.data_handler_unfiltered.requirements() if k in kwargs} sp_keys = {k: copy.deepcopy(kwargs[k]) for k in cls.data_handler_unfiltered.requirements() if k in kwargs}
sp_keys = cls.build_update_transformation(sp_keys, dh_type="unfiltered_meteo") sp_keys = cls.build_update_transformation(sp_keys, dh_type="unfiltered_meteo")
cls.prepare_build(sp_keys, meteo_vars, cls.meteo_indicator) cls.prepare_build(sp_keys, meteo_vars, cls.meteo_indicator)
cls.correct_overwrite_option(sp_keys)
sp_meteo_unfiltered = cls.data_handler_unfiltered(station, **sp_keys) sp_meteo_unfiltered = cls.data_handler_unfiltered(station, **sp_keys)
dp_args = {k: copy.deepcopy(kwargs[k]) for k in cls.own_args("id_class") if k in kwargs} dp_args = {k: copy.deepcopy(kwargs[k]) for k in cls.own_args("id_class") if k in kwargs}
return cls(sp_chem, sp_meteo, sp_chem_unfiltered, sp_meteo_unfiltered, chem_vars, meteo_vars, **dp_args) return cls(sp_chem, sp_meteo, sp_chem_unfiltered, sp_meteo_unfiltered, chem_vars, meteo_vars, **dp_args)
@classmethod
def correct_overwrite_option(cls, kwargs):
    """Force ``overwrite_local_data`` to ``False`` when present.

    Mutates *kwargs* in place; if the key is absent, nothing is changed
    so the downstream default still applies.
    """
    key = "overwrite_local_data"
    if key in kwargs:
        kwargs[key] = False
@classmethod @classmethod
def set_data_handler_fir_pos(cls, **kwargs): def set_data_handler_fir_pos(cls, **kwargs):
""" """
......
...@@ -395,11 +395,11 @@ class DataHandlerSingleStation(AbstractDataHandler): ...@@ -395,11 +395,11 @@ class DataHandlerSingleStation(AbstractDataHandler):
era5_stats, join_stats = statistics_per_var, statistics_per_var era5_stats, join_stats = statistics_per_var, statistics_per_var
# load data # load data
if era5_origin is not None and len(era5_origin) > 0: if era5_origin is not None and len(era5_stats) > 0:
# load era5 data # load era5 data
df_era5, meta_era5 = era5.load_era5(station_name=station, stat_var=era5_stats, sampling=sampling, df_era5, meta_era5 = era5.load_era5(station_name=station, stat_var=era5_stats, sampling=sampling,
data_origin=era5_origin) data_origin=era5_origin)
if join_origin is None or len(join_stats.keys()) > 0: if join_origin is None or len(join_stats) > 0:
# load join data # load join data
df_join, meta_join = join.download_join(station_name=station, stat_var=join_stats, station_type=station_type, df_join, meta_join = join.download_join(station_name=station, stat_var=join_stats, station_type=station_type,
network_name=network, sampling=sampling, data_origin=join_origin) network_name=network, sampling=sampling, data_origin=join_origin)
......
Loading...
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment