Commit 8dfad2e0 authored by leufen1

update toar service url

parent ff8b044a
3 merge requests:
!522 filter can now combine obs, forecast, and apriori for first iteration. Further...
!521 Resolve "release v2.4.0"
!518 Resolve "Use Toar statistics api v2"
Pipeline #142798 canceled

@@ -10,7 +10,7 @@ def toar_data_v2_settings(sampling="daily") -> Tuple[str, Dict]:
     :return: Service url and optional headers
     """
     if sampling == "daily":  # pragma: no branch
-        TOAR_SERVICE_URL = "https://toar-data.fz-juelich.de/statistics/api/v1/"
+        TOAR_SERVICE_URL = "https://toar-data.fz-juelich.de/api/v2/analysis/statistics/"
         headers = {}
     elif sampling == "hourly" or sampling == "meta":
         TOAR_SERVICE_URL = "https://toar-data.fz-juelich.de/api/v2/"
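
For orientation: this function hands back the base url (plus optional headers) that the downloader then queries. A minimal self-contained sketch of the updated daily branch and a hypothetical probe of the service root — the request itself is illustrative and not part of this commit:

    from typing import Dict, Tuple

    import requests

    def toar_data_v2_settings(sampling: str = "daily") -> Tuple[str, Dict]:
        """Condensed from the hunk above; only the branches shown there."""
        if sampling == "daily":
            # New v2 analysis route introduced by this commit.
            return "https://toar-data.fz-juelich.de/api/v2/analysis/statistics/", {}
        # "hourly" and "meta" use the plain v2 api root.
        return "https://toar-data.fz-juelich.de/api/v2/", {}

    url, headers = toar_data_v2_settings("daily")
    response = requests.get(url, headers=headers, timeout=60)  # hypothetical probe
    print(response.status_code)
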
@@ -331,7 +331,7 @@ class DataHandlerSingleStation(AbstractDataHandler):
         file_name = self._set_file_name(path, station, statistics_per_var)
         meta_file = self._set_meta_file_name(path, station, statistics_per_var)
         if self.overwrite_local_data is True:
-            logging.debug(f"overwrite_local_data is true, therefore reload {file_name}")
+            logging.debug(f"{self.station[0]}: overwrite_local_data is true, therefore reload {file_name}")
             if os.path.exists(file_name):
                 os.remove(file_name)
             if os.path.exists(meta_file):
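
The branch above is a plain cache-busting step: when overwrite_local_data is set, existing data and meta files are deleted so the following load falls through to a fresh download. A standalone sketch with invented station id and paths:

    import logging
    import os

    overwrite_local_data = True
    station = ["DEBW107"]                # hypothetical station id
    file_name = "data/DEBW107.nc"        # hypothetical cached data file
    meta_file = "data/meta/DEBW107.csv"  # hypothetical cached meta file

    if overwrite_local_data is True:
        logging.debug(f"{station[0]}: overwrite_local_data is true, therefore reload {file_name}")
        if os.path.exists(file_name):
            os.remove(file_name)
        if os.path.exists(meta_file):
            os.remove(meta_file)
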
@@ -339,22 +339,22 @@ class DataHandlerSingleStation(AbstractDataHandler):
             data, meta = data_sources.download_data(file_name, meta_file, station, statistics_per_var, sampling,
                                                     store_data_locally=store_data_locally, data_origin=data_origin,
                                                     time_dim=self.time_dim, target_dim=self.target_dim, iter_dim=self.iter_dim)
-            logging.debug(f"loaded new data")
+            logging.debug(f"{self.station[0]}: loaded new data")
         else:
             try:
-                logging.debug(f"try to load local data from: {file_name}")
+                logging.debug(f"{self.station[0]}: try to load local data from: {file_name}")
                 data = xr.open_dataarray(file_name)
                 meta = pd.read_csv(meta_file, index_col=0)
                 self.check_station_meta(meta, station, data_origin, statistics_per_var)
-                logging.debug("loading finished")
+                logging.debug(f"{self.station[0]}: loading finished")
             except FileNotFoundError as e:
-                logging.debug(e)
-                logging.debug(f"load new data")
+                logging.debug(f"{self.station[0]}: {e}")
+                logging.debug(f"{self.station[0]}: load new data")
                 data, meta = data_sources.download_data(file_name, meta_file, station, statistics_per_var, sampling,
                                                         store_data_locally=store_data_locally, data_origin=data_origin,
                                                         time_dim=self.time_dim, target_dim=self.target_dim,
                                                         iter_dim=self.iter_dim)
-                logging.debug("loading finished")
+                logging.debug(f"{self.station[0]}: loading finished")
         # create slices and check for negative concentration.
         data = self._slice_prep(data, start=start, end=end)
         data = self.check_for_negative_concentrations(data)
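
Apart from the new station prefix, the hunk's logic is a load-or-download fallback: try the local cache first, and on FileNotFoundError (raised by the missing file or by check_station_meta, see below) fetch fresh data. A reduced sketch of that flow, with hypothetical stand-ins for xr.open_dataarray and data_sources.download_data:

    import logging

    def load_local(file_name):
        # Hypothetical stand-in for xr.open_dataarray + pd.read_csv.
        raise FileNotFoundError(f"no such file: {file_name}")

    def download(file_name):
        # Hypothetical stand-in for data_sources.download_data.
        return f"fresh data for {file_name}"

    station = ["DEBW107"]          # hypothetical station id
    file_name = "data/DEBW107.nc"  # hypothetical cache path

    try:
        logging.debug(f"{station[0]}: try to load local data from: {file_name}")
        data = load_local(file_name)
        logging.debug(f"{station[0]}: loading finished")
    except FileNotFoundError as e:
        logging.debug(f"{station[0]}: {e}")
        logging.debug(f"{station[0]}: load new data")
        data = download(file_name)
        logging.debug(f"{station[0]}: loading finished")
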
@@ -372,7 +372,7 @@ class DataHandlerSingleStation(AbstractDataHandler):
             if v is None or k not in meta.index:
                 continue
             if meta.at[k, station[0]] != v:
-                logging.debug(f"meta data does not agree with given request for {k}: {v} (requested) != "
+                logging.debug(f"{station[0]}: meta data does not agree with given request for {k}: {v} (requested) != "
                               f"{meta.at[k, station[0]]} (local). Raise FileNotFoundError to trigger new "
                               f"grapping from web.")
                 raise FileNotFoundError
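
This check compares locally cached metadata against the requested values and deliberately raises FileNotFoundError so the caller above falls back to a fresh download. A self-contained sketch of that control flow; the meta keys and values are invented:

    import logging

    import pandas as pd

    # Hypothetical cached meta frame: rows are meta keys, columns are station ids.
    meta = pd.DataFrame({"DEBW107": {"data_origin": "REA"}})
    station = ["DEBW107"]
    requested = {"data_origin": "UBA", "unused_key": None}

    try:
        for k, v in requested.items():
            if v is None or k not in meta.index:
                continue
            if meta.at[k, station[0]] != v:
                logging.debug(f"{station[0]}: meta data does not agree with given request for {k}: "
                              f"{v} (requested) != {meta.at[k, station[0]]} (local).")
                raise FileNotFoundError
    except FileNotFoundError:
        print("cache mismatch -> trigger new download")  # in MLAir: re-download data and meta
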
@@ -374,7 +374,7 @@ class DataHandlerClimateFirFilterSingleStation(DataHandlerFirFilterSingleStation
     def apply_filter(self):
         """Apply FIR filter only on inputs."""
         self.apriori = self.apriori.get(str(self)) if isinstance(self.apriori, dict) else self.apriori
-        logging.info(f"{self.station}: call ClimateFIRFilter")
+        logging.info(f"{self.station[0]}: call ClimateFIRFilter")
         climate_filter = ClimateFIRFilter(self.input_data.astype("float32"), self.fs, self.filter_order,
                                           self.filter_cutoff_freq,
                                           self.filter_window_type, time_dim=self.time_dim, var_dim=self.target_dim,
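
ClimateFIRFilter itself is project code and unchanged by this commit; only the log line differs, now printing the station id instead of the whole station list. As a generic illustration of the parameters handed to it (sampling frequency, filter order, cutoff frequency, window type), a plain FIR low-pass design with scipy might look like this — all values are invented:

    import numpy as np
    from scipy import signal

    fs = 1.0               # hypothetical sampling frequency: one sample per day
    filter_order = 91      # hypothetical number of taps
    cutoff_freq = 1 / 365  # hypothetical cutoff: suppress sub-annual variability
    window = "hamming"     # hypothetical window type

    # Design the low-pass FIR coefficients.
    taps = signal.firwin(filter_order, cutoff_freq, window=window, fs=fs)

    # Apply to a noisy toy series; filtfilt avoids a phase shift.
    t = np.arange(3 * 365)
    series = np.sin(2 * np.pi * t / 365) + 0.5 * np.random.randn(t.size)
    filtered = signal.filtfilt(taps, [1.0], series)
    print(filtered.shape)
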