Skip to content
Snippets Groups Projects

Resolve "release v1.4.0"

Merged Ghost User requested to merge release_v1.4.0 into master
1 file
+ 0
32
Compare changes
  • Side-by-side
  • Inline
@@ -285,38 +285,6 @@ class PreProcessing(RunEnvironment):
for k, v in attrs.items():
self.data_store.set(k, v)
def validate_station_old(self, data_handler: AbstractDataHandler, set_stations, set_name=None,
                         store_processed_data=True):
    """
    Filter the given station list down to the stations that can actually be built.

    A station counts as valid when the data handler is able to construct an entry
    for it with the configured arguments; stations raising AttributeError or
    EmptyQueryResult during construction are silently skipped (best-effort).
    For the "train" subset, the transformation setup is run first on the train
    stations exclusively.

    :param data_handler: handler class used to build one data entry per station
    :param set_stations: iterable of station IDs to check
    :param set_name: subset name (e.g. "train") or None; also used as build scope
    :param store_processed_data: forwarded to the data handler's build call
    :return: tuple of (DataCollection of valid entries, list of valid station IDs)
    """
    elapsed = TimeTracking()
    scope_label = set_name if set_name is not None else "all"
    logging.info(f"check valid stations started ({scope_label})")
    # transformation statistics must be derived from train data only
    if set_name == "train":
        logging.info("setup transformation using train data exclusively")
        self.transformation(data_handler, set_stations)
    # probe each station and keep those the handler can build
    data_collection = DataCollection()
    usable_stations = []
    build_kwargs = self.data_store.create_args_dict(data_handler.requirements(), scope=set_name)
    for station_id in set_stations:
        try:
            entry = data_handler.build(station_id, name_affix=set_name,
                                       store_processed_data=store_processed_data, **build_kwargs)
            data_collection.add(entry)
            usable_stations.append(station_id)
        except (AttributeError, EmptyQueryResult):
            # station has no usable data for the requested range -> drop it
            continue
    logging.info(f"run for {elapsed} to check {len(set_stations)} station(s). Found {len(data_collection)}/"
                 f"{len(set_stations)} valid stations.")
    return data_collection, usable_stations
def transformation(self, data_handler: AbstractDataHandler, stations):
if hasattr(data_handler, "transformation"):
kwargs = self.data_store.create_args_dict(data_handler.requirements(), scope="train")
Loading