diff --git a/mlair/data_handler/default_data_handler.py b/mlair/data_handler/default_data_handler.py
index 799dcc45f66cd0519d172a62d15f911823907816..291bbc6616314db61282c380a6b3e105d8b6248a 100644
--- a/mlair/data_handler/default_data_handler.py
+++ b/mlair/data_handler/default_data_handler.py
@@ -256,6 +256,7 @@ class DefaultDataHandler(AbstractDataHandler):
         if multiprocessing.cpu_count() > 1:  # parallel solution
             logging.info("use parallel transformation approach")
             pool = multiprocessing.Pool()
+            logging.info("running %s processes in parallel", pool._processes)
             output = [
                 pool.apply_async(f_proc, args=(cls.data_handler_transformation, station), kwds=sp_keys)
                 for station in set_stations]
diff --git a/mlair/run_modules/pre_processing.py b/mlair/run_modules/pre_processing.py
index 0238ad0e2e8f85558d4e1172bfb65759bff35ce1..4add6abbc98c2b0d97f3512a011c0ff5a3aefa70 100644
--- a/mlair/run_modules/pre_processing.py
+++ b/mlair/run_modules/pre_processing.py
@@ -264,6 +264,7 @@ class PreProcessing(RunEnvironment):
         if multiprocessing.cpu_count() > 1:  # parallel solution
             logging.info("use parallel validate station approach")
             pool = multiprocessing.Pool()
+            logging.info("running %s processes in parallel", pool._processes)
             output = [
                 pool.apply_async(f_proc, args=(data_handler, station, set_name, store_processed_data), kwds=kwargs)
                 for station in set_stations]