Skip to content
Snippets Groups Projects
Commit 0b316a00 authored by Felix Kleinert's avatar Felix Kleinert
Browse files

add run_fkt

parent 4f41a384
Branches
Tags
1 merge request!259Draft: Resolve "WRF-Datahandler should inherit from SingleStationDatahandler"
Pipeline #65145 passed
...@@ -53,7 +53,6 @@ def cut_data(file, sn_icoord=(130, 210), we_icoord=(160, 220), bt_icood=(0, 10)) ...@@ -53,7 +53,6 @@ def cut_data(file, sn_icoord=(130, 210), we_icoord=(160, 220), bt_icood=(0, 10))
west_east_vars = {d for d in data for i in data[d].dims if "west_east" == i} west_east_vars = {d for d in data for i in data[d].dims if "west_east" == i}
bottom_top_vars = {d for d in data for i in data[d].dims if "bottom_top" == i} bottom_top_vars = {d for d in data for i in data[d].dims if "bottom_top" == i}
south_north_stag_vars = {d for d in data for i in data[d].dims if "south_north_stag" == i} south_north_stag_vars = {d for d in data for i in data[d].dims if "south_north_stag" == i}
west_east_stag_vars = {d for d in data for i in data[d].dims if "west_east_stag" == i} west_east_stag_vars = {d for d in data for i in data[d].dims if "west_east_stag" == i}
bottom_top_stag_vars = {d for d in data for i in data[d].dims if "bottom_top_stag" == i} bottom_top_stag_vars = {d for d in data for i in data[d].dims if "bottom_top_stag" == i}
...@@ -90,7 +89,8 @@ def cut_data(file, sn_icoord=(130, 210), we_icoord=(160, 220), bt_icood=(0, 10)) ...@@ -90,7 +89,8 @@ def cut_data(file, sn_icoord=(130, 210), we_icoord=(160, 220), bt_icood=(0, 10))
data_cut = center_data1_3D.update(center_data2D).update(scalar_data).update(sn_stag_data).update(we_stag_data).update(bt_stag_data) data_cut = center_data1_3D.update(center_data2D).update(scalar_data).update(sn_stag_data).update(we_stag_data).update(bt_stag_data)
assert len(data) == len(data_cut) assert len(data) == len(data_cut)
data_cut = data_cut.compute() # data_cut = data_cut.chunk("auto")
data_cut = dask.compute(data_cut)[0]
try: try:
data.close() data.close()
except: except:
...@@ -204,6 +204,19 @@ def run_apply_async_multiprocessing(func, argument_list, num_processes): ...@@ -204,6 +204,19 @@ def run_apply_async_multiprocessing(func, argument_list, num_processes):
return result_list_tqdm return result_list_tqdm
def run_extaction(path, new_path, start_time, end_time):
    # NOTE(review): name has a typo ("extaction" -> "extraction"); kept as-is
    # because renaming would break existing callers.
    """Cut the shared coords file and all WRF data files in a time range.

    The ``coords.nc`` file located next to *path* is processed first via
    ``f_proc_coords``; the data files selected by ``get_files`` are then cut
    in parallel worker processes with ``f_proc``.

    :param path: directory holding the original NetCDF files
    :param new_path: target directory for the cut files
    :param start_time: first time stamp to include (forwarded to ``get_files``)
    :param end_time: last time stamp to include (forwarded to ``get_files``)
    :return: list of per-file results from ``run_apply_async_multiprocessing``
        (previously computed but discarded)
    :raises FileNotFoundError: if no ``coords.nc`` exists next to *path*
    """
    # The coordinate file lives one level above the data directory and must
    # be converted once before the data files are processed.
    coords_file = glob.glob(os.path.join(os.path.split(path)[0], "coords.nc"))
    if not coords_file:
        # Fail with a clear message instead of an IndexError on coords_file[0].
        raise FileNotFoundError(f"no coords.nc found next to {path}")
    coords_file_new = os.path.join(new_path, os.path.basename(coords_file[0]))
    # Called for its side effect (writes the cut coords file); the returned
    # value was previously bound to an unused variable.
    f_proc_coords(coords_file[0], coords_file_new)

    path_list = get_files(path, start_time, end_time)
    path_list_new = [os.path.join(new_path, os.path.basename(p)) for p in path_list]
    print(f"found {len(path_list)} files")

    # Physical cores only, never more workers than files, hard cap at 16.
    num_processes = min(psutil.cpu_count(logical=False), len(path_list), 16)
    result_list = run_apply_async_multiprocessing(func=f_proc, argument_list=(path_list, path_list_new),
                                                  num_processes=num_processes)
    return result_list
if __name__ == "__main__": if __name__ == "__main__":
path = "/home/felix/Data/WRF-Chem/upload_aura_2021-02-24/2009" path = "/home/felix/Data/WRF-Chem/upload_aura_2021-02-24/2009"
new_path = "/home/felix/Data/WRF-Chem/test_cut_nc/" new_path = "/home/felix/Data/WRF-Chem/test_cut_nc/"
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment