Skip to content
Snippets Groups Projects
Commit 04f2fc51 authored by Carsten Hinz's avatar Carsten Hinz
Browse files

increased duration before aborting the check for results.

excluded already processed year.
parent 6f303961
No related branches found
No related tags found
1 merge request!11Creation of first beta release version
%% Cell type:code id: tags:
``` python
from datetime import datetime as dt
from collections import namedtuple
from pathlib import Path
from toargridding.toar_rest_client import AnalysisServiceDownload, Connection
from toargridding.grids import RegularGrid
from toargridding.gridding import get_gridded_toar_data
from toargridding.metadata import TimeSample
```
%% Cell type:code id: tags:
``` python
# Creation of the requests: one Config per year to be gridded.
Config = namedtuple("Config", ["grid", "time", "variables", "stats"])

# ~2 degree regular lat/lon grid.
grid = RegularGrid(lat_resolution=1.9, lon_resolution=2.5)

configs = dict()
# Year 2000 (offset 0) has already been processed, so start at offset 1.
for year in range(1, 19):
    valid_data = Config(
        grid,
        # Daily sampling over one full calendar year.
        TimeSample(start=dt(2000 + year, 1, 1), end=dt(2000 + year, 12, 31), sampling="daily"),
        ["mole_fraction_of_ozone_in_air"],  # variable name
        ["dma8epax"],  # TODO: change to dma8epa_strict
    )
    configs[f"test_ta{year}"] = valid_data
```
%% Cell type:code id: tags:
``` python
# CAVE: the request takes over 30 min per requested year. Therefore this cell needs
# to be executed at different times to check if the results are ready for download.
# The processing is done on the server of the TOAR database.
# A restart of the cell continues the request to the REST API if the requested data
# are ready for download. The download itself can also take a few minutes.
stats_endpoint = "https://toar-data.fz-juelich.de/api/v2/analysis/statistics/"
cache_basepath = Path("cache")
result_basepath = Path("results")
cache_basepath.mkdir(exist_ok=True)
result_basepath.mkdir(exist_ok=True)

analysis_service = AnalysisServiceDownload(stats_endpoint=stats_endpoint, cache_dir=cache_basepath, sample_dir=result_basepath, use_downloaded=True)
Connection.DEBUG = True

# Override the polling durations before a request is abandoned.
# The default maximum wait is 30 minutes; here we poll every 45 min
# and wait up to 12 h for one request.
analysis_service.connection.setRequestTimes(interval_min=45, maxWait_min=12 * 60)

for config_id, config in configs.items():
    print(f"\nProcessing {config_id}:")
    print(f"--------------------")
    datasets, metadatas = get_gridded_toar_data(
        analysis_service=analysis_service,
        grid=config.grid,
        time=config.time,
        variables=config.variables,
        stats=config.stats,
    )
    # Write one NetCDF file per (dataset, metadata) pair into the results directory.
    for dataset, metadata in zip(datasets, metadatas):
        dataset.to_netcdf(result_basepath / f"{metadata.get_id()}_{config.grid.get_id()}.nc")
        print(metadata.get_id())
```
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment