Commit 9f2480ac authored by Carsten Hinz

edited get sample data to test other station classification

fixed bug in produce data manyStations
parent ba6d8393
1 merge request: !11 Creation of first beta release version
%% Cell type:code id: tags:
``` python
from datetime import datetime

from toargridding.metadata import TimeSample, Metadata

sampling = "daily"  # FIXME: also check "monthly"
start = datetime(2010, 1, 1)
#end = datetime(2011, 1, 1)  # full year; shortened for testing
end = datetime(2010, 2, 1)

statistics_endpoint = "https://toar-data.fz-juelich.de/api/v2/analysis/statistics/"
statistic = "mean"

time = TimeSample(start, end, sampling=sampling)

# note: { "station_type_of_area": "urban" } is not a known category;
# the station classification is passed either as the TOAR1 category or as "type_of_area"
#metadata = Metadata.construct("mole_fraction_of_ozone_in_air", time, statistic, {"toar1_category": "RuralLowElevation"})
metadata = Metadata.construct("mole_fraction_of_ozone_in_air", time, statistic, {"type_of_area": "Urban"})

start_time = datetime.now()
print(start_time)
```
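%% Cell type:markdown id: tags:
The cell above switches between two ways of selecting the station classification (the TOAR1 category `RuralLowElevation` and `type_of_area: Urban`). The following is a minimal, hypothetical sketch that builds one `Metadata` object per filter so both selections can be requested side by side; only `Metadata.construct`, `time`, `statistic`, and the filter values are taken from the cell above, the dictionary and its keys are illustrative.
%% Cell type:code id: tags:
``` python
# hypothetical sketch: one Metadata object per station-classification filter
# (filter values from the cell above; dict and key names are illustrative)
station_filters = {
    "rural_low_elevation": {"toar1_category": "RuralLowElevation"},
    "urban": {"type_of_area": "Urban"},
}

metadata_by_filter = {
    name: Metadata.construct("mole_fraction_of_ozone_in_air", time, statistic, station_filter)
    for name, station_filter in station_filters.items()
}
```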
%% Cell type:code id: tags:
``` python
from pathlib import Path

from toargridding.toar_rest_client import AnalysisServiceDownload

# creation of the output directories
toargridding_base_path = Path(".")
cache_dir = toargridding_base_path / "results"
download_dir = toargridding_base_path / "data"
cache_dir.mkdir(parents=True, exist_ok=True)
download_dir.mkdir(parents=True, exist_ok=True)

# request the statistics and report how long the request took
analysis_service = AnalysisServiceDownload(statistics_endpoint, cache_dir, download_dir)
results = analysis_service.get_data(metadata)

end_time = datetime.now()
print(end_time - start_time)
```
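%% Cell type:markdown id: tags:
To see what the request above actually placed on disk, the two directories created in that cell can be listed. This is an optional check using only `pathlib`; nothing here is required by the workflow.
%% Cell type:code id: tags:
``` python
# optional check: list the files created by the request in the cache and download directories
for directory in (cache_dir, download_dir):
    print(f"{directory}:")
    for path in sorted(directory.glob("*")):
        print("   ", path.name)
```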
%% Cell type:code id: tags:
``` python
```
......
%% Cell type:code id: tags:
``` python
from datetime import datetime as dt
from collections import namedtuple
from pathlib import Path
from toargridding.toar_rest_client import AnalysisServiceDownload
from toargridding.grids import RegularGrid
from toargridding.gridding import get_gridded_toar_data
from toargridding.metadata import TimeSample
```
%% Cell type:code id: tags:
``` python
# creation of the request
Config = namedtuple("Config", ["grid", "time", "variables", "stats"])

valid_data = Config(
    RegularGrid(lat_resolution=1.9, lon_resolution=2.5),
    TimeSample(start=dt(2000, 1, 1), end=dt(2019, 12, 31), sampling="daily"),  # possibly adapt the range
    ["mole_fraction_of_ozone_in_air"],  # variable name
    ["dma8epax"],  # statistic
)

configs = {
    "test_ta": valid_data,
}

# testing access:
#config = configs["test_ta"]
#config.grid
```
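%% Cell type:markdown id: tags:
Further requests can be gridded by the same download loop by adding entries to `configs`. A hypothetical sketch with a second, coarser grid is shown below; the resolution values and the dictionary key are illustrative and not part of the notebook.
%% Cell type:code id: tags:
``` python
# hypothetical second request: same variable and statistic on a coarser (illustrative) grid
coarse_data = Config(
    RegularGrid(lat_resolution=3.8, lon_resolution=5.0),
    TimeSample(start=dt(2000, 1, 1), end=dt(2019, 12, 31), sampling="daily"),
    ["mole_fraction_of_ozone_in_air"],
    ["dma8epax"],
)
#configs["test_ta_coarse"] = coarse_data  # uncomment to include it in the download loop below
```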
%% Cell type:code id: tags:
``` python
# CAVE: the request takes over 30 min per requested year. Therefore this cell needs to be
# executed several times to check whether the results are ready for download.
# The processing is done on the server of the TOAR database; re-running the cell resumes
# the request to the REST API and downloads the data once they are ready.
# The download itself can also take a few minutes.
stats_endpoint = "https://toar-data.fz-juelich.de/api/v2/analysis/statistics/"

cache_basepath = Path("cache")
result_basepath = Path("results")
cache_basepath.mkdir(exist_ok=True)
result_basepath.mkdir(exist_ok=True)

analysis_service = AnalysisServiceDownload(stats_endpoint=stats_endpoint, cache_dir=cache_basepath, sample_dir=result_basepath, use_downloaded=True)

for name, config in configs.items():
    datasets, metadatas = get_gridded_toar_data(
        analysis_service=analysis_service,
        grid=config.grid,
        time=config.time,
        variables=config.variables,
        stats=config.stats,
    )

    for dataset, metadata in zip(datasets, metadatas):
        dataset.to_netcdf(result_basepath / f"{metadata.get_id()}.nc")
        print(metadata.get_id())
```
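%% Cell type:markdown id: tags:
The files written by the loop can be opened again to check the gridded result. Since `to_netcdf` suggests the datasets are `xarray` objects, a small sketch with `xarray` follows; the file name is a placeholder for one of the IDs printed above.
%% Cell type:code id: tags:
``` python
import xarray as xr

# placeholder name: substitute one of the IDs printed by the loop above
sample_file = result_basepath / "<metadata_id>.nc"
if sample_file.exists():
    ds = xr.open_dataset(sample_file)
    print(ds)
```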
......