
Resolve "release v1.2.0"

Merged Ghost User requested to merge release_v1.2.0 into master
3 files   +34   -16
@@ -45,7 +45,8 @@ def download_join(station_name: Union[str, List[str]], stat_var: dict, station_t
     join_url_base, headers = join_settings(sampling)
     # load series information
-    vars_dict = load_series_information(station_name, station_type, network_name, join_url_base, headers, data_origin)
+    vars_dict, data_origin = load_series_information(station_name, station_type, network_name, join_url_base, headers,
+                                                     data_origin)
     # check if all requested variables are available
     if set(stat_var).issubset(vars_dict) is False:
@@ -65,7 +66,7 @@ def download_join(station_name: Union[str, List[str]], stat_var: dict, station_t
     for var in _lower_list(sorted(vars_dict.keys())):
         if var in stat_var.keys():
-            logging.debug('load: {}'.format(var))  # ToDo start here for #206
+            logging.debug('load: {}'.format(var))
             # create data link
             opts = {'base': join_url_base, 'service': 'stats', 'id': vars_dict[var], 'statistics': stat_var[var],
@@ -133,7 +134,7 @@ def get_data(opts: Dict, headers: Dict) -> Union[Dict, List]:
 def load_series_information(station_name: List[str], station_type: str_or_none, network_name: str_or_none,
-                            join_url_base: str, headers: Dict, data_origin: Dict = None) -> Dict:
+                            join_url_base: str, headers: Dict, data_origin: Dict = None) -> [Dict, Dict]:
     """
     List all series ids that are available for given station id and network name.
@@ -151,27 +152,30 @@ def load_series_information(station_name: List[str], station_type: str_or_none,
             "network_name": network_name, "as_dict": "true",
             "columns": "id,network_name,station_id,parameter_name,parameter_label,parameter_attribute"}
     station_vars = get_data(opts, headers)
-    logging.debug(f"{station_name}: {station_vars}")  # ToDo start here for #206
+    logging.debug(f"{station_name}: {station_vars}")
     return _select_distinct_series(station_vars, data_origin)


-def _select_distinct_series(vars: List[Dict], data_origin: Dict = None):
+def _select_distinct_series(vars: List[Dict], data_origin: Dict = None) -> [Dict, Dict]:
     """
     Select distinct series ids for all variables. Also check if a parameter is from REA or not.
     """
+    data_origin_default = {"cloudcover": "REA", "humidity": "REA", "pblheight": "REA", "press": "REA", "relhum": "REA",
+                           "temp": "REA", "totprecip": "REA", "u": "REA", "v": "REA",
+                           "no": "", "no2": "", "o3": "", "pm10": "", "so2": ""}
     if data_origin is None:
-        data_origin = {"cloudcover": "REA", "humidity": "REA", "pblheight": "REA", "press": "REA", "relhum": "REA",
-                       "temp": "REA", "totprecip": "REA", "u": "REA", "v": "REA",
-                       "no": "", "no2": "", "o3": "", "pm10": "", "so2": ""}
+        data_origin = {}
     # ToDo: maybe press, wdir, wspeed from obs? or also temp, ... ?
     selected = {}
     for var in vars:
         name = var["parameter_name"].lower()
         var_attr = var["parameter_attribute"].lower()
+        if name not in data_origin.keys():
+            data_origin.update({name: data_origin_default.get(name, "")})
         attr = data_origin.get(name, "").lower()
         if var_attr == attr:
             selected[name] = var["id"]
-    return selected
+    return selected, data_origin


 def _save_to_pandas(df: Union[pd.DataFrame, None], data: dict, stat: str, var: str) -> pd.DataFrame:
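
Minimal usage sketch of the changed behaviour (the input records and station values below are made up for illustration): _select_distinct_series now fills any variable missing from a caller-supplied data_origin with the REA defaults and returns the completed mapping alongside the selected series ids, which download_join now unpacks from load_series_information.

    series = [{"parameter_name": "o3", "parameter_attribute": "", "id": 101},
              {"parameter_name": "temp", "parameter_attribute": "REA", "id": 202}]
    selected, data_origin = _select_distinct_series(series, data_origin={"temp": "REA"})
    # selected    -> {"o3": 101, "temp": 202}
    # data_origin -> {"temp": "REA", "o3": ""}   (missing entry completed from data_origin_default)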