diff --git a/toardb/timeseries/crud.py b/toardb/timeseries/crud.py index c1ad7c22b3e6b654d07a9ceecb8a4326f6fa2f3f..a2c16a996fc712357e82bf67f03e21eadf8cf013 100644 --- a/toardb/timeseries/crud.py +++ b/toardb/timeseries/crud.py @@ -33,8 +33,7 @@ import toardb def clean_additional_metadata(ad_met_dict): # all changes are permanent! if not isinstance(ad_met_dict,dict): - tmp = ad_met_dict.replace('\\"','"') - tmp = tmp.replace('"','\\"') + tmp = ad_met_dict.replace('"','\\"') return tmp.replace("'",'"') # there is a mismatch with additional_metadata additional_metadata = ad_met_dict @@ -43,9 +42,8 @@ def clean_additional_metadata(ad_met_dict): for key2, value2 in value.items(): if isinstance(value2,str): additional_metadata[key][key2] = value2.replace("'","$apostroph$") - else: - if isinstance(value,str): - additional_metadata[key] = value.replace("'","$apostroph$") + elif isinstance(value,str): + additional_metadata[key] = value.replace("'","$apostroph$") additional_metadata = str(additional_metadata).replace('"','\\"') additional_metadata = str(additional_metadata).replace("'",'"') additional_metadata = str(additional_metadata).replace("$apostroph$","'") @@ -342,7 +340,7 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_ .filter(models.Timeseries.variable_id == variable_id).all() # if already not found: return None # if only one single object is found, it has to be checked whether all criterions are fullfilled - if len(ret_db_object) == 0: + if not ret_db_object: return None @@ -354,35 +352,31 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_ iter_obj = ret_db_object.copy() counter=0 for db_object in iter_obj: - found = False for role in db_object.roles: # resource provider is always an organisation! 
organisation = get_contact(db, contact_id=role.contact_id) if ((role_num == role.role) and (organisation.longname == resource_provider)): - found = True - if not found: - ret_db_object.pop(counter) - else: - counter = counter + 1 + counter += 1 + break + else: + ret_db_object.pop(counter) else: # time series that do not have a resource_provider are not identical to those who do not! role_num = get_value_from_str(toardb.toardb.RC_vocabulary,'ResourceProvider') iter_obj = ret_db_object.copy() counter=0 for db_object in iter_obj: - found = False for role in db_object.roles: if (role_num == role.role): - found = True - if found: - ret_db_object.pop(counter) - else: - counter = counter + 1 + ret_db_object.pop(counter) + counter -= 1 + break + counter = counter + 1 # if already only none object --> return # if only one single object is found, it has to be checked whether all criterions are fullfilled - if len(ret_db_object) == 0: + if not ret_db_object: return None # filter for criterion 14.4 @@ -393,11 +387,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_ if not (db_object.sampling_frequency == sampling_frequency): ret_db_object.pop(counter) else: - counter = counter + 1 + counter += 1 # if already only none object --> return # if only one single object is found, it has to be checked whether all criterions are fullfilled - if len(ret_db_object) == 0: + if not ret_db_object: return None # filter for criterion 14.5 @@ -408,11 +402,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_ if not (db_object.provider_version == provider_version): ret_db_object.pop(counter) else: - counter = counter + 1 + counter += 1 # if already only none object --> return # if only one single object is found, it has to be checked whether all criterions are fullfilled - if len(ret_db_object) == 0: + if not ret_db_object: return None # filter for criterion 14.6 @@ -424,11 +418,11 @@ def 
get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_ if not (db_object.data_origin_type == data_origin_type_num): ret_db_object.pop(counter) else: - counter = counter + 1 + counter += 1 # if already only none object --> return # if only one single object is found, it has to be checked whether all criterions are fullfilled - if len(ret_db_object) == 0: + if not ret_db_object: return None # filter for criterion 14.7 @@ -440,11 +434,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_ if not (db_object.data_origin == data_origin_num): ret_db_object.pop(counter) else: - counter = counter + 1 + counter += 1 # if already only none object --> return # if only one single object is found, it has to be checked whether all criterions are fullfilled - if len(ret_db_object) == 0: + if not ret_db_object: return None # filter for criterion 14.8 @@ -455,11 +449,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_ if not (db_object.sampling_height == sampling_height): ret_db_object.pop(counter) else: - counter = counter + 1 + counter += 1 # if already only none object --> return # if only one single object is found, it has to be checked whether all criterions are fullfilled - if len(ret_db_object) == 0: + if not ret_db_object: return None # filter for criterion 14.9 @@ -470,27 +464,26 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_ if not (db_object.label == label): ret_db_object.pop(counter) else: - counter = counter + 1 + counter += 1 # check that only one object is left!!! 
# adapt mismatches for return value - if len(ret_db_object) == 0: + if not ret_db_object: ret_db_object = None + elif len(ret_db_object) == 1: + ret_db_object = ret_db_object[0] + # there is a mismatch with additional_metadata + ret_db_object.additional_metadata = clean_additional_metadata(ret_db_object.additional_metadata) + # there is also a mismatch with coordinates and additional_metadata from station object + if isinstance(ret_db_object.station.coordinates, (WKBElement, WKTElement)): + ret_db_object.station.coordinates = get_coordinates_from_geom(ret_db_object.station.coordinates) + # there is a mismatch with additional_metadata + if isinstance(ret_db_object.station.additional_metadata, dict): + ret_db_object.station.additional_metadata = json.dumps(ret_db_object.station.additional_metadata) else: - if len(ret_db_object) == 1: - ret_db_object = ret_db_object[0] - # there is a mismatch with additional_metadata - ret_db_object.additional_metadata = clean_additional_metadata(ret_db_object.additional_metadata) - # there is also a mismatch with coordinates and additional_metadata from station object - if isinstance(ret_db_object.station.coordinates, (WKBElement, WKTElement)): - ret_db_object.station.coordinates = get_coordinates_from_geom(ret_db_object.station.coordinates) - # there is a mismatch with additional_metadata - if isinstance(ret_db_object.station.additional_metadata, dict): - ret_db_object.station.additional_metadata = json.dumps(ret_db_object.station.additional_metadata) - else: - status_code=405 - message=f"Timeseries not unique, more criteria need to be defined." - return JSONResponse(status_code=status_code, content=message) + status_code=405 + message=f"Timeseries not unique, more criteria need to be defined." 
+ return JSONResponse(status_code=status_code, content=message) return ret_db_object diff --git a/toardb/timeseries/timeseries.py b/toardb/timeseries/timeseries.py index 91df21bb6418622fafa87efa83e88b6d43e70235..2a5d8005ca91a94ddc97e363a0a29a191429e96d 100644 --- a/toardb/timeseries/timeseries.py +++ b/toardb/timeseries/timeseries.py @@ -57,9 +57,7 @@ def search_all_timeseries_aggregations(request: Request, db: Session = Depends(g db, path_params=request.path_params, signs=signs, query_params_list=query_params ) else: - updated_query_params = get_query_params(request.url.query) - return crud.search_all(db, path_params=request.path_params, query_params=updated_query_params) - + return search_all_timeseries(request, db) #get all entries of table timeseries