Commit 73cd3007 authored by Till Hauer

Merge branch 'swt-project_wrap-up' into 'swt-project-filtering'

wrap_up

See merge request !225
parents c48c9f78 110e2c00
Related merge requests: !227 merge dev into testing, !226 enable aggregated filtering, !225 wrap_up
Pipeline #255769 failed
@@ -33,8 +33,7 @@ import toardb
def clean_additional_metadata(ad_met_dict):
# all changes are permanent!
if not isinstance(ad_met_dict,dict):
- tmp = ad_met_dict.replace('\\"','"')
- tmp = tmp.replace('"','\\"')
+ tmp = ad_met_dict.replace('"','\\"')
return tmp.replace("'",'"')
# there is a mismatch with additional_metadata
additional_metadata = ad_met_dict
@@ -43,8 +42,7 @@ def clean_additional_metadata(ad_met_dict):
for key2, value2 in value.items():
if isinstance(value2,str):
additional_metadata[key][key2] = value2.replace("'","$apostroph$")
- else:
- if isinstance(value,str):
+ elif isinstance(value,str):
additional_metadata[key] = value.replace("'","$apostroph$")
additional_metadata = str(additional_metadata).replace('"','\\"')
additional_metadata = str(additional_metadata).replace("'",'"')
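Aside (not part of the commit): a minimal sketch of what the simplified string branch of clean_additional_metadata now does, assuming a plain str input; the earlier un-escape step ad_met_dict.replace('\\"','"') is dropped.

# Illustration only: mirrors the post-merge string branch of clean_additional_metadata.
raw = "{'sampling_height': '2 m'}"           # additional_metadata stored as a single-quoted string
tmp = raw.replace('"', '\\"')                # escape any double quotes that are already present
cleaned = tmp.replace("'", '"')              # switch to double quotes for JSON-style output
print(cleaned)                               # {"sampling_height": "2 m"}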
@@ -342,7 +340,7 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
.filter(models.Timeseries.variable_id == variable_id).all()
# if already not found: return None
# if only one single object is found, it has to be checked whether all criteria are fulfilled
- if len(ret_db_object) == 0:
+ if not ret_db_object:
return None
@@ -354,35 +352,31 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
iter_obj = ret_db_object.copy()
counter=0
for db_object in iter_obj:
found = False
for role in db_object.roles:
# resource provider is always an organisation!
organisation = get_contact(db, contact_id=role.contact_id)
if ((role_num == role.role) and (organisation.longname == resource_provider)):
found = True
if not found:
ret_db_object.pop(counter)
else:
counter = counter + 1
counter -= 1
break
counter += 1
else:
# time series that do not have a resource_provider are not identical to those that do!
role_num = get_value_from_str(toardb.toardb.RC_vocabulary,'ResourceProvider')
iter_obj = ret_db_object.copy()
counter=0
for db_object in iter_obj:
found = False
for role in db_object.roles:
if (role_num == role.role):
found = True
if found:
counter -= 1
ret_db_object.pop(counter)
else:
break
counter = counter + 1
# if already no object is left --> return
# if only one single object is found, it has to be checked whether all criteria are fulfilled
- if len(ret_db_object) == 0:
+ if not ret_db_object:
return None
# filter for criterion 14.4
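Aside (not part of the diff): the criterion filters below (14.4 through 14.9) all repeat the same pattern — iterate over a copy of the result list, pop entries that fail the check, and advance the counter only for entries that are kept. A small self-contained sketch of that pattern, using plain dicts with a hypothetical sampling_frequency field instead of the ORM objects:

# Illustration only: the copy-and-pop filtering pattern used for criteria 14.4-14.9.
ret_db_object = [{"sampling_frequency": 1}, {"sampling_frequency": 2}, {"sampling_frequency": 1}]
sampling_frequency = 1
counter = 0
for db_object in ret_db_object.copy():
    if not (db_object["sampling_frequency"] == sampling_frequency):
        ret_db_object.pop(counter)
    else:
        counter += 1
print(ret_db_object)   # [{'sampling_frequency': 1}, {'sampling_frequency': 1}]
# The same filter without index bookkeeping:
# ret_db_object = [obj for obj in ret_db_object if obj["sampling_frequency"] == sampling_frequency]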
@@ -393,11 +387,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
if not (db_object.sampling_frequency == sampling_frequency):
ret_db_object.pop(counter)
else:
- counter = counter + 1
+ counter += 1
# if already no object is left --> return
# if only one single object is found, it has to be checked whether all criteria are fulfilled
- if len(ret_db_object) == 0:
+ if not ret_db_object:
return None
# filter for criterion 14.5
@@ -408,11 +402,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
if not (db_object.provider_version == provider_version):
ret_db_object.pop(counter)
else:
- counter = counter + 1
+ counter += 1
# if already no object is left --> return
# if only one single object is found, it has to be checked whether all criteria are fulfilled
- if len(ret_db_object) == 0:
+ if not ret_db_object:
return None
# filter for criterion 14.6
@@ -424,11 +418,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
if not (db_object.data_origin_type == data_origin_type_num):
ret_db_object.pop(counter)
else:
- counter = counter + 1
+ counter += 1
# if already no object is left --> return
# if only one single object is found, it has to be checked whether all criteria are fulfilled
- if len(ret_db_object) == 0:
+ if not ret_db_object:
return None
# filter for criterion 14.7
@@ -440,11 +434,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
if not (db_object.data_origin == data_origin_num):
ret_db_object.pop(counter)
else:
- counter = counter + 1
+ counter += 1
# if already no object is left --> return
# if only one single object is found, it has to be checked whether all criteria are fulfilled
- if len(ret_db_object) == 0:
+ if not ret_db_object:
return None
# filter for criterion 14.8
@@ -455,11 +449,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
if not (db_object.sampling_height == sampling_height):
ret_db_object.pop(counter)
else:
- counter = counter + 1
+ counter += 1
# if already no object is left --> return
# if only one single object is found, it has to be checked whether all criteria are fulfilled
- if len(ret_db_object) == 0:
+ if not ret_db_object:
return None
# filter for criterion 14.9
@@ -470,14 +464,13 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
if not (db_object.label == label):
ret_db_object.pop(counter)
else:
- counter = counter + 1
+ counter += 1
# check that only one object is left!!!
# adapt mismatches for return value
- if len(ret_db_object) == 0:
+ if not ret_db_object:
ret_db_object = None
- else:
- if len(ret_db_object) == 1:
+ elif len(ret_db_object) == 1:
ret_db_object = ret_db_object[0]
# there is a mismatch with additional_metadata
ret_db_object.additional_metadata = clean_additional_metadata(ret_db_object.additional_metadata)
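Aside (not part of the commit): the recurring change in this file swaps explicit length checks for truthiness tests; since SQLAlchemy's .all() returns a plain Python list, the two emptiness checks are equivalent:

# Illustration only: len(result) == 0 and not result agree for any list.
for result in ([], ["ts1"], ["ts1", "ts2"]):
    assert (len(result) == 0) == (not result)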
@@ -57,9 +57,7 @@ def search_all_timeseries_aggregations(request: Request, db: Session = Depends(g
db, path_params=request.path_params, signs=signs, query_params_list=query_params
)
else:
- updated_query_params = get_query_params(request.url.query)
- return crud.search_all(db, path_params=request.path_params, query_params=updated_query_params)
+ return search_all_timeseries(request, db)
#get all entries of table timeseries
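Aside (not part of the commit): the aggregation endpoint now falls back to the plain timeseries handler instead of rebuilding the query parameters and calling crud.search_all itself. A minimal FastAPI sketch of that delegation pattern; the route paths, get_db, and the placeholder condition are hypothetical, only the fallback call matches the diff:

# Illustration only: one path-operation function reusing another as a fallback.
from fastapi import Depends, FastAPI, Request
from sqlalchemy.orm import Session

app = FastAPI()

def get_db():                                      # hypothetical session dependency
    ...

@app.get("/timeseries/")
def search_all_timeseries(request: Request, db: Session = Depends(get_db)):
    ...                                            # ordinary search over the timeseries table

@app.get("/search_aggregations/")
def search_all_timeseries_aggregations(request: Request, db: Session = Depends(get_db)):
    has_aggregation_params = False                 # placeholder for the real check in the diff
    if has_aggregation_params:
        ...                                        # aggregated search branch (collapsed in the diff)
    else:
        return search_all_timeseries(request, db)  # delegate instead of re-deriving query_params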