esde / toar-data / TOAR-II FastAPI / Commits

Commit 73cd3007
Authored 2 months ago by Till Hauer

Merge branch 'swt-project_wrap-up' into 'swt-project-filtering'

wrap_up

See merge request !225

Parents: c48c9f78, 110e2c00
No related tags found
Related merge requests: !227 (merge dev into testing), !226 (enable aggregated filtering), !225 (wrap_up)
Pipeline #255769 failed, 2 months ago (stage: test)

Showing 2 changed files with 39 additions and 48 deletions:
    toardb/timeseries/crud.py          +38  -45
    toardb/timeseries/timeseries.py     +1   -3
toardb/timeseries/crud.py  (+38, -45)  [view file @ 73cd3007]
@@ -33,8 +33,7 @@ import toardb
 def clean_additional_metadata(ad_met_dict):
     # all changes are permanent!
     if not isinstance(ad_met_dict, dict):
-        tmp = ad_met_dict.replace('\\"', '"')
-        tmp = tmp.replace('"', '\\"')
+        tmp = ad_met_dict.replace('"', '\\"')
         return tmp.replace("'", '"')
     # there is a mismatch with additional_metadata
     additional_metadata = ad_met_dict
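The string branch above now escapes double quotes in a single pass before converting the single-quoted (Python repr style) text to double-quoted text. A minimal, self-contained sketch of that branch (illustrative only; the helper name is hypothetical, not the repository's function):

# Hypothetical helper mirroring the string branch shown above.
def _clean_metadata_string(raw: str) -> str:
    # escape every double quote, then turn single-quoted (repr style)
    # text into double-quoted text
    tmp = raw.replace('"', '\\"')
    return tmp.replace("'", '"')

print(_clean_metadata_string("{'source': 'station \"A\"'}"))
# {"source": "station \"A\""}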
@@ -43,8 +42,7 @@ def clean_additional_metadata(ad_met_dict):
             for key2, value2 in value.items():
                 if isinstance(value2, str):
                     additional_metadata[key][key2] = value2.replace("'", "$apostroph$")
-        else:
-            if isinstance(value, str):
+        elif isinstance(value, str):
             additional_metadata[key] = value.replace("'", "$apostroph$")
     additional_metadata = str(additional_metadata).replace('"', '\\"')
     additional_metadata = str(additional_metadata).replace("'", '"')
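For dict input, apostrophes inside string values are swapped for a $apostroph$ placeholder before the dict is serialized with str() and its single quotes are converted to double quotes; without the placeholder, a literal apostrophe would turn into a stray double quote. A simplified sketch of that round trip (hypothetical helper, one nesting level only):

# Hypothetical helper; simplified to one nesting level.
def _serialize_metadata(meta: dict) -> str:
    cleaned = {}
    for key, value in meta.items():
        if isinstance(value, str):
            # protect apostrophes so the later quote swap cannot corrupt them
            cleaned[key] = value.replace("'", "$apostroph$")
        else:
            cleaned[key] = value
    text = str(cleaned).replace('"', '\\"')
    return text.replace("'", '"')

print(_serialize_metadata({"comment": "station's sensor", "version": 2}))
# {"comment": "station$apostroph$s sensor", "version": 2}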
@@ -342,7 +340,7 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
                 .filter(models.Timeseries.variable_id == variable_id).all()
     # if already not found: return None
     # if only one single object is found, it has to be checked whether all criterions are fullfilled
-    if len(ret_db_object) == 0:
+    if not ret_db_object:
         return None
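The replacement of len(ret_db_object) == 0 with not ret_db_object is purely idiomatic; for the list returned by .all() the two checks are equivalent:

rows = []                 # stand-in for the result of .all()
assert (len(rows) == 0) == (not rows)
rows = ["ts1", "ts2"]
assert (len(rows) == 0) == (not rows)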
@@ -354,35 +352,31 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
         iter_obj = ret_db_object.copy()
         counter = 0
         for db_object in iter_obj:
             found = False
             for role in db_object.roles:
                 # resource provider is always an organisation!
                 organisation = get_contact(db, contact_id=role.contact_id)
                 if ((role_num == role.role) and (organisation.longname == resource_provider)):
                     found = True
             if not found:
                 ret_db_object.pop(counter)
-            else:
-                counter = counter + 1
+                counter -= 1
+                break
+            counter += 1
     else:
         # time series that do not have a resource_provider are not identical to those who do not!
         role_num = get_value_from_str(toardb.toardb.RC_vocabulary, 'ResourceProvider')
         iter_obj = ret_db_object.copy()
         counter = 0
         for db_object in iter_obj:
             found = False
             for role in db_object.roles:
                 if (role_num == role.role):
                     found = True
             if found:
+                counter -= 1
                 ret_db_object.pop(counter)
-            else:
+                break
                 counter = counter + 1
         # if already only none object --> return
         # if only one single object is found, it has to be checked whether all criterions are fullfilled
-        if len(ret_db_object) == 0:
+        if not ret_db_object:
             return None
     # filter for criterion 14.4
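The resource-provider filter above iterates over a copy of the result list and pops non-matching entries from the original, advancing a hand-maintained index only when an entry is kept. A sketch of that keep-or-pop pattern with plain values instead of ORM objects (hypothetical helper; the list comprehension at the end gives the same result without the index bookkeeping):

# Hypothetical helper: pop non-matching items in place, advance the
# index only when an item is kept.
def filter_in_place(items, keep):
    counter = 0
    for item in items.copy():
        if keep(item):
            counter += 1          # item stays, move past it
        else:
            items.pop(counter)    # item removed, index stays put
    return items

data = [1, 2, 3, 4, 5]
filter_in_place(data, keep=lambda x: x % 2 == 1)
print(data)  # [1, 3, 5]

# Same result without manual index bookkeeping:
data2 = [x for x in [1, 2, 3, 4, 5] if x % 2 == 1]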
@@ -393,11 +387,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
             if not (db_object.sampling_frequency == sampling_frequency):
                 ret_db_object.pop(counter)
             else:
-                counter = counter + 1
+                counter += 1
     # if already only none object --> return
     # if only one single object is found, it has to be checked whether all criterions are fullfilled
-    if len(ret_db_object) == 0:
+    if not ret_db_object:
         return None
     # filter for criterion 14.5
@@ -408,11 +402,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
             if not (db_object.provider_version == provider_version):
                 ret_db_object.pop(counter)
             else:
-                counter = counter + 1
+                counter += 1
     # if already only none object --> return
     # if only one single object is found, it has to be checked whether all criterions are fullfilled
-    if len(ret_db_object) == 0:
+    if not ret_db_object:
         return None
     # filter for criterion 14.6
@@ -424,11 +418,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
             if not (db_object.data_origin_type == data_origin_type_num):
                 ret_db_object.pop(counter)
             else:
-                counter = counter + 1
+                counter += 1
     # if already only none object --> return
     # if only one single object is found, it has to be checked whether all criterions are fullfilled
-    if len(ret_db_object) == 0:
+    if not ret_db_object:
         return None
     # filter for criterion 14.7
@@ -440,11 +434,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
             if not (db_object.data_origin == data_origin_num):
                 ret_db_object.pop(counter)
             else:
-                counter = counter + 1
+                counter += 1
     # if already only none object --> return
     # if only one single object is found, it has to be checked whether all criterions are fullfilled
-    if len(ret_db_object) == 0:
+    if not ret_db_object:
         return None
     # filter for criterion 14.8
@@ -455,11 +449,11 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
             if not (db_object.sampling_height == sampling_height):
                 ret_db_object.pop(counter)
             else:
-                counter = counter + 1
+                counter += 1
     # if already only none object --> return
     # if only one single object is found, it has to be checked whether all criterions are fullfilled
-    if len(ret_db_object) == 0:
+    if not ret_db_object:
         return None
     # filter for criterion 14.9
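The filters for criteria 14.4 through 14.9 (sampling_frequency, provider_version, data_origin_type, data_origin, sampling_height, label) all repeat the same shape. A generic helper along these lines (hypothetical, not part of the commit) would express the pattern once:

# Hypothetical helper expressing the repeated attribute filter once.
def filter_by_attribute(candidates, attribute, expected):
    counter = 0
    for db_object in candidates.copy():
        if getattr(db_object, attribute) == expected:
            counter += 1
        else:
            candidates.pop(counter)
    return candidates

# e.g. filter_by_attribute(ret_db_object, "sampling_frequency", sampling_frequency)
# followed by: if not ret_db_object: return None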
@@ -470,14 +464,13 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
             if not (db_object.label == label):
                 ret_db_object.pop(counter)
             else:
-                counter = counter + 1
+                counter += 1
     # check that only one object is left!!!
     # adapt mismatches for return value
-    if len(ret_db_object) == 0:
+    if not ret_db_object:
         ret_db_object = None
-    else:
-        if len(ret_db_object) == 1:
+    elif len(ret_db_object) == 1:
         ret_db_object = ret_db_object[0]
         # there is a mismatch with additional_metadata
         ret_db_object.additional_metadata = clean_additional_metadata(ret_db_object.additional_metadata)
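After the last filter the function maps an empty result to None and unwraps a single surviving match before cleaning its additional_metadata. Sketched with a plain list in place of the ORM objects:

matches = ["timeseries_42"]   # stand-in for the surviving ORM objects
result = matches
if not matches:
    result = None
elif len(matches) == 1:
    result = matches[0]       # single hit: return the object itself, not a list
print(result)                 # timeseries_42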
toardb/timeseries/timeseries.py  (+1, -3)  [view file @ 73cd3007]
@@ -57,9 +57,7 @@ def search_all_timeseries_aggregations(request: Request, db: Session = Depends(g
             db, path_params=request.path_params, signs=signs, query_params_list=query_params)
-    else:
-        updated_query_params = get_query_params(request.url.query)
-        return crud.search_all(db, path_params=request.path_params, query_params=updated_query_params)
+    return search_all_timeseries(request, db)
 #get all entries of table timeseries
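The aggregation endpoint now falls back to the plain timeseries search instead of rebuilding the query itself. A hedged FastAPI sketch of that delegation (simplified signatures and routes; the real handlers live in toardb/timeseries/timeseries.py and go through crud.search_all):

# Sketch only: stand-in handlers with simplified signatures.
from fastapi import FastAPI, Request

app = FastAPI()

def search_all_timeseries(request: Request, db=None):
    # stand-in for the regular timeseries listing endpoint
    return {"handler": "search_all_timeseries", "query": str(request.url.query)}

@app.get("/timeseries_aggregations/")
def search_all_timeseries_aggregations(request: Request, db=None):
    # when no aggregation-specific parameters apply, delegate to the
    # plain timeseries search instead of rebuilding the query here
    return search_all_timeseries(request, db)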