Commit bd777a29 authored by Sabine Schröder

accidentally uploaded a preliminary version of the script; now replacing it with the latest version

parent c24b7a11
Pipeline #57228 failed
@@ -52,8 +52,7 @@ from urllib3.exceptions import InsecureRequestWarning
TOAR_SERVICE_URL = 'https://join-dev.fz-juelich.de:8443/'
-#series_ids = [21848,26030,25769,21828,48238,21919,119939,25745,21915,35528,21831,
-series_ids = [25769,21828,48238,21919,119939,25745,21915,35528,21831,
+series_ids = [21848,26030,25769,21828,48238,21919,119939,25745,21915,35528,21831,
21931,28746,28727,25444,47866,25188,25037,28707,27893,26261,26324]
# from old database:
@@ -164,6 +163,7 @@ tr_old_flags = {
# missing_value | true or false --> MissingValue
#
# not yet assigned:
# DoubtfulModified
# DoubtfulPreliminaryModified
# Changed <-> OKModified, OKPreliminaryModified ?!
#
@@ -172,23 +172,21 @@ tr_old_flags = {
# some preliminary code
tr_flagging = {
('OK',False): 'OK',
-}
-testing_code = [
-'OKPreliminary',
-'OKModified',
-'OKPreliminaryModified',
-'Inconsistent',
-'InconsistentPreliminary',
-'Doubtful',
-'DoubtfulPreliminary',
-'DoubtfulModified',
-'DoubtfulPreliminaryModified',
-'Wrong',
-'WrongPreliminary',
-'NotCheckedPreliminary',
-'Changed',
-'Estimated',
-'MissingValue' ]
+('OK',True): 'OKPreliminary',
+('changed',False): 'OKModified',
+('changed',True): 'OKPreliminaryModified',
+('inconsistent',False): 'Inconsistent',
+('inconsistent',True): 'InconsistentPreliminary',
+('doubtful',False): 'Doubtful',
+('doubtful',True): 'DoubtfulPreliminary',
+('wrong',False): 'Wrong',
+('wrong',True): 'WrongPreliminary',
+('not_checked',False): 'OK', #??????
+('not_checked',True): 'NotCheckedPreliminary',
+('estimated',False): 'Estimated',
+('estimated',True): 'Estimated',
+('missing_value',False): 'MissingValue', # just for reasons of completeness
+('missing_value',True): 'MissingValue' } # (we did not store missing values in V1)
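The dictionary above maps the V1 flag representation, a (flag name, preliminary) pair, onto a single V2 flag name. A minimal usage sketch, not part of the commit; the function name is illustrative, and the lowercase flag string plus boolean preliminary state follow the key format of the dictionary itself:

def translate_flag(old_flag, is_preliminary):
    # look up the V2 flag name for a V1 (flag, preliminary) pair
    return tr_flagging[(old_flag.strip(), is_preliminary)]

# example: translate_flag('doubtful', True) -> 'DoubtfulPreliminary'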
station_column_names = [
'numid',
@@ -296,32 +294,32 @@ if __name__ == "__main__":
# what about escaping special characters? (f. ex. apostroph)
# do we need that?
# station_name = result_dict['station_name'].strip().replace('&','%26')
-# station_name = result_dict['station_name'].strip()
-# stationmeta = {"codes": [station_code],
-# "name": station_name,
-# "coordinates": {"lat": result_dict['station_lat'],"lng": result_dict['station_lon'],"alt": result_dict['station_alt']},
-# "coordinate_validation_status": tr_coord_status[result_dict['station_coordinate_status']],
-# "country": result_dict['station_country'].strip(),
-# "state": result_dict['station_state'].strip(),
-# "type_of_environment": tr_type_of_environment[result_dict['station_type'].strip()],
-# "type_of_area": tr_type_of_area[result_dict['station_type_of_area'].strip()],
-# "timezone": result_dict['station_timezone'].strip(),
-# #to be done --> collect all data from old database!
-# "additional_metadata": "{}",
-# #to be done!
-# "roles": [],
-# "globalmeta": {"climatic_zone": tr_climatic_zone[result_dict['station_climatic_zone']]}
-# }
-# data = {"stationmeta": stationmeta}
-# r = requests.post(TOAR_SERVICE_URL + 'stationmeta/',
-# data=json.dumps(data),
-# headers=headers,verify=insecure_ssl)
-# # to do (German station are not critical!):
-# # if code already exists, is it really the same station?
-# msg = r.text
-# print(f"{msg}\n")
+station_name = result_dict['station_name'].strip()
+stationmeta = {"codes": [station_code],
+"name": station_name,
+"coordinates": {"lat": result_dict['station_lat'],"lng": result_dict['station_lon'],"alt": result_dict['station_alt']},
+"coordinate_validation_status": tr_coord_status[result_dict['station_coordinate_status']],
+"country": result_dict['station_country'].strip(),
+"state": result_dict['station_state'].strip(),
+"type_of_environment": tr_type_of_environment[result_dict['station_type'].strip()],
+"type_of_area": tr_type_of_area[result_dict['station_type_of_area'].strip()],
+"timezone": result_dict['station_timezone'].strip(),
+#to be done --> collect all data from old database!
+"additional_metadata": "{}",
+#to be done!
+"roles": [],
+"globalmeta": {"climatic_zone": tr_climatic_zone[result_dict['station_climatic_zone']]}
+}
+data = {"stationmeta": stationmeta}
+r = requests.post(TOAR_SERVICE_URL + 'stationmeta/',
+data=json.dumps(data),
+headers=headers,verify=insecure_ssl)
+# to do (German station are not critical!):
+# if code already exists, is it really the same station?
+msg = r.text
+print(f"{msg}\n")
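The block above registers the station with a POST to the stationmeta/ endpoint and prints the raw response text. A hedged sketch of an additional response check before moving on to the time series; it uses only standard requests attributes and assumes nothing about the TOAR service's response payload:

if not r.ok:
    # e.g. the station code might already exist in toardb_v2
    print(f"stationmeta POST for {station_code} returned status {r.status_code}")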
# 2. transfer given timeseries from old database to toardb_v2
# a. get metadata from old database
@@ -342,41 +340,41 @@ if __name__ == "__main__":
# d. create timeseries dictionary
# at the moment! (preliminary!!!)
-# parameter_label = ''
-# entry = {}
-# entry['label'] = parameter_label
-# entry['order'] = 1
-# entry['access_rights'] = "ByAttribution"
+parameter_label = ''
+entry = {}
+entry['label'] = parameter_label
+entry['order'] = 1
+entry['access_rights'] = "ByAttribution"
sampling_frequency = result_dict['parameter_dataset_type'].strip()
-# entry['sampling_frequency'] = tr_sampling_frequency[sampling_frequency]
-# entry['aggregation'] = "Mean"
-# entry['source'] = "Measurement"
-# entry['sampling_height'] = 2
-# entry['measurement_method'] = 'UnknownInstrument'
-# entry['data_start_date'] = result_dict['data_start_date'].strftime("%Y-%m-%d %H:%M:%S+00")
-# entry['data_end_date'] = result_dict['data_start_date'].strftime("%Y-%m-%d %H:%M:%S+00")
-# entry['date_added'] = result_dict['creation_date'].strftime("%Y-%m-%d %H:%M:%S+00")
-# entry['date_modified'] = result_dict['modification_date'].strftime("%Y-%m-%d %H:%M:%S+00")
-# entry['station_id'] = station_id
-# entry['variable_id'] = variable_id
+entry['sampling_frequency'] = tr_sampling_frequency[sampling_frequency]
+entry['aggregation'] = "Mean"
+entry['source'] = "Measurement"
+entry['sampling_height'] = 2
+entry['measurement_method'] = 'UnknownInstrument'
+entry['data_start_date'] = result_dict['data_start_date'].strftime("%Y-%m-%d %H:%M:%S+00")
+entry['data_end_date'] = result_dict['data_start_date'].strftime("%Y-%m-%d %H:%M:%S+00")
+entry['date_added'] = result_dict['creation_date'].strftime("%Y-%m-%d %H:%M:%S+00")
+entry['date_modified'] = result_dict['modification_date'].strftime("%Y-%m-%d %H:%M:%S+00")
+entry['station_id'] = station_id
+entry['variable_id'] = variable_id
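The four date fields above are formatted with strftime and a literal '+00' offset appended, i.e. they are written as UTC timestamps. A small worked example of that format with an arbitrary illustrative value:

from datetime import datetime
datetime(2012, 5, 1, 6, 30).strftime("%Y-%m-%d %H:%M:%S+00")  # -> '2012-05-01 06:30:00+00'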
# e. get/create roles (TBD!!!)
# r = requests.get(TOAR_SERVICE_URL + f'contacts/orga_name/{parameter_contributor}',verify=insecure_ssl)
# data = r.json()
# contact_id_contributor=data['id']
# entry['roles'] = [{"role": "Contributor", "contact_id": contact_id_contributor, "status": "active"}]
-# entry['roles'] = []
+entry['roles'] = []
# f. collect all additional data (TBD!!!)
-# entry['additional_metadata'] = "{}"
-# timeseries_data = {}
-# timeseries_data['timeseries'] = entry
+entry['additional_metadata'] = "{}"
+timeseries_data = {}
+timeseries_data['timeseries'] = entry
# insert values in database
-# r = requests.post(TOAR_SERVICE_URL + 'timeseries/',
-# data=json.dumps(timeseries_data),
-# headers=headers, verify=insecure_ssl)
-# print(f"data added for old time series {pid}: ", r.json())
+r = requests.post(TOAR_SERVICE_URL + 'timeseries/',
+data=json.dumps(timeseries_data),
+headers=headers, verify=insecure_ssl)
+print(f"data added for old time series {pid}: ", r.json())
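The print above assumes the response body is valid JSON (r.json()). A hedged sketch of minimal error handling around this POST, using only standard requests calls; pid comes from the surrounding script:

try:
    r.raise_for_status()   # raises requests.HTTPError on any 4xx/5xx response
except requests.HTTPError as err:
    print(f"timeseries POST for old time series {pid} failed: {err}")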
# 3. now add data itself
......