diff --git a/harvesting/get_UBA_NRT_data_withAutoInsert.py b/harvesting/get_UBA_NRT_data_withAutoInsert.py
index e2ba1674509c43da1a8584f8c076dcf5b43b9b10..ff9999a2d036c17314dba215b4534c8fdf603c97 100644
--- a/harvesting/get_UBA_NRT_data_withAutoInsert.py
+++ b/harvesting/get_UBA_NRT_data_withAutoInsert.py
@@ -6,17 +6,18 @@
 #!/usr/bin/python
 import os
 import urllib
-import psycopg2
 import csv
 import re
 import datetime as dt
 import sys
 import operator
 import requests
+import json
 
 dir_name = "/home/s.schroeder/UBA_NRT"
 TOAR_SERVICE_URL = 'https://join-dev.fz-juelich.de:8443/'
 insecure_ssl=False
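+# note: insecure_ssl is handed to requests' verify= parameter, so False disables certificate verification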
+headers = {'Content-Type': 'application/json'}
 
 url_link_stationsparameter = "wget --user Luftdaten --password 2jNmbXCjbh7xyCJqsNPaKMd4d http://www.luftdaten.umweltbundesamt.de/files/StationparameterMeta.csv >/dev/null 2>&1"
 url_link_stations = "wget --user Luftdaten --password 2jNmbXCjbh7xyCJqsNPaKMd4d http://www.luftdaten.umweltbundesamt.de/files/StationMeta.csv >/dev/null 2>&1"
@@ -71,18 +72,18 @@ PARAMETERS_DICT_en = {
 }
 
 TYPES_DICT = {
-    'Hintergrund': 'background',
-    'Industrie': 'industrial',
-    'Verkehr': 'traffic'
+    'Hintergrund': 'Background',
+    'Industrie': 'Industrial',
+    'Verkehr': 'Traffic'
 }
 
 TYPES_OF_AREA_DICT = {
-    u'l\xe4ndlich abgelegen': 'rural',
-    u'l\xe4ndliches Gebiet': 'rural',
-    u'l\xe4ndlich regional': 'rural',
-    u'l\xe4ndlich stadtnah': 'rural',
-    u'st\xe4dtisches Gebiet': 'urban',
-    u'vorst\xe4dtisches Gebiet': 'suburban'
+    u'l\xe4ndlich abgelegen': 'Rural',
+    u'l\xe4ndliches Gebiet': 'Rural',
+    u'l\xe4ndlich regional': 'Rural',
+    u'l\xe4ndlich stadtnah': 'Rural',
+    u'st\xe4dtisches Gebiet': 'Urban',
+    u'vorst\xe4dtisches Gebiet': 'Suburban'
 }
 
 
@@ -169,16 +170,16 @@ dstates["UB"] = ("unknown", "UBA", "Umweltbundesamt")
 
 
 
-def insert_one_station(station_code_toInsert, io_handler = None):
+def insert_one_station(station_code_toInsert):
     network_name = 'UBA'
     station_name = "unknown"
     station_country = "Germany"
     station_lat = None
     station_lon = None
     station_alt = None
-    station_category = "unknown"
-    station_type = "unknown"
-    station_type_of_area = "unknown"
+    station_category = "Unknown"
+    station_type = "Unknown"
+    station_type_of_area = "Unknown"
     station_timeshift = 0.
     station_state = "unknown"
     station_timezone = "Europe/Berlin"
@@ -207,43 +208,63 @@ def insert_one_station(station_code_toInsert, io_handler = None):
 
         station_state = dstates[station_code[2:4]][0]
 
-        # create station tuple
-        st = (None, network_name, station_code, station_local_id,
-              station_type, station_type_of_area, station_category,
-              station_name, station_country, station_state,
-              station_lon, station_lat, station_alt, station_timezone)
+        # create stationmeta dictionary
+        entry = {}
+        entry['codes'] = [ station_code ]
+        entry['name'] = station_name
+        entry['coordinates'] = {}
+        entry['coordinates']['lat'] = station_lat
+        entry['coordinates']['lng'] = station_lon
+        entry['coordinates']['alt'] = station_alt
+        entry['coordinate_validation_status'] = 'NotChecked'
+        entry['country'] = station_country
+        entry['state'] = station_state
+        entry['type_of_environment'] = station_type
+        entry['type_of_area'] = station_type_of_area
+        entry['timezone'] = station_timezone
+        entry['additional_metadata'] = "{}"
+        entry['globalmeta'] = {}
+        entry['globalmeta']['toar1_category'] = station_category
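+        # the stationmeta endpoint expects the payload nested under the key 'stationmeta'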
+        stationmeta_data = {}
+        stationmeta_data['stationmeta'] = entry
 
         # insert values in database
-        if io_handler is not None:
-            # changes are automatically committed (commit=True is default)
-            success = io_handler.update_stations(*st, noclobber = True)
-            print("inserted new station %s into database!" % (station_code,))
-            infile.close()
-            return
+        r = requests.post(TOAR_SERVICE_URL + 'stationmeta/',
+                          data=json.dumps(stationmeta_data),
+                          headers=headers, verify=insecure_ssl)
+        print("inserted new station %s into database!" % (station_code,))
+        infile.close()
+        return
     infile.close()
     return
 
 
-def insert_parameter_series(missing_station_code,parameter,io_handler):
+def insert_parameter_series(missing_station_code, variable_id, parameter):
 
     #set missing fields
-    numid = io_handler.get_station_code(station_code=missing_station_code, network_name="UBA")[0]
+    r = requests.get(TOAR_SERVICE_URL + f'stationmeta/{missing_station_code}', verify=insecure_ssl)
+    data = r.json()
+    numid = data['id']
     parameter_attribute = ""
     parameter_contributor_shortname = dstates[missing_station_code[2:4]][1]
-    parameter_dataset_type = "hourly"
+    parameter_dataset_type = "Hourly"
     parameter_label_values = [parameter_attribute,
                               parameter_contributor_shortname,
                               parameter_dataset_type,
                              ]
-    parameter_label = io_handler.update_parameter_labels(numid, parameter, parameter_label_values)
+    #parameter_label = io_handler.update_parameter_labels(numid, parameter, parameter_label_values)
+    # preliminary: label determination is not yet available via the REST interface
+    parameter_label = ''
     parameter_sampling_type = "continuous"
-    parameter_original_units = UBA_PARAMETER_UNITS[parameter]
+    # original_units is no longer a column of the timeseries table
+    #parameter_original_units = UBA_PARAMETER_UNITS[parameter]
     parameter_calibration = ""
     parameter_contributor = dstates[missing_station_code[2:4]][2]
     parameter_contributor_country = "Germany"
     parameter_status = 0
     comments = ""
-    creation_date = dt.datetime.now()
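+    # timestamps are serialised as strings for the JSON payload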
+    creation_date = dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
     modification_date = creation_date
     # Add default start and end date temporarily and
     # then update it later once the data is inserted
@@ -270,73 +291,117 @@ def insert_parameter_series(missing_station_code,parameter,io_handler):
                     parameter_sampling_type = "continuous"
                 parameter_measurement_method = values[11].strip()
 
-                # create parameter series tuple
-                pst = (numid, parameter_label, parameter, None, parameter_attribute,
-                       parameter_sampling_type, parameter_measurement_method,
-                       parameter_original_units, parameter_calibration,
-                       parameter_contributor_shortname, parameter_contributor,
-                       parameter_contributor_country, parameter_dataset_type,
-                       parameter_status, comments, creation_date,
-                       modification_date, data_start_date, data_end_date)
+                # create timeseries dictionary
+                entry = {}
+                entry['label'] = parameter_label
+                entry['order'] = 1
+                entry['access_rights'] = "ByAttribution"
+                entry['sampling_frequency'] = parameter_dataset_type
+                entry['aggregation'] = "Mean1Of2"
+                entry['source'] = "Measurement"
+                entry['data_start_date'] = data_start_date
+                entry['data_end_date'] = data_end_date
+                # measurement method 'Haarhygrometer' (hair hygrometer) is not in the controlled vocabulary
+                if parameter_measurement_method == 'Haarhygrometer':
+                    entry['measurement_method'] = 'UnknownInstrument'
+                else:
+                    entry['measurement_method'] = parameter_measurement_method
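+                # assumed default sampling height of 2 m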
+                entry['sampling_height'] = 2
+                entry['date_added'] = creation_date
+                entry['date_modified'] = modification_date
+                entry['station_id'] = numid
+                entry['variable_id'] = variable_id
+                # get roles: resolve contact ids from organisation long names via the contacts endpoint
+                r = requests.get(TOAR_SERVICE_URL + 'contacts/orga_name/Umweltbundesamt', verify=insecure_ssl)
+                data = r.json()
+                contact_id_resource_provider = data['id']
+                r = requests.get(TOAR_SERVICE_URL + f'contacts/orga_name/{parameter_contributor}', verify=insecure_ssl)
+                data = r.json()
+                contact_id_contributor = data['id']
+                entry['roles'] = [{"role": "ResourceProvider", "contact_id": contact_id_resource_provider, "status": "active"},
+                                  {"role": "Contributor", "contact_id": contact_id_contributor, "status": "active"}]
+                entry['additional_metadata'] = "{}"
+                timeseries_data = {}
+                timeseries_data['timeseries'] = entry
 
                 # insert values in database
-                if io_handler is not None:
-                    success = io_handler.update_parameter_series(*pst, noclobber = True)
-                    if success:
-                        io_handler.commit()
-                        print("commited new parameter_series for %s and %s!" % (station_code,parameter))
+                r = requests.post(TOAR_SERVICE_URL + 'timeseries/',
+                                  data=json.dumps(timeseries_data),
+                                  headers=headers, verify=insecure_ssl)
+                print("data added: ", timeseries_data)
                 infile.close()
                 return
     infile.close()
     return
 
 
-def insert_invented_parameter_series(missing_station_code,parameter,io_handler):
+def insert_invented_parameter_series(missing_station_code, variable_id, parameter):
 
     #set missing fields
-    numid = io_handler.get_station_code(station_code=missing_station_code, network_name="UBA")[0]
+    r = requests.get(TOAR_SERVICE_URL + f'stationmeta/{missing_station_code}', verify=insecure_ssl)
+    data = r.json()
+    numid = data['id']
     parameter_attribute = ""
     parameter_contributor_shortname = dstates[missing_station_code[2:4]][1]
-    parameter_dataset_type = "hourly"
+    parameter_dataset_type = "Hourly"
     parameter_label_values = [parameter_attribute,
                               parameter_contributor_shortname,
                               parameter_dataset_type,
                              ]
-    parameter_label = io_handler.update_parameter_labels(numid, parameter, parameter_label_values)
+    #parameter_label = io_handler.update_parameter_labels(numid, parameter, parameter_label_values)
+    # preliminary: label determination is not yet available via the REST interface
+    parameter_label = ''
     parameter_sampling_type = "continuous"
     parameter_original_units = UBA_PARAMETER_UNITS[parameter]
     parameter_calibration = ""
     parameter_contributor = dstates[missing_station_code[2:4]][2]
     parameter_contributor_country = "Germany"
-    parameter_measurement_method = "unknown"
+    parameter_measurement_method = "UnknownInstrument"
     parameter_status = 2
     comments = ""
-    creation_date = dt.datetime.now()
+    creation_date = dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
     modification_date = creation_date
     # Add default start and end date temporarily and
     # then update it later once the data is inserted
     data_start_date = "1900-01-01 00:00:00"
     data_end_date = "1900-01-01 00:00:00"
 
-    # create parameter series tuple
-    pst = (numid, parameter_label, parameter, None, parameter_attribute,
-           parameter_sampling_type, parameter_measurement_method,
-           parameter_original_units, parameter_calibration,
-           parameter_contributor_shortname, parameter_contributor,
-           parameter_contributor_country, parameter_dataset_type,
-           parameter_status, comments, creation_date,
-           modification_date, data_start_date, data_end_date)
-
+    # create timeseries dictionary
+    entry = {}
+    entry['label'] = parameter_label
+    entry['order'] = 1
+    entry['access_rights'] = "ByAttribution"
+    entry['sampling_frequency'] = parameter_dataset_type
+    entry['aggregation'] = "Mean1Of2"
+    entry['source'] = "Measurement"
+    entry['data_start_date'] = data_start_date
+    entry['data_end_date'] = data_end_date
+    entry['measurement_method'] = parameter_measurement_method
+    entry['sampling_height'] = 2
+    entry['date_added'] = creation_date
+    entry['date_modified'] = modification_date
+    entry['station_id'] = numid
+    entry['variable_id'] = variable_id
+    # get roles: resolve contact ids from organisation long names via the contacts endpoint
+    r = requests.get(TOAR_SERVICE_URL + 'contacts/orga_name/Umweltbundesamt', verify=insecure_ssl)
+    data = r.json()
+    contact_id_resource_provider = data['id']
+    r = requests.get(TOAR_SERVICE_URL + f'contacts/orga_name/{parameter_contributor}', verify=insecure_ssl)
+    data = r.json()
+    contact_id_contributor = data['id']
+    entry['roles'] = [{"role": "ResourceProvider", "contact_id": contact_id_resource_provider, "status": "active"},
+                      {"role": "Contributor", "contact_id": contact_id_contributor, "status": "active"}]
+    entry['additional_metadata'] = "{}"
+    timeseries_data = {}
+    timeseries_data['timeseries'] = entry
     # insert values in database
-    if io_handler is not None:
-        success = io_handler.update_parameter_series(*pst, noclobber = True)
-        if success:
-            io_handler.commit()
-            print("commited new parameter_series for %s and %s!" % (missing_station_code,parameter))
-    return
+    r = requests.post(TOAR_SERVICE_URL + 'timeseries/',
+                      data=json.dumps(timeseries_data),
+                      headers=headers, verify=insecure_ssl)
+    print("data added: ", timeseries_data)
 
 
-def read_missing_stations(filename, io_handler = None):
+def read_missing_stations(filename):
     """Read all stations data from stations table, add missing fields and
     its default values and then returns stations dictionary"""
 
@@ -355,11 +420,15 @@ def read_missing_stations(filename, io_handler = None):
         # one station_code might report multiple parameters
         # ==> insert station only once
         if not missing_station_code in missing_station_codes:
-            insert_one_station(missing_station_code, io_handler)
+            insert_one_station(missing_station_code)
             missing_station_codes |= { missing_station_code }
         #also add missing parameter_series
         # ==> add parameter_series for every parameter
-        insert_parameter_series(missing_station_code,parameter,io_handler)
+        # get variable_id
+        r = requests.get(TOAR_SERVICE_URL + f'variables/{parameter}', verify=insecure_ssl)
+        data = r.json()
+        variable_id = data['id']
+        insert_parameter_series(missing_station_code, variable_id, parameter)
 
 
 def apply_filters(parameter, value):
@@ -390,12 +459,12 @@ def apply_filters(parameter, value):
             db_val = formula[0](value, formula[1])
         else:
             db_val = value
-        db_flag = WMO_QUALITY_FLAG["OKPreliminary"]
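+        # flags are now passed to the REST API by name instead of numeric WMO code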
+        db_flag = "OKPreliminary"
     return db_val, db_flag
 
             
 #add data into the database
-def addData(db, csv_file):
+def addData(csv_file):
 
     minor=dt.datetime.now().strftime("%Y%m%d%H%M%S")
     db_version=f'000000.000001.{minor}'
@@ -479,30 +548,20 @@ def addData(db, csv_file):
                 data = r.json()
                 if 'detail' in data:
                     # series not present yet (new species for the station)
-                    insert_parameter_series(station_code,species,db)
-                    # now get series_id from freshly inserted parameter_series
-                    res = db.search(network_name='UBA',
-                                    station_code=station_code,
-                                    parameter_name=species,
-                                    parameter_contributor_shortname=parameter_contributor_shortname,
-                                    parameter_dataset_type = 'hourly',
-                                    columns='id')
+                    insert_parameter_series(station_code, variable_id, species)
+                    # now get the series_id of the freshly inserted timeseries via its unique constraint
+                    r = requests.get(TOAR_SERVICE_URL + f'timeseries/unique/?station_id={numid}&variable_id={variable_id}&resource_provider=UBA&label={label}', verify=insecure_ssl)
+                    data = r.json()
                     try:
-                        series_id = res[0].id[0]
+                        series_id = data['id']
                         newStations[series_id] = dt.datetime(int(9999),  int(12),  int(31))
-                    except IndexError:
+                    except KeyError:
                         print("parameter_series %s for station %s not reported in 'StationparameterMeta.csv'!" % (species,station_code))
                         print("now inventing metadata for parameter_series %s for station %s!" % (species,station_code))
                         # series not present yet (new species for the station -- metadata to be invented!)
-                        insert_invented_parameter_series(station_code,species,db)
-                        # now get series_id from freshly inserted parameter_series
-                        res = db.search(network_name='UBA',
-                                        station_code=station_code,
-                                        parameter_name=species,
-                                        parameter_contributor_shortname=parameter_contributor_shortname,
-                                        parameter_dataset_type = 'hourly',
-                                        columns='id')
-                        series_id = res[0].id[0]
+                        insert_invented_parameter_series(station_code, variable_id, species)
+                        # again fetch the series_id of the freshly inserted timeseries
+                        r = requests.get(TOAR_SERVICE_URL + f'timeseries/unique/?station_id={numid}&variable_id={variable_id}&resource_provider=UBA&label={label}', verify=insecure_ssl)
+                        data = r.json()
+                        series_id = data['id']
                         newStations[series_id] = dt.datetime(int(9999),  int(12),  int(31))
                 series_id = data['id']
                 val_arr = []
@@ -544,39 +603,52 @@ def addData(db, csv_file):
                         if db_val is not None:
                             db_datetime = date_obj + dt.timedelta(hours = (i-3)/2)
                             # insert/update in database
-                            r = requests.post(TOAR_SERVICE_URL + f'data/record/?series_id={series_id}&datetime={db_datetime}&value={db_val}&flag={db_flag}&version={db_version}',verify=insecure_ssl)
-                            data = r.json()
-                            if (series_id in updatedStations):
-                                updatedStations[series_id] = max(db_datetime,updatedStations[series_id])
-                            else:
-                                updatedStations[series_id] = db_datetime
-                            if (series_id in newStations):
-                                newStations[series_id] = min(db_datetime,newStations[series_id])
+                            # insert: OK
+                            # update: fails with a unique-constraint violation (could be handled via PATCH, but the data is preliminary anyway)
+                            try:
+                                r = requests.post(TOAR_SERVICE_URL + f'data/record/?series_id={series_id}&datetime={db_datetime}&value={db_val}&flag={db_flag}&version={db_version}',verify=insecure_ssl)
+                                data = r.json()
+                                if (series_id in updatedStations):
+                                    updatedStations[series_id] = max(db_datetime,updatedStations[series_id])
+                                else:
+                                    updatedStations[series_id] = db_datetime
+                                if (series_id in newStations):
+                                    newStations[series_id] = min(db_datetime,newStations[series_id])
+                            except Exception:
+                                # ignore failed updates of already existing records
+                                pass
     if not lfirstmiss:
         outstationfile.close()
         outdatafile.close()
         # insert missing stations automatically
-        read_missing_stations(outstationfilename, db)
+        read_missing_stations(outstationfilename)
         # still todo: insert missing stations' data automatically
         #read_missing_data(outdatafilename, db)
-    #commit all changes at the end of the program at once! (issue of run time)
-    db.commit()
     newStations2={ k:v for k, v in newStations.items() if v != dt.datetime(9999, 12, 31, 0, 0)}
     return updatedStations, newStations2
 
 
-def update_parameter_series_dates(db, updatedStations, newStations):
+def update_parameter_series_dates(updatedStations, newStations):
     """Update data_end_date in parameter_series table
     for all updated UBA stations and its species """
 
     for series_id, data_end_date in updatedStations.items():
-        db.update_parameter_series_dates(series_id = series_id,
-                                         data_end_date = data_end_date)
+        entry = {}
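+        # '+00' marks the timestamps explicitly as UTC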
+        entry['data_end_date'] = data_end_date.strftime("%Y-%m-%d %H:%M:%S+00")
+        timeseries_data = {}
+        timeseries_data['timeseries'] = entry
+        # NOLOG: We do not want a changelog entry for NRT data
+        r = requests.patch(TOAR_SERVICE_URL + f'timeseries/{series_id}?description=NOLOG',
+                           data=json.dumps(timeseries_data),
+                           headers=headers, verify=insecure_ssl)
     for series_id, data_start_date in newStations.items():
-        db.update_parameter_series_dates(series_id = series_id,
-                                         data_start_date = data_start_date)
-    # commit changes at end
-    db.commit()
+        entry = {}
+        entry['data_start_date'] = data_start_date.strftime("%Y-%m-%d %H:%M:%S+00")
+        timeseries_data = {}
+        timeseries_data['timeseries'] = entry
+        # NOLOG: We do not want a changelog entry for NRT data
+        r = requests.patch(TOAR_SERVICE_URL + f'timeseries/{series_id}?description=NOLOG',
+                           data=json.dumps(timeseries_data),
+                           headers=headers, verify=insecure_ssl)
 
 
 # download files for given urls
@@ -603,29 +675,24 @@ def download():
     return dest
 
 if __name__ == "__main__":
-    # connect to database
-    with psycopg2.connect(host="zam10116.zam.kfa-juelich.de", dbname='toardb_v2', user='toarcurator') as db:
-
-        #next three lines to be deleted
-        csv_file='uba_20201218.csv'
-        updatedStations, newStations = addData(db, csv_file)
-        exit()
-        #remove old file StationparameterMeta.csv from previous download
-        if os.path.exists('StationparameterMeta.csv'):
-            os.remove('StationparameterMeta.csv')
-        # download csv files from uba site
-        csv_file = download()
-        if csv_file != "":
-            if os.path.isfile(csv_file):
-                updatedStations, newStations = addData(db, csv_file)
-                # update new data range in parameter_series 
-                update_parameter_series_dates(db,updatedStations,newStations)
-        #at the end remove downloaded files
-        #don't remove StationparameterMeta.csv (it's needed for check_for_unique_parameters.sh)
-        try:
-            if os.path.exists(csv_file):
-                os.remove(csv_file)
-            if os.path.exists('StationMeta.csv'):
-                os.remove('StationMeta.csv')
-        except OSError:
-            pass
+    # direct login to the database is no longer needed (now using REST with Unity.idm)
+
+    #remove old file StationparameterMeta.csv from previous download
+    if os.path.exists('StationparameterMeta.csv'):
+        os.remove('StationparameterMeta.csv')
+    # download csv files from uba site
+    csv_file = download()
+    if csv_file != "":
+        if os.path.isfile(csv_file):
+            updatedStations, newStations = addData(csv_file)
+            # update new data range in parameter_series
+            update_parameter_series_dates(updatedStations,newStations)
+    #at the end remove downloaded files
+    #don't remove StationparameterMeta.csv (it's needed for check_for_unique_parameters.sh)
+    try:
+        if os.path.exists(csv_file):
+            os.remove(csv_file)
+        if os.path.exists('StationMeta.csv'):
+            os.remove('StationMeta.csv')
+    except OSError:
+        pass
diff --git a/toardb/contacts/contacts.py b/toardb/contacts/contacts.py
index a2dd70a15ddd34cebc86fcbcffc2aa6419627b80..8a87a0543e646d75e4453a7b44751f01b4dd1696 100644
--- a/toardb/contacts/contacts.py
+++ b/toardb/contacts/contacts.py
@@ -86,3 +86,9 @@ def get_all_contacts(contact_id: int, db: Session = Depends(get_db)):
     contact = crud.get_contact(db, contact_id=contact_id)
     return contact
 
+#get a single entry of table contacts (given by its organisation's long name)
+@router.get('/contacts/orga_name/{name}', response_model=schemas.Contact)
+def get_contact_by_orga_name(name: str, db: Session = Depends(get_db)):
+    contact = crud.get_contact_by_orga_name(db, name=name)
+    return contact
+
diff --git a/toardb/contacts/crud.py b/toardb/contacts/crud.py
index 737e3cd1bc9b78d0a208944d5a4684376f0028a1..1e3e2cd9d682c0a1ab413086773381d4fdfacf42 100644
--- a/toardb/contacts/crud.py
+++ b/toardb/contacts/crud.py
@@ -65,3 +65,7 @@ def get_all_contacts(db: Session, skip : int = 0, limit: int = None):
 
 def get_contact(db: Session, contact_id: int):
     return db.query(models.Contact).filter(models.Contact.id == contact_id).first()
+
+def get_contact_by_orga_name(db: Session, name: str):
+    db_object = db.query(models.Organisation).filter(models.Organisation.longname == name).first()
+    if db_object is None:
+        return None
+    return db.query(models.Contact).filter(models.Contact.organisation_id == db_object.id).first()
diff --git a/toardb/contacts/schemas.py b/toardb/contacts/schemas.py
index da376c4db6a4d74d6d1e4a71cea7dc180dc37f7d..deb397a4d7795b8cc0d8f9e929d66340c221da27 100644
--- a/toardb/contacts/schemas.py
+++ b/toardb/contacts/schemas.py
@@ -69,10 +69,17 @@ class Person(PersonBase):
 
 # ======== for nested view =========
 
-class Contact(BaseModel):
+class ContactBase(BaseModel):
+    id: int = None
     person: Person
     organisation: Organisation
 
     class Config:
         orm_mode = True
 
+class Contact(ContactBase):
+    id: int
+
+    class Config:
+        orm_mode = True
+
diff --git a/toardb/stationmeta/crud.py b/toardb/stationmeta/crud.py
index 58e4c077727f85b6e177d207c86c03db7b05ca28..748c97ccc4c2ba06a01a9913355f22324fdb2f84 100644
--- a/toardb/stationmeta/crud.py
+++ b/toardb/stationmeta/crud.py
@@ -16,7 +16,7 @@ from fastapi.responses import JSONResponse
 from . import models
 from .models import StationmetaCore, StationmetaChangelog, stationmeta_core_stationmeta_roles_table, \
                     stationmeta_core_stationmeta_annotations_table, \
-                    CZ_enum, CV_enum, ST_enum, TA_enum
+                    CZ_enum, CV_enum, ST_enum, TA_enum, TC_enum
 from toardb.generic.models import RS_enum, RC_enum, AK_enum, CL_enum
 from .schemas import get_coordinates_from_geom, get_geom_from_coordinates, StationmetaCreate, StationmetaPatch, Coordinates
 from pydantic import ValidationError
@@ -96,7 +96,7 @@ def get_unique_stationmeta_annotation(db: Session, text: str, contributor_id: in
 
 
 # this is just to fake what would be done! This is a dry-run!
-def create_stationmeta(db: Session, engine: Engine, stationmeta: StationmetaCreate):
+def create_stationmeta_dryrun(db: Session, engine: Engine, stationmeta: StationmetaCreate):
     stationmeta_dict = stationmeta.dict()
     roles_data         = stationmeta_dict.pop('roles', None)
     annotations_data   = stationmeta_dict.pop('annotations', None)
@@ -133,7 +133,7 @@ def create_stationmeta(db: Session, engine: Engine, stationmeta: StationmetaCrea
     status_code=200
     return JSONResponse(status_code=status_code, content=message)
 
-def create_stationmeta_theOriginal(db: Session, engine: Engine, stationmeta: StationmetaCreate):
+def create_stationmeta(db: Session, engine: Engine, stationmeta: StationmetaCreate):
     stationmeta_dict = stationmeta.dict()
     roles_data         = stationmeta_dict.pop('roles', None)
     annotations_data   = stationmeta_dict.pop('annotations', None)
@@ -153,7 +153,7 @@ def create_stationmeta_theOriginal(db: Session, engine: Engine, stationmeta: Sta
     db_stationmeta.additional_metadata = str(db_stationmeta.additional_metadata).replace("'",'"')
     fake_conn = engine.raw_connection()
     fake_cur = fake_conn.cursor()
-    radius = 100
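+    # radius in metres for the duplicate-station check (ST_DistanceSphere)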
+    radius = 10
     db_cmd = f"select * from stationmeta_core where ST_DistanceSphere(stationmeta_core.coordinates, '{db_stationmeta.coordinates}') < {radius}"
     fake_cur.execute(db_cmd)
     records = fake_cur.fetchall()
@@ -168,10 +168,11 @@ def create_stationmeta_theOriginal(db: Session, engine: Engine, stationmeta: Sta
     else:
         db_stationmeta.type_of_environment = get_value_from_str(ST_enum,db_stationmeta.type_of_environment)
         db_stationmeta.type_of_area = get_value_from_str(TA_enum,db_stationmeta.type_of_area)
+        db_stationmeta.coordinate_validation_status = get_value_from_str(CV_enum,db_stationmeta.coordinate_validation_status)
         db.add(db_stationmeta)
         result = db.commit()
         db.refresh(db_stationmeta)
-        # get staionmeta_core_id
+        # get stationmeta_core_id
         stationmeta_core_id = db_stationmeta.id
         # store roles and update association table
         if roles_data:
@@ -234,6 +235,8 @@ def create_stationmeta_theOriginal(db: Session, engine: Engine, stationmeta: Sta
             db_global = models.StationmetaGlobal(**globalmeta_data)
             if db_global.climatic_zone:
                 db_global.climatic_zone = get_value_from_str(CZ_enum,db_global.climatic_zone)
+            if db_global.toar1_category:
+                db_global.toar1_category = get_value_from_str(TC_enum,db_global.toar1_category)
             db_global.station_id = stationmeta_core_id
             db.add(db_global)
             db.commit()
diff --git a/toardb/stationmeta/schemas.py b/toardb/stationmeta/schemas.py
index b7bb7ad776ba3a49fa7838f56967bba77db4d973..cee9f657973884c7f6f2153dddee8129be3c8d4e 100644
--- a/toardb/stationmeta/schemas.py
+++ b/toardb/stationmeta/schemas.py
@@ -381,7 +381,7 @@ class StationmetaGlobalBaseNested(BaseModel):
     etopo_min_alt_5km: float  = None
     etopo_relative_alt: float = None
     dominant_landcover_year2012: int = None
-    toar1_category: int = None
+    toar1_category: str = None
 
 
 class StationmetaGlobalNestedCreate(StationmetaGlobalBaseNested):
diff --git a/toardb/stationmeta/stationmeta.py b/toardb/stationmeta/stationmeta.py
index 6fe2110f0477494144356379dc02bf738b5df5a2..ec7f50df79341c096967c2eb52b67d06a947564b 100644
--- a/toardb/stationmeta/stationmeta.py
+++ b/toardb/stationmeta/stationmeta.py
@@ -67,11 +67,11 @@ def create_stationmeta_core(stationmeta: schemas.StationmetaCreate = Body(..., e
         if db_stationmeta_core:
             raise HTTPException(status_code=400, detail="Station already registered.")
 # the original post command!
-#   return crud.create_stationmeta(db=db, engine=engine, stationmeta=stationmeta)
+    return crud.create_stationmeta(db=db, engine=engine, stationmeta=stationmeta)
 # now the dry run!
-    response=crud.create_stationmeta(db=db, engine=engine, stationmeta=stationmeta)
-    msg = response.body.decode('utf-8')
-    raise HTTPException(status_code=200, detail=str(msg))
+#   response=crud.create_stationmeta(db=db, engine=engine, stationmeta=stationmeta)
+#   msg = response.body.decode('utf-8')
+#   raise HTTPException(status_code=200, detail=str(msg))
 
 @router.patch('/stationmeta/{station_code}', response_model=schemas.StationmetaPatch)
 def patch_stationmeta_core(station_code: str, description: str, stationmeta: schemas.StationmetaPatch = Body(..., embed = True), db: Session = Depends(get_db)):
diff --git a/toardb/timeseries/crud.py b/toardb/timeseries/crud.py
index c4868d57344225b527a4236f0b635ee230b20c20..569aacd8e7ddc915390e2d4082a25af289dd4234 100644
--- a/toardb/timeseries/crud.py
+++ b/toardb/timeseries/crud.py
@@ -16,7 +16,7 @@ from toardb.stationmeta.models import StationmetaCore
 from toardb.stationmeta.schemas import get_coordinates_from_geom, get_geom_from_coordinates
 from toardb.generic.models import RS_enum, RC_enum
 from toardb.contacts.crud import get_organisation_by_name, get_contact
-from .schemas import TimeseriesCreate, TimeseriesPatch
+from .schemas import TimeseriesCreate, TimeseriesPatch, TimeseriesRoleNoCreate
 from toardb.utils.utils import get_value_from_str, get_str_from_value
 
 
@@ -159,16 +159,17 @@ def patch_timeseries(db: Session, description: str, timeseries_id: int, timeseri
     timeseries_dict = timeseries.dict()
     roles_data = timeseries_dict.pop('roles', None)
     annotations_data = timeseries_dict.pop('annotations', None)
-    db_timeseries = models.Timeseries(**timeseries_dict)
-    # prepare changelog entry/entries
-    db_changelog = TimeseriesChangelog(description=description, timeseries_id=timeseries_id, author_id=1, type_of_change=1)
     db_obj = models.Timeseries(**timeseries_dict)
     db_timeseries = db.query(models.Timeseries).get(timeseries_id)
-    for k, v in timeseries_dict.items():
-        if v is not None:
-            db_changelog.old_value=str(getattr(db_timeseries,k))
-            setattr(db_timeseries,k,timeseries_dict[k])
-            db_changelog.new_value=str(getattr(db_timeseries,k))
+    # prepare changelog entry/entries
+    # a description of 'NOLOG' suppresses the changelog entry (used for NRT data)
+    no_log = (description == 'NOLOG')
+    if not no_log:
+        db_changelog = TimeseriesChangelog(description=description, timeseries_id=timeseries_id, author_id=1, type_of_change=1)
+    # apply the patch in any case; record old/new values only if a changelog is wanted
+    for k, v in timeseries_dict.items():
+        if v is not None:
+            if not no_log:
+                db_changelog.old_value = str(getattr(db_timeseries, k))
+            setattr(db_timeseries, k, timeseries_dict[k])
+            if not no_log:
+                db_changelog.new_value = str(getattr(db_timeseries, k))
     # problems with coordinates...
     db_stationmeta = db.query(StationmetaCore).get(db_timeseries.station_id)
     tmp_coordinates = db_stationmeta.coordinates
@@ -178,7 +179,7 @@ def patch_timeseries(db: Session, description: str, timeseries_id: int, timeseri
     # store roles and update association table
     if roles_data:
         for r in roles_data:
-            db_role = models.TimeseriesRole(**r)
+            db_role = TimeseriesRoleNoCreate(**r)
             db_role.role = get_value_from_str(RC_enum,db_role.role)
             db_role.status = get_value_from_str(RS_enum,db_role.status)
             # check whether role is already present in database
@@ -208,8 +209,9 @@ def patch_timeseries(db: Session, description: str, timeseries_id: int, timeseri
             db.execute(insert(timeseries_timeseries_annotations_table).values(timeseries_id=timeseries_id, annotation_id=annotation_id))
             db.commit()
     # add patch to changelog table
-    db.add(db_changelog)
-    db.commit()
+    if not no_log:
+        db.add(db_changelog)
+    db.commit()
     # there's a mismatch with coordinates --> how to automatically switch back and forth?!
     db_stationmeta.coordinates = tmp_coordinates
     # there is a mismatch with additional_metadata
diff --git a/toardb/timeseries/schemas.py b/toardb/timeseries/schemas.py
index 6be64c7c473090e4ac3967763e5414197510ba8d..8cae379d4b01e8815a03b52b6a80a1e0b985834c 100644
--- a/toardb/timeseries/schemas.py
+++ b/toardb/timeseries/schemas.py
@@ -129,6 +129,9 @@ class TimeseriesRoleBase(BaseModel):
     def check_status(cls, v):
         return tuple(filter(lambda x: x.value == int(v), RS_enum))[0].string
 
+    class Config:
+        orm_mode = True
+
 
 class TimeseriesRoleCreate(TimeseriesRoleBase):
     pass
@@ -147,6 +150,17 @@ class TimeseriesRoleCreate(TimeseriesRoleBase):
         else:
             raise ValueError(f"role status not known: {v}")
 
+    class Config:
+        orm_mode = True
+
+# reference an existing contact (via contact_id) instead of creating one while creating a timeseries!
+class TimeseriesRoleNoCreate(TimeseriesRoleCreate):
+    contact_id: int
+    contact: Contact = None
+
+    class Config:
+        orm_mode = True
+
 
 class TimeseriesRole(TimeseriesRoleBase):
     id: int
@@ -255,18 +269,21 @@ class TimeseriesPatch(BaseModel):
     date_added: dt.datetime = None
     date_modified: dt.datetime = None
     additional_metadata: Json = None
-    roles: List[TimeseriesRole] = None
-    annotations: List[TimeseriesAnnotation] = None
-    variable: Variable = None
-    station: StationmetaCoreBase = None
-    programme: TimeseriesProgramme = None
+#   roles: List[TimeseriesRole] = None
+#   annotations: List[TimeseriesAnnotation] = None
+#   variable: Variable = None
+#   station: StationmetaCoreBase = None
+#   programme: TimeseriesProgramme = None
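+    # preliminary: patch plain foreign-key ids instead of nested objects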
+    station_id: int = None
+    variable_id: int = None
+    programme_id: int = None
 
     class Config:
         orm_mode = True
 
 
 class TimeseriesCreate(TimeseriesCoreCreate):
-    roles: List[TimeseriesRoleCreate] = None
+    roles: List[TimeseriesRoleNoCreate] = None
     annotations: List[TimeseriesAnnotation] = None
 
     class Config:
diff --git a/toardb/timeseries/timeseries.py b/toardb/timeseries/timeseries.py
index 01c63d7c1e2577ab3c771b86fbf98c87aced2d43..d8d569a920f7e43c58a083e46a020d0631570ffd 100644
--- a/toardb/timeseries/timeseries.py
+++ b/toardb/timeseries/timeseries.py
@@ -9,6 +9,7 @@ from . import crud, schemas
 from toardb.utils.database import ToarDbSession, get_db
 from toardb.generic.models import RC_enum, RS_enum
 from toardb.utils.utils import get_str_from_value
+from toardb.contacts.crud import get_contact
 
 router = APIRouter()
 
@@ -46,10 +47,17 @@ def get_timeseries_changelog(timeseries_id: int, db: Session = Depends(get_db)):
 #
 #
 
-@router.post('/timeseries/', response_model=schemas.Timeseries)
+#problems with Roles!!! preliminarily return TimeseriesPatch (instead of Timeseries!)
+@router.post('/timeseries/', response_model=schemas.TimeseriesPatch)
 def create_timeseries(timeseries: schemas.TimeseriesCreate = Body(..., embed = True), db: Session = Depends(get_db)):
+    # to be done: proper resource_provider handling
+    # determine the resource provider from the submitted roles (needed for the uniqueness check)
+    resource_provider = ''
+    for role in (timeseries.roles or []):
+        if role.role == 'ResourceProvider':
+            contact = get_contact(db, contact_id=role.contact_id)
+            resource_provider = contact.organisation.longname
     db_timeseries = crud.get_timeseries_by_unique_constraints(db, station_id=timeseries.station_id,
-                         variable_id=timeseries.variable_id, label=timeseries.label)
+                         variable_id=timeseries.variable_id, label=timeseries.label, resource_provider=resource_provider)
     if db_timeseries:
         raise HTTPException(status_code=400, detail="Timeseries already registered.")
     db_timeseries=crud.create_timeseries(db=db, timeseries=timeseries)