diff --git a/production_tests.sh b/production_tests.sh
index 5c0837658afffab10f5d2e3c1eb564ccad7db6dc..fa097acbeab8a1321ffc5d40c999ad363cbe00f0 100755
--- a/production_tests.sh
+++ b/production_tests.sh
@@ -30,11 +30,8 @@ curl -X POST -H "Content-Type:application/json" -d '{"stationmeta": {"codes":["t
 
 curl http://127.0.0.1:8000/timeseries/
 curl http://127.0.0.1:8000/timeseries/2
-# timeseries upload without nested fields
-curl -X POST -H "Content-Type:application/json" -d '{"timeseries": {"label": "CMA2", "order": 1, "access_rights": 0, "sampling_frequency": 0, "aggregation": 0, "data_start_date": "2003-09-07T15:30:00+02:00", "data_end_date": "2016-12-31T14:30:00+01:00", "measurement_method": "UV absorption", "sampling_height": 7.0, "date_added": "2020-05-15T15:30:00+02:00", "date_modified": "2020-05-16T09:30:00+02:00", "station_id": 2, "variable_id": 7, "additional_metadata":"{}"}}' http://127.0.0.1:8000/timeseries/
-# nested upload with enum fields
-curl -X POST -H "Content-Type:application/json" -d '{"timeseries": {"label": "CMA5", "order": 1, "access_rights": 0, "sampling_frequency": 0, "aggregation": 0, "data_start_date": "2003-09-07T15:30:00+02:00", "data_end_date": "2016-12-31T14:30:00+01:00", "measurement_method": "UV absorption", "sampling_height": 7.0, "date_added": "2020-05-15T15:30:00+02:00", "date_modified": "2020-05-16T09:30:00+02:00", "station_id": 2, "variable_id": 7, "additional_metadata":"{}", "roles": [{"role": 0, "person_id": 3, "status": 0},{"role": 1, "person_id": 3, "status": 0}]}}' http://127.0.0.1:8000/timeseries/
-# TBD: (nested) upload with human readable fields
-curl -X POST -H "Content-Type:application/json" -d '{"timeseries15:30:00+02:00", "data_end_date": "2016-12-31T14:30:00+01:00", "measurement_method": "UV absorption", "sampling_height": 7.0, "date_added": "2020-05-15T15:30:00+02:00", "date_modified": "2020-05-16T09:30:00+02:00", "station_id": 2, "variable_id": 7, "additional_metadata":"{}", "roles": [{"role": "PointOfContact", "person": "s.schroeder@fz-juelich.de", "status": "active"},{"role": "Originator", "person": "Stefan.Feigenspan@uba.de", "status": "active"}]}}' http://127.0.0.1:8000/timeseries/
+# timeseries upload with (nested) human-readable fields
+curl -X POST -H "Content-Type:application/json" -d '{"timeseries": {"label": "CMA5", "order": 1, "access_rights": "ByAttribution", "sampling_frequency": "Hourly", "aggregation": "Mean", "source": "Measurement", "data_start_date": "2003-09-07T15:30:00+02:00", "data_end_date": "2016-12-31T14:30:00+01:00", "measurement_method": "UV absorption", "sampling_height": 7.0, "date_added": "2020-05-15T15:30:00+02:00", "date_modified": "2020-05-16T09:30:00+02:00", "station_id": 2, "variable_id": 7, "additional_metadata":"{}", "roles": [{"role": "PointOfContact", "person_id": 3, "status": "active"},{"role": "Originator", "person_id": 1, "status": "active"}]}}' http://127.0.0.1:8000/timeseries/
+# TBD: use person_email (instead of person_id) to identify persons, e.g. "person_email": "s.schroeder@fz-juelich.de"
 
 curl -X POST -H 'Content-Type: multipart/form-data; charset=utf-8; boundary=__X_PAW_BOUNDARY__' -F "file=@o3_CO002_2012_2017_v1-0.dat" "http://127.0.0.1:8000/data/"
diff --git a/toardb/test_base.py b/toardb/test_base.py
index ff85b1aae866c86493552fd7b6897a91c126c2e7..9046fbcd2fff54d41e49f307448f37d80a766b07 100644
--- a/toardb/test_base.py
+++ b/toardb/test_base.py
@@ -61,6 +61,11 @@ def test_db_session():
     # Drop all data after each test
     for tbl in reversed(Base.metadata.sorted_tables):
         _db_conn.execute(tbl.delete())
+    # all tables from "toar_controlled_vocabulary" got lost by the above command!
+    fake_conn = _db_conn.raw_connection()
+    fake_cur = fake_conn.cursor()
+    fake_cur.execute("DROP EXTENSION toar_controlled_vocabulary")
+    fake_conn.commit()
     # put back the connection to the connection pool
     session.close()
 
diff --git a/toardb/timeseries/crud.py b/toardb/timeseries/crud.py
index 1d8324d777702c5a9d8ec27ac4ff5f3dc2a6ba8c..ddd8dff8e940b46f7171d1bdcf514f45e9845f1f 100644
--- a/toardb/timeseries/crud.py
+++ b/toardb/timeseries/crud.py
@@ -9,10 +9,11 @@ from sqlalchemy.orm import Session
 from fastapi.responses import JSONResponse
 from . import models
 from .models import timeseries_timeseries_roles_table, \
-                    timeseries_timeseries_annotations_table, timeseries_timeseries_programmes_table
+                    timeseries_timeseries_annotations_table, timeseries_timeseries_programmes_table, \
+                    DA_enum, SF_enum, AT_enum, DS_enum
 from toardb.generic.models import RS_enum, RC_enum
 from .schemas import TimeseriesCreate
-from toardb.utils.utils import get_value_from_str
+from toardb.utils.utils import get_value_from_str, get_str_from_value
 
 
 def get_timeseries(db: Session, timeseries_id: int):
@@ -20,6 +21,10 @@ def get_timeseries(db: Session, timeseries_id: int):
     # there is a mismatch with additional_metadata
     if db_object:
         db_object.additional_metadata = str(db_object.additional_metadata)
+        db_object.access_rights = get_str_from_value(db,DA_enum,db_object.access_rights)
+        db_object.sampling_frequency = get_str_from_value(db,SF_enum,db_object.sampling_frequency)
+        db_object.aggregation = get_str_from_value(db,AT_enum,db_object.aggregation)
+        db_object.source = get_str_from_value(db,DS_enum,db_object.source)
     return db_object
 
 
@@ -28,6 +33,10 @@ def get_all_timeseries(db: Session, skip : int = 0, limit: int = None):
     for db_object in db_objects:
         # there is a mismatch with additional_metadata
         db_object.additional_metadata = str(db_object.additional_metadata)
+        db_object.access_rights = get_str_from_value(db,DA_enum,db_object.access_rights)
+        db_object.sampling_frequency = get_str_from_value(db,SF_enum,db_object.sampling_frequency)
+        db_object.aggregation = get_str_from_value(db,AT_enum,db_object.aggregation)
+        db_object.source = get_str_from_value(db,DS_enum,db_object.source)
     return db_objects
 
 
@@ -39,6 +48,10 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
     # there is a mismatch with additional_metadata
     if db_object:
         db_object.additional_metadata = str(db_object.additional_metadata)
+        db_object.access_rights = get_str_from_value(db,DA_enum,db_object.access_rights)
+        db_object.sampling_frequency = get_str_from_value(db,SF_enum,db_object.sampling_frequency)
+        db_object.aggregation = get_str_from_value(db,AT_enum,db_object.aggregation)
+        db_object.source = get_str_from_value(db,DS_enum,db_object.source)
     return db_object
 
 
@@ -81,6 +94,10 @@ def create_timeseries(db: Session, timeseries: TimeseriesCreate):
     annotations_data = timeseries_dict.pop('annotations', None)
     programmes_data = timeseries_dict.pop('programmes', None)
     db_timeseries = models.Timeseries(**timeseries_dict)
+    db_timeseries.access_rights = get_value_from_str(db,DA_enum,db_timeseries.access_rights)
+    db_timeseries.sampling_frequency = get_value_from_str(db,SF_enum,db_timeseries.sampling_frequency)
+    db_timeseries.aggregation = get_value_from_str(db,AT_enum,db_timeseries.aggregation)
+    db_timeseries.source = get_value_from_str(db,DS_enum,db_timeseries.source)
     db.add(db_timeseries)
     result = db.commit()
     db.refresh(db_timeseries)
@@ -137,4 +154,8 @@ def create_timeseries(db: Session, timeseries: TimeseriesCreate):
     # in upload command, we have now: "additional_metadata": "{}"
     # but return from this method gives (=database): "additional_metadata": {}
     db_timeseries.additional_metadata = str(db_timeseries.additional_metadata)
+    db_timeseries.access_rights = get_str_from_value(db,DA_enum,db_timeseries.access_rights)
+    db_timeseries.sampling_frequency = get_str_from_value(db,SF_enum,db_timeseries.sampling_frequency)
+    db_timeseries.aggregation = get_str_from_value(db,AT_enum,db_timeseries.aggregation)
+    db_timeseries.source = get_str_from_value(db,DS_enum,db_timeseries.source)
     return db_timeseries
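For context: the enum translation added in crud.py relies on get_value_from_str and get_str_from_value from toardb.utils.utils. A minimal sketch of how such lookup helpers might be implemented, assuming each controlled-vocabulary table (DA_enum, SF_enum, AT_enum, DS_enum, ...) is mapped with an enum_value (integer) and an enum_name (string) column; the actual implementation in toardb/utils/utils.py may differ:

# Illustrative sketch only; the column names enum_value/enum_name are assumptions.
def get_value_from_str(db, enum_model, name):
    # map a human-readable enum name (e.g. "ByAttribution") to its stored integer value
    return db.query(enum_model.enum_value).filter(enum_model.enum_name == name).scalar()

def get_str_from_value(db, enum_model, value):
    # map a stored integer enum value back to its human-readable name
    return db.query(enum_model.enum_name).filter(enum_model.enum_value == value).scalar()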
diff --git a/toardb/timeseries/models_core.py b/toardb/timeseries/models_core.py
index eb1fdca582cff018b12d794728d0e592013907a7..4ad8e1243ac9f971a3c636c98abe38b6ee29d6b5 100644
--- a/toardb/timeseries/models_core.py
+++ b/toardb/timeseries/models_core.py
@@ -31,6 +31,8 @@ class Timeseries(Base):
     +---------------------+--------------------------+-----------+----------+----------------------------------------+
     | aggregation         | integer                  |           | not null |                                        |
     +---------------------+--------------------------+-----------+----------+----------------------------------------+
+    | source              | integer                  |           | not null | 1                                      |
+    +---------------------+--------------------------+-----------+----------+----------------------------------------+
     | data_start_date     | timestamp with time zone |           | not null |                                        |
     +---------------------+--------------------------+-----------+----------+----------------------------------------+
     | data_end_date       | timestamp with time zone |           | not null |                                        |
@@ -84,6 +86,7 @@ class Timeseries(Base):
     access_rights = Column(Integer, nullable=False)
     sampling_frequency = Column(Integer, nullable=False)
     aggregation = Column(Integer, nullable=False)
+    source = Column(Integer, nullable=False, server_default=text("1"))
     data_start_date = Column(DateTime(True), nullable=False)
     data_end_date = Column(DateTime(True), nullable=False)
     measurement_method = Column(String(128), nullable=False)
diff --git a/toardb/timeseries/schemas.py b/toardb/timeseries/schemas.py
index 75fd67149a469813080c164a415acc97756697ab..40db3f88bc4f496b01c6d9487919c885ff8f62cd 100644
--- a/toardb/timeseries/schemas.py
+++ b/toardb/timeseries/schemas.py
@@ -15,9 +15,10 @@ class TimeseriesCoreBase(BaseModel):
     id: int = None
     label: str
     order: int
-    access_rights: int
-    sampling_frequency: int
-    aggregation: int
+    access_rights: str
+    sampling_frequency: str
+    aggregation: str
+    source: str
     data_start_date: dt.datetime
     data_end_date: dt.datetime
     measurement_method: str
diff --git a/toardb/timeseries/test_timeseries.py b/toardb/timeseries/test_timeseries.py
index de006943d659ee6f24a99b2a667f0d5809908889..82e7d250c3515e14ad7f69b142c89fc399e56cc3 100644
--- a/toardb/timeseries/test_timeseries.py
+++ b/toardb/timeseries/test_timeseries.py
@@ -28,10 +28,13 @@ class TestApps:
     @pytest.fixture(autouse=True)
     def setup_db_data(self, db):
         _db_conn = get_test_engine()
-        print(_db_conn)
         # id_seq will not be reset automatically between tests!
         fake_conn = _db_conn.raw_connection()
         fake_cur = fake_conn.cursor()
+        # all tables of "toar_controlled_vocabulary get lost inbetween tests
+        # ==> create the extension again!
+        fake_cur.execute("CREATE EXTENSION IF NOT EXISTS toar_controlled_vocabulary")
+        fake_conn.commit()
         fake_cur.execute("ALTER SEQUENCE auth_user_id_seq RESTART WITH 1")
         fake_conn.commit()
         fake_cur.execute("ALTER SEQUENCE variables_id_seq RESTART WITH 1")
@@ -121,13 +124,11 @@ class TestApps:
 
 
     def test_get_timeseries(self, client, db):
-        print("==============Client: ================", client.base_url)
         response = client.get("/timeseries/")
         expected_status_code = 200
         assert response.status_code == expected_status_code
-        print("I got: ", response.json())
-        expected_resp = [{'id': 1, 'label': 'CMA', 'order': 1, 'access_rights': 0,
-                          'sampling_frequency': 0, 'aggregation': 0,
+        expected_resp = [{'id': 1, 'label': 'CMA', 'order': 1, 'access_rights': 'ByAttribution',
+                          'sampling_frequency': 'Hourly', 'aggregation': 'Mean', 'source': 'Measurement',
                           'data_start_date': '2003-09-07T15:30:00+02:00', 'data_end_date': '2016-12-31T14:30:00+01:00',
                           'measurement_method': 'UV absorption', 'sampling_height': 7.0,
                           'date_added': '2020-05-15T15:30:00+02:00', 'date_modified': '2020-05-16T09:30:00+02:00',
@@ -148,8 +149,8 @@ class TestApps:
         response = client.get("/timeseries/1")
         expected_status_code = 200
         assert response.status_code == expected_status_code
-        expected_resp = {'id': 1, 'label': 'CMA', 'order': 1, 'access_rights': 0,
-                         'sampling_frequency': 0, 'aggregation': 0,
+        expected_resp = {'id': 1, 'label': 'CMA', 'order': 1, 'access_rights': 'ByAttribution',
+                         'sampling_frequency': 'Hourly', 'aggregation': 'Mean', 'source': 'Measurement',
                          'data_start_date': '2003-09-07T15:30:00+02:00', 'data_end_date': '2016-12-31T14:30:00+01:00',
                          'measurement_method': 'UV absorption', 'sampling_height': 7.0,
                          'date_added': '2020-05-15T15:30:00+02:00', 'date_modified': '2020-05-16T09:30:00+02:00',
@@ -178,8 +179,8 @@ class TestApps:
     def test_insert_new(self, client, db):
         response = client.post("/timeseries/",
                 json={"timeseries":
-                          {"label": "CMA2", "order": 1, "access_rights": 0,
-                           "sampling_frequency": 0, "aggregation": 0,
+                          {"label": "CMA2", "order": 1, "access_rights": "ByAttribution",
+                           "sampling_frequency": "Hourly", "aggregation": "Mean", "source": "Measurement",
                            "data_start_date": "2003-09-07T15:30:00+02:00",
                            "data_end_date": "2016-12-31T14:30:00+01:00",
                            "measurement_method": "UV absorption", "sampling_height": 7.0,
@@ -190,8 +191,8 @@ class TestApps:
                    )
         expected_status_code = 200
         assert response.status_code == expected_status_code
-        expected_resp = {'id': 2, 'label': 'CMA2', 'order': 1, 'access_rights': 0,
-                         'sampling_frequency': 0, 'aggregation': 0,
+        expected_resp = {'id': 2, 'label': 'CMA2', 'order': 1, 'access_rights': 'ByAttribution',
+                         'sampling_frequency': 'Hourly', 'aggregation': 'Mean', 'source': 'Measurement',
                          'data_start_date': '2003-09-07T15:30:00+02:00', 'data_end_date': '2016-12-31T14:30:00+01:00',
                          'measurement_method': 'UV absorption', 'sampling_height': 7.0,
                          'date_added': '2020-05-15T15:30:00+02:00', 'date_modified': '2020-05-16T09:30:00+02:00',
@@ -203,27 +204,29 @@ class TestApps:
     def test_insert_new_with_roles(self, client, db):
         response = client.post("/timeseries/",
                 json={"timeseries":
-                          {"label": "CMA2", "order": 1, "access_rights": 0,
-                           "sampling_frequency": 0, "aggregation": 0,
+                          {"label": "CMA2", "order": 1, "access_rights": "ByAttribution",
+                           "sampling_frequency": "Hourly", "aggregation": "Mean", "source": "Measurement",
                            "data_start_date": "2003-09-07T15:30:00+02:00",
                            "data_end_date": "2016-12-31T14:30:00+01:00",
                            "measurement_method": "UV absorption", "sampling_height": 7.0,
                            "date_added": "2020-05-15T15:30:00+02:00", "date_modified": "2020-05-16T09:30:00+02:00",
                            "station_id": 2, "variable_id": 7,
                            "additional_metadata":"{}",
-                           "roles": [{"role": 0, "person_id": 3, "status": 0},{"role": 1, "person_id": 3, "status": 0}]}
+                           "roles": [{"role": "PointOfContact", "person_id": 3, "status": "active"},
+                                     {"role": "Originator", "person_id": 1, "status": "active"}]
+                          }
                      }
                    )
         expected_status_code = 200
         assert response.status_code == expected_status_code
-        expected_resp = {'id': 2, 'label': 'CMA2', 'order': 1, 'access_rights': 0,
-                         'sampling_frequency': 0, 'aggregation': 0,
+        expected_resp = {'id': 2, 'label': 'CMA2', 'order': 1, 'access_rights': 'ByAttribution',
+                         'sampling_frequency': 'Hourly', 'aggregation': 'Mean', 'source': 'Measurement',
                          'data_start_date': '2003-09-07T15:30:00+02:00', 'data_end_date': '2016-12-31T14:30:00+01:00',
                          'measurement_method': 'UV absorption', 'sampling_height': 7.0,
                          'date_added': '2020-05-15T15:30:00+02:00', 'date_modified': '2020-05-16T09:30:00+02:00',
                          'station_id': 2, 'variable_id': 7,
                          'additional_metadata':{},
-                         'roles': [{'id': 1, 'person_id': 3, 'role': 0, 'status': 0}, {'id': 2, 'person_id': 3, 'role': 1, 'status': 0}],
+                         'roles': [{'id': 1, 'person_id': 3, 'role': '0', 'status': '0'}, {'id': 2, 'person_id': 1, 'role': '2', 'status': '0'}],
                          'annotations': [], 'programmes': []}
         assert response.json() == expected_resp