diff --git a/production_tests.sh b/production_tests.sh
index e2934438b24c98a41da54e90d6f0e45898bacbfc..8c4c8711d90ee90dc1f1a3b6cdb04d9dd1d59d9d 100755
--- a/production_tests.sh
+++ b/production_tests.sh
@@ -20,8 +20,15 @@ curl "http://127.0.0.1:8000/contacts/persons/Sabine%20Schr%C3%B6der"
 curl -X POST -H "Content-Type:application/json" -d '{"person": {"name": "Martin Schultz", "email": "m.schultz@fz-juelich.de", "phone": "+49-2461-61-96870", "isprivate": true}}' http://127.0.0.1:8000/contacts/persons/
 
 curl http://127.0.0.1:8000/stationmeta_core/
+curl http://127.0.0.1:8000/stationmeta/
 curl http://127.0.0.1:8000/stationmeta_core/China11
-curl -X POST -H "Content-Type:application/json" -d '{"stationmeta_core": {"codes":["ttt3","ttt4"],"name":"Test_China","coordinates":{"lat":36.256,"lng":17.106,"alt":1534.0},"country":"China","state":"Shandong Sheng","coordinate_validation_status":0,"coordinate_validation_date":"2020-03-11T12:22:18.047974+01:00","type_of_environment":0,"type_of_area":0,"category":"","timezone":"", "coordinate_validator_id": 1, "additional_metadata":"{}"}}' "http://127.0.0.1:8000/stationmeta_core/"
+curl http://127.0.0.1:8000/stationmeta/China11
+# station upload without nested fields
+curl -X POST -H "Content-Type:application/json" -d '{"stationmeta": {"codes":["ttt3","ttt4"],"name":"Test_China","coordinates":{"lat":36.256,"lng":17.106,"alt":1534.0},"country":"China","state":"Shandong Sheng","coordinate_validation_status":0,"coordinate_validation_date":"2020-03-11T12:22:18.047974+01:00","type_of_environment":0,"type_of_area":0,"category":"","timezone":"", "coordinate_validator_id": 1, "additional_metadata":"{}"}}' "http://127.0.0.1:8000/stationmeta/"
+# nested upload with enum fields
+curl -X POST -H "Content-Type:application/json" -d '{"stationmeta": {"codes":["ttt3","ttt4"],"name":"Test_China","coordinates":{"lat":36.256,"lng":17.106,"alt":1534.0},"country":"China","state":"Shandong Sheng","coordinate_validation_status":0,"coordinate_validation_date":"2020-03-11T12:22:18.047974+01:00","type_of_environment":0,"type_of_area":0,"category":"","timezone":"", "coordinate_validator_id": 1, "additional_metadata":"{}", "roles": [{"role": 0, "person_id": 3, "status": 0},{"role": 1, "person_id": 3, "status": 0}]}}' "http://127.0.0.1:8000/stationmeta/"
+# TBD: (nested) upload with human readable fields
+curl -X POST -H "Content-Type:application/json" -d '{"stationmeta": {"codes":["ttt3","ttt4"],"name":"Test_China","coordinates":{"lat":36.256,"lng":17.106,"alt":1534.0},"country":"China","state":"Shandong Sheng","coordinate_validation_status":0,"coordinate_validation_date":"2020-03-11T12:22:18.047974+01:00","type_of_environment":0,"type_of_area":0,"category":"","timezone":"", "coordinate_validator_id": 1, "additional_metadata":"{}", "roles": [{"role": "PointOfContact", "person": "s.schroeder@fz-juelich.de", "status": "active"},{"role": "Originator", "person": "Stefan.Feigenspan@uba.de", "status": "active"}]}}' "http://127.0.0.1:8000/stationmeta/"
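+# note: the human readable upload is marked TBD because strings like "PointOfContact" or "active" presumably still need to be translated back to their enum values via the controlled vocabulary tables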
 
 curl http://127.0.0.1:8000/timeseries/
 curl http://127.0.0.1:8000/timeseries/2
@@ -29,7 +36,7 @@ curl http://127.0.0.1:8000/timeseries/2
 curl -X POST -H "Content-Type:application/json" -d '{"timeseries": {"label": "CMA2", "order": 1, "access_rights": 0, "sampling_frequency": 0, "aggregation": 0, "data_start_date": "2003-09-07T15:30:00+02:00", "data_end_date": "2016-12-31T14:30:00+01:00", "measurement_method": "UV absorption", "sampling_height": 7.0, "date_added": "2020-05-15T15:30:00+02:00", "date_modified": "2020-05-16T09:30:00+02:00", "station_id": 2, "variable_id": 7, "additional_metadata":"{}"}}' http://127.0.0.1:8000/timeseries/
 # nested upload with enum fields
 curl -X POST -H "Content-Type:application/json" -d '{"timeseries": {"label": "CMA5", "order": 1, "access_rights": 0, "sampling_frequency": 0, "aggregation": 0, "data_start_date": "2003-09-07T15:30:00+02:00", "data_end_date": "2016-12-31T14:30:00+01:00", "measurement_method": "UV absorption", "sampling_height": 7.0, "date_added": "2020-05-15T15:30:00+02:00", "date_modified": "2020-05-16T09:30:00+02:00", "station_id": 2, "variable_id": 7, "additional_metadata":"{}", "roles": [{"role": 0, "person_id": 3, "status": 0},{"role": 1, "person_id": 3, "status": 0}]}}' http://127.0.0.1:8000/timeseries/
-# nested upload with human readable fields
+# TBD: (nested) upload with human readable fields
 curl -X POST -H "Content-Type:application/json" -d '{"timeseries": {"label": "CMA5", "order": 1, "access_rights": 0, "sampling_frequency": 0, "aggregation": 0, "data_start_date": "2003-09-07T15:30:00+02:00", "data_end_date": "2016-12-31T14:30:00+01:00", "measurement_method": "UV absorption", "sampling_height": 7.0, "date_added": "2020-05-15T15:30:00+02:00", "date_modified": "2020-05-16T09:30:00+02:00", "station_id": 2, "variable_id": 7, "additional_metadata":"{}", "roles": [{"role": "PointOfContact", "person": "s.schroeder@fz-juelich.de", "status": "active"},{"role": "Originator", "person": "Stefan.Feigenspan@uba.de", "status": "active"}]}}' http://127.0.0.1:8000/timeseries/
 
 curl -X POST -H 'Content-Type: multipart/form-data; charset=utf-8; boundary=__X_PAW_BOUNDARY__' -F "file=@o3_CO002_2012_2017_v1-0.dat" "http://127.0.0.1:8000/data/"
diff --git a/toardb/stationmeta/crud.py b/toardb/stationmeta/crud.py
index b6035fd381ccb583362c5c4f5c94135beeb2cda9..7991ca20b8497cf83085960f85bc8de7f9848ef3 100644
--- a/toardb/stationmeta/crud.py
+++ b/toardb/stationmeta/crud.py
@@ -4,7 +4,7 @@ Create, Read, Update, Delete functionality
 
 """
 
-from sqlalchemy import cast, Text
+from sqlalchemy import cast, Text, insert
 from typing import List
 from geoalchemy2.types import Geometry
 from sqlalchemy.orm import Session
@@ -13,7 +13,8 @@ from fastapi import File, UploadFile
 from fastapi.responses import JSONResponse
 from fastapi.encoders import jsonable_encoder
 from . import models
-from .schemas import get_coordinates_from_geom, get_geom_from_coordinates, StationmetaCoreCreate, Coordinates
+from .models import stationmeta_core_stationmeta_roles_table, stationmeta_core_stationmeta_annotations_table
+from .schemas import get_coordinates_from_geom, get_geom_from_coordinates, StationmetaCreate, Coordinates
 from pydantic import ValidationError
 
 
@@ -26,6 +27,15 @@ def get_stationmeta_core(db: Session, station_code: str):
     return db_object
 
 
+def get_stationmeta(db: Session, station_code: str):
+    db_object = db.query(models.StationmetaCore).filter(cast(models.StationmetaCore.codes,Text).contains(station_code.strip())).first()
+    # there is a mismatch with coordinates and additional_metadata
+    if db_object:
+        db_object.coordinates = get_coordinates_from_geom(db_object.coordinates)
+        db_object.additional_metadata = str(db_object.additional_metadata)
+    return db_object
+
+
 def get_all_stationmeta_core(db: Session, skip : int = 0, limit: int = None):
     db_objects = db.query(models.StationmetaCore).offset(skip).limit(limit).all()
     for db_object in db_objects:
@@ -44,19 +54,116 @@ def get_all_stationmeta(db: Session, skip : int = 0, limit: int = None):
     return db_objects
 
 
-def create_stationmeta_core(db: Session, stationmeta_core: StationmetaCoreCreate):
-    db_stationmeta_core = models.StationmetaCore(**stationmeta_core.dict())
+# is this internal, or should this also go to public REST api?
+def get_unique_stationmeta_role(db: Session, role: int, person_id: int, status: int):
+    db_object = db.query(models.StationmetaRole).filter(models.StationmetaRole.role == role) \
+                                                .filter(models.StationmetaRole.person_id == person_id) \
+                                                .filter(models.StationmetaRole.status == status) \
+                                                .first()
+    return db_object
+
+
+# is this internal, or should this also go to public REST api?
+def get_unique_stationmeta_annotation(db: Session, text: str, contributor_id: int):
+    db_object = db.query(models.StationmetaAnnotation).filter(models.StationmetaAnnotation.text == text) \
+                                                      .filter(models.StationmetaAnnotation.contributor_id == contributor_id) \
+                                                      .first()
+    return db_object
+
+
+def create_stationmeta(db: Session, stationmeta: StationmetaCreate):
+    stationmeta_dict = stationmeta.dict()
+    roles_data         = stationmeta_dict.pop('roles', None)
+    annotations_data   = stationmeta_dict.pop('annotations', None)
+    aux_images_data    = stationmeta_dict.pop('aux_images', None)
+    aux_docs_data      = stationmeta_dict.pop('aux_docs', None)
+    aux_urls_data      = stationmeta_dict.pop('aux_urls', None)
+    globalmeta_data    = stationmeta_dict.pop('globalmeta', None)
+    globalservice_data = stationmeta_dict.pop('globalservice', None)
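+    # the nested parts are split off first so that StationmetaCore(**stationmeta_dict)
+    # only receives core columns; the nested objects are stored separately below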
+    db_stationmeta = models.StationmetaCore(**stationmeta_dict)
     # there's a mismatch with coordinates --> how to automatically switch back and forth?!
-    tmp_coordinates = db_stationmeta_core.coordinates
-    db_stationmeta_core.coordinates = get_geom_from_coordinates(Coordinates(**db_stationmeta_core.coordinates))
+    tmp_coordinates = db_stationmeta.coordinates
+    db_stationmeta.coordinates = get_geom_from_coordinates(Coordinates(**db_stationmeta.coordinates))
     # there's also a mismatch with additional_metadata --> BUT: this should not be switched back!
     # in upload command, we have now: "additional_metadata": "{}"
     # but return from this method gives: "additional_metadata": {}
     # ==> there is a mismatch between model(JSONB) and schema(JSON)
-    db_stationmeta_core.additional_metadata = str(db_stationmeta_core.additional_metadata)
-    db.add(db_stationmeta_core)
+    db_stationmeta.additional_metadata = str(db_stationmeta.additional_metadata)
+    db.add(db_stationmeta)
     result = db.commit()
-    db.refresh(db_stationmeta_core)
+    db.refresh(db_stationmeta)
+    # get stationmeta_core_id
+    stationmeta_core_id = db_stationmeta.id
+    # store roles and update association table
+    if roles_data:
+        for r in roles_data:
+            db_role = models.StationmetaRole(**r)
+            # check whether role is already present in database
+            db_object = get_unique_stationmeta_role(db, db_role.role, db_role.person_id, db_role.status)
+            if db_object:
+                role_id = db_object.id
+            else:
+                db.add(db_role)
+                db.commit()
+                db.refresh(db_role)
+                role_id = db_role.id
+            db.execute(insert(stationmeta_core_stationmeta_roles_table).values(station_id=stationmeta_core_id, role_id=role_id))
+            db.commit()
+    # store annotations and update association table
+    if annotations_data:
+        for a in annotations_data:
+            db_annotation = models.StationmetaAnnotation(**a)
+            # check whether annotation is already present in database
+            db_object = get_unique_stationmeta_annotation(db, db_annotation.text, db_annotation.contributor_id)
+            if db_object:
+                annotation_id = db_object.id
+            else:
+                db.add(db_annotation)
+                db.commit()
+                db.refresh(db_annotation)
+                annotation_id = db_annotation.id
+            db.execute(insert(stationmeta_core_stationmeta_annotations_table).values(station_id=stationmeta_core_id, annotation_id=annotation_id))
+            db.commit()
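+    # aux images/docs/urls, globalmeta and globalservice reference the new station
+    # directly via station_id (no association table, unlike roles and annotations)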
+    # store aux_images
+    if aux_images_data:
+        for i in aux_images_data:
+            db_aux_image = models.StationmetaAuxImage(**i)
+            db_aux_image.station_id = stationmeta_core_id
+            db.add(db_aux_image)
+            db.commit()
+            db.refresh(db_aux_image)
+    # store aux_docs
+    if aux_docs_data:
+        for d in aux_docs_data:
+            db_aux_doc = models.StationmetaAuxDoc(**d)
+            db_aux_doc.station_id = stationmeta_core_id
+            db.add(db_aux_doc)
+            db.commit()
+            db.refresh(db_aux_doc)
+    # store aux_urls
+    if aux_urls_data:
+        for u in aux_urls_data:
+            db_aux_url = models.StationmetaAuxUrl(**u)
+            db_aux_url.station_id = stationmeta_core_id
+            db.add(db_aux_url)
+            db.commit()
+            db.refresh(db_aux_url)
+    # store globalmeta
+    if globalmeta_data:
+        for g in globalmeta_data:
+            db_global = models.StationmetaGlobal(**g)
+            db_global.station_id = stationmeta_core_id
+            db.add(db_global)
+            db.commit()
+            db.refresh(db_global)
+    # store globalservice
+    if globalservice_data:
+        for s in globalservice_data:
+            db_globalservice = models.StationmetaGlobalService(**s)
+            db_globalservice.station_id = stationmeta_core_id
+            db.add(db_globalservice)
+            db.commit()
+            db.refresh(db_globalservice)
     # there's a mismatch with coordinates --> how to automatically switch back and forth?!
-    db_stationmeta_core.coordinates = tmp_coordinates
-    return db_stationmeta_core
+    db_stationmeta.coordinates = tmp_coordinates
+    return db_stationmeta
diff --git a/toardb/stationmeta/models.py b/toardb/stationmeta/models.py
index debb835d0b35857740194fd667e28abe493a3055..7a674c37920dab8e18df1c4b3600be4336e5315c 100644
--- a/toardb/stationmeta/models.py
+++ b/toardb/stationmeta/models.py
@@ -1,3 +1,4 @@
+from sqlalchemy import Table, Column, Integer, String
 from sqlalchemy.ext.declarative import declarative_base
 from .models_core import StationmetaCore
 from .models_global import StationmetaGlobal
@@ -7,3 +8,22 @@ from .models_annotation import StationmetaAnnotation, stationmeta_core_stationme
 from .models_aux import StationmetaAuxDoc, StationmetaAuxImage, StationmetaAuxUrl
 
 from toardb.base import Base
+
+# controlled vocabulary
+
+# Station Climatic Zone
+CZ_enum = Table("cz_vocabulary",
+    Base.metadata,
+    Column("enum_val", Integer, primary_key=True),
+    Column("enum_str", String),
+    Column("enum_display_str", String)
+)
+
+# Station Coordinate Validity
+CV_enum = Table("cv_vocabulary",
+    Base.metadata,
+    Column("enum_val", Integer, primary_key=True),
+    Column("enum_str", String),
+    Column("enum_display_str", String)
+)
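+# These tables are expected to mirror the RS_enum/RC_enum pattern in toardb/timeseries,
+# i.e. map enum_val <-> enum_str/enum_display_str so that human readable strings can be
+# translated by the API. A possible (untested) lookup sketch:
+#   def get_cz_enum_dict(db):
+#       return {row.enum_str: row.enum_val for row in db.query(CZ_enum).all()}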
+
diff --git a/toardb/stationmeta/schemas.py b/toardb/stationmeta/schemas.py
index ea08658db4fca632360124e58aeb702da8a58810..b4a984f4a1f104e992b3421173e9a2ad3dd358d7 100644
--- a/toardb/stationmeta/schemas.py
+++ b/toardb/stationmeta/schemas.py
@@ -70,7 +70,6 @@ class StationmetaAnnotationBase(BaseModel):
     date_added: dt.datetime
     approved: bool
     contributor_id: int
-    station_id: int
 
 
 class StationmetaAnnotationCreate(StationmetaAnnotationBase):
@@ -207,7 +206,6 @@ class StationmetaRoleBase(BaseModel):
     role: int
     status: int
     person_id: int
-    station_id: int
 
 
 class StationmetaRoleCreate(StationmetaRoleBase):
diff --git a/toardb/stationmeta/stationmeta.py b/toardb/stationmeta/stationmeta.py
index 823d07d61364352d5a549903cdc354f7b8cc92a0..b831d8a4d90daa395f9909b415a862d96d86cf15 100644
--- a/toardb/stationmeta/stationmeta.py
+++ b/toardb/stationmeta/stationmeta.py
@@ -34,20 +34,29 @@ def get_stationmeta_core(station_code: str, db: Session = Depends(get_db)):
         raise HTTPException(status_code=404, detail="Data not found.")
     return db_stationmeta_core
 
+# the same as above, but as nested view
+# get all metadata of one station
+@router.get('/stationmeta/{station_code}', response_model=schemas.Stationmeta)
+def get_stationmeta(station_code: str, db: Session = Depends(get_db)):
+    db_stationmeta = crud.get_stationmeta(db, station_code=station_code)
+    if db_stationmeta is None:
+        raise HTTPException(status_code=404, detail="Data not found.")
+    return db_stationmeta
+
 #some more gets to be tested:
 # - get stationmeta_global
 # - get stationmeta_aux
 # - ...
 
-@router.post('/stationmeta_core/', response_model=schemas.StationmetaCore)
+@router.post('/stationmeta/', response_model=schemas.Stationmeta)
 #curl -X POST -H "Content-Type:application/json" -d '{"stationmeta_core": {"codes":["ttt3","ttt4"],"name":"Test_China","coordinates":{"lat":36.256,"lng":17.106,"alt":1534.0},"country":"China","state":"Shandong Sheng","coordinate_validation_status":0,"coordinate_validation_date":"2020-03-11T12:22:18.047974+01:00","type_of_environment":0,"type_of_area":0,"category":"","timezone":"", "coordinate_validator_id": 1, "additional_metadata":"{}"}}' "http://127.0.0.1:8000/stationmeta_core/"
 # The following command was not working as long as the upload via Body was defined.
 # See bug report: https://github.com/tiangolo/fastapi/issues/300
 # (Although this seems to be fixed in the meantime, it is not working in my FastAPI version.)
 #def create_stationmeta_core(stationmeta_core: schemas.StationmetaCoreCreate, db: Session = Depends(get_db)):
-def create_stationmeta_core(stationmeta_core: schemas.StationmetaCoreCreate = Body(..., embed = True), db: Session = Depends(get_db)):
+def create_stationmeta_core(stationmeta: schemas.StationmetaCreate = Body(..., embed = True), db: Session = Depends(get_db)):
     # for the  moment, just check the first code of station's codes
-    db_stationmeta_core= crud.get_stationmeta_core(db, station_code=stationmeta_core.codes[0])
+    db_stationmeta_core = crud.get_stationmeta_core(db, station_code=stationmeta.codes[0])
     if db_stationmeta_core:
         raise HTTPException(status_code=400, detail="Station already registered.")
-    return crud.create_stationmeta_core(db=db, stationmeta_core=stationmeta_core)
+    return crud.create_stationmeta(db=db, stationmeta=stationmeta)
diff --git a/toardb/timeseries/crud.py b/toardb/timeseries/crud.py
index d0f4364b23b4c73620e1b089bc10350185857065..d8fd17e7ec3467339a9d20038f434a083d60851f 100644
--- a/toardb/timeseries/crud.py
+++ b/toardb/timeseries/crud.py
@@ -8,7 +8,8 @@ from sqlalchemy import insert
 from sqlalchemy.orm import Session
 from fastapi.responses import JSONResponse
 from . import models
-from .models import RS_enum, RC_enum, timeseries_timeseries_roles_table
+from .models import RS_enum, RC_enum, timeseries_timeseries_roles_table, \
+                    timeseries_timeseries_annotations_table, timeseries_timeseries_programmes_table
 from .schemas import TimeseriesCreate
 
 
@@ -40,6 +41,7 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
 
 
 # is this internal, or should this also go to public REST api?
+# do we need this at all?
 def get_role_ids_of_timeseries(db: Session, timeseries_id: int):
     db_objects = db.query(models.TimeseriesTimeseriesRoles) \
                                       .filter(models.TimeseriesTimeseriesRoles.timeseries_id == timeseries_id) \
@@ -63,6 +65,14 @@ def get_unique_timeseries_programme(db: Session, name: str, homepage: str):
     return db_object
 
 
+# is this internal, or should this also go to public REST api?
+def get_unique_timeseries_annotation(db: Session, text: str, contributor_id: int):
+    db_object = db.query(models.TimeseriesAnnotation).filter(models.TimeseriesAnnotation.text == text) \
+                                      .filter(models.TimeseriesAnnotation.contributor_id == contributor_id) \
+                                      .first()
+    return db_object
+
+
 def __get_status_enum(db: Session):
     return db.query(RS_enum).all()
 
@@ -103,10 +113,15 @@ def create_timeseries(db: Session, timeseries: TimeseriesCreate):
     if annotations_data:
         for a in annotations_data:
             db_annotation = models.TimeseriesAnnotation(**a)
-            db.add(db_annotation)
-            db.commit()
-            db.refresh(db_annotation)
-            annotation_id = db_annotation.id
+            # check whether annotation is already present in database
+            db_object = get_unique_timeseries_annotation(db, db_annotation.text, db_annotation.contributor_id)
+            if db_object:
+                annotation_id = db_object.id
+            else:
+                db.add(db_annotation)
+                db.commit()
+                db.refresh(db_annotation)
+                annotation_id = db_annotation.id
             db.execute(insert(timeseries_timeseries_annotations_table).values(timeseries_id=timeseries_id, annotation_id=annotation_id))
             db.commit()
     # store programmes and update association table
@@ -125,8 +140,6 @@ def create_timeseries(db: Session, timeseries: TimeseriesCreate):
             db.execute(insert(timeseries_timeseries_programmes_table).values(timeseries_id=timeseries_id, programme_id=programme_id))
             db.commit()
     # there is a mismatch with additional_metadata
-    # there is a mismatch with additional_metadata
-    # there is a mismatch with additional_metadata
     # in upload command, we have now: "additional_metadata": "{}"
     # but return from this method gives (=database): "additional_metadata": {}
     db_timeseries.additional_metadata = str(db_timeseries.additional_metadata)