From 5b806246d6def0a018895ce5664fe65d086f7cd5 Mon Sep 17 00:00:00 2001
From: schroeder5 <s.schroeder@fz-juelich.de>
Date: Tue, 2 Jun 2020 01:31:43 +0200
Subject: [PATCH] #1: add pytest suite for data; rename date_time to
 datetime; use 'no_autoflush' in timeseries queries

---
 toardb/data/data.py                           |   2 +-
 toardb/data/fixtures/data.json                |  62 ++++++
 .../toluene_SDZ54421_2012_2012_v1-0.dat       |  60 ++++++
 toardb/data/models.py                         |   7 +-
 toardb/data/schemas.py                        |   2 +-
 toardb/data/test_base.py                      | 124 +++++++++++
 toardb/data/test_data.py                      | 193 ++++++++++++++++++
 toardb/timeseries/crud.py                     |  16 +-
 8 files changed, 456 insertions(+), 10 deletions(-)
 create mode 100644 toardb/data/fixtures/data.json
 create mode 100644 toardb/data/fixtures/toluene_SDZ54421_2012_2012_v1-0.dat
 create mode 100644 toardb/data/test_base.py
 create mode 100644 toardb/data/test_data.py

diff --git a/toardb/data/data.py b/toardb/data/data.py
index c042383..d3ffc8b 100644
--- a/toardb/data/data.py
+++ b/toardb/data/data.py
@@ -46,7 +46,7 @@ def get_data(timeseries_id: int, db: Session = Depends(get_db)):
 async def create_data(file: UploadFile = File(...), db: Session = Depends(get_db)):
 #   # the next three lines are automatically done by database management,
 #   # but we do want helpful error messages!
-#   db_data = crud.get_data_by_datetime_and_timeseriesid(db, datetime=data.date_time, timeseries_id=data.timeseries_id)
+#   db_data = crud.get_data_by_datetime_and_timeseriesid(db, datetime=data.datetime, timeseries_id=data.timeseries_id)
 #   if db_data:
 #       raise HTTPException(status_code=400, detail="Data already registered.")
 
diff --git a/toardb/data/fixtures/data.json b/toardb/data/fixtures/data.json
new file mode 100644
index 0000000..ecf7cf4
--- /dev/null
+++ b/toardb/data/fixtures/data.json
@@ -0,0 +1,62 @@
+[
+  {
+    "datetime":"2012-12-16 21:00:00+01",
+    "value":21.5809999999999995,
+    "flags":0,
+    "timeseries_id":1
+  },
+  {
+    "datetime":"2012-12-16 22:00:00+01",
+    "value":13.7340000000000018,
+    "flags":0,
+    "timeseries_id":1
+  },
+  {
+    "datetime":"2012-12-16 23:00:00+01",
+    "value":13.7340000000000018,
+    "flags":0,
+    "timeseries_id":1
+  },
+  {
+    "datetime":"2012-12-17 00:00:00+01",
+    "value":7.84799999999999986,
+    "flags":0,
+    "timeseries_id":1
+  },
+  {
+    "datetime":"2012-12-17 01:00:00+01",
+    "value":15.6959999999999997,
+    "flags":0,
+    "timeseries_id":1
+  },
+  {
+    "datetime":"2012-12-17 02:00:00+01",
+    "value":11.7720000000000002,
+    "flags":0,
+    "timeseries_id":1
+  },
+  {
+    "datetime":"2012-12-17 03:00:00+01",
+    "value":13.7340000000000018,
+    "flags":0,
+    "timeseries_id":1
+  },
+  {
+    "datetime":"2012-12-17 04:00:00+01",
+    "value":19.620000000000001,
+    "flags":0,
+    "timeseries_id":1
+  },
+  {
+    "datetime":"2012-12-17 05:00:00+01",
+    "value":15.6959999999999997,
+    "flags":0,
+    "timeseries_id":1
+  },
+  {
+    "datetime":"2012-12-17 06:00:00+01",
+    "value":5.88600000000000012,
+    "flags":0,
+    "timeseries_id":1
+  }
+]
diff --git a/toardb/data/fixtures/toluene_SDZ54421_2012_2012_v1-0.dat b/toardb/data/fixtures/toluene_SDZ54421_2012_2012_v1-0.dat
new file mode 100644
index 0000000..753909e
--- /dev/null
+++ b/toardb/data/fixtures/toluene_SDZ54421_2012_2012_v1-0.dat
@@ -0,0 +1,60 @@
+#individual_harmonize.py: automatically created (datetimeformat (input): %d/%m/%Y %H:%M)
+#individual_harmonize.py: date: 2020-04-07 21:26
+#individual_harmonize.py: version of data: 1.0
+#station_id: SDZ54421
+#station_name: Shangdianzi
+#station_country: China
+#station_lat: 40.65
+#station_lon: 117.17
+#station_alt: 293.9
+#timeshift_from_utc: 8.0
+#time_reporting: begin_of_interval
+#original_units: ppb
+#station_local_id: SDZ54421
+#station_state: Beijing Shi
+#station_timezone: Asia/Shanghai
+#station_type_of_area: rural
+#contributor: China Meteorological Administration
+#contributor_shortname: CMA
+#contributor_country: China
+#dataset_pi: Néstor Xu Xiaobin
+#dataset_pi_email: xuxb@camscma.cn
+#Time; Value; Flag
+2012-12-15 10:00;  33.353;  0
+2012-12-15 11:00;  58.859;  0
+2012-12-15 12:00;  66.706;  0
+2012-12-15 13:00;  64.744;  0
+2012-12-15 14:00;  72.592;  0
+2012-12-15 15:00;  56.897;  0
+2012-12-15 16:00;  41.201;  0
+2012-12-15 17:00;  35.315;  0
+2012-12-15 18:00;  37.277;  0
+2012-12-15 19:00;  31.391;  0
+2012-12-15 20:00;  23.543;  0
+2012-12-15 21:00;   7.848;  0
+2012-12-15 22:00;   1.962;  0
+2012-12-16 08:00;   3.924;  0
+2012-12-16 09:00;   9.810;  0
+2012-12-16 10:00;  31.391;  0
+2012-12-16 11:00;  52.973;  0
+2012-12-16 12:00;  66.706;  0
+2012-12-16 13:00;  74.554;  0
+2012-12-16 14:00;  74.554;  0
+2012-12-16 15:00;  66.706;  0
+2012-12-16 16:00;  51.011;  0
+2012-12-16 17:00;  41.201;  0
+2012-12-16 18:00;  37.277;  0
+2012-12-16 19:00;  31.391;  0
+2012-12-16 20:00;  21.581;  0
+2012-12-16 21:00;  13.734;  0
+2012-12-16 22:00;  13.734;  0
+2012-12-16 23:00;   7.848;  0
+2012-12-17 00:00;  15.696;  0
+2012-12-17 01:00;  11.772;  0
+2012-12-17 02:00;  13.734;  0
+2012-12-17 03:00;  19.620;  0
+2012-12-17 04:00;  15.696;  0
+2012-12-17 05:00;   5.886;  0
+2012-12-17 06:00;   3.924;  0
+2012-12-17 08:00;   1.962;  0
+2012-12-17 09:00;  23.543;  0
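
The fixture above follows the harmonized data layout: '#'-prefixed metadata
headers, a '#Time; Value; Flag' column line, and semicolon-separated data rows.
A minimal sketch of a reader for this layout (a hypothetical helper for
illustration, not the parser the upload endpoint actually uses):

    from datetime import datetime

    def read_dat(path):
        """Parse a harmonized .dat fixture: '#' headers, then 'Time; Value; Flag' rows."""
        header, rows = {}, []
        with open(path) as f:
            for line in f:
                line = line.strip()
                if not line:
                    continue
                if line.startswith("#"):
                    # metadata header, e.g. "#station_lat: 40.65"
                    key, sep, val = line[1:].partition(":")
                    if sep:
                        header[key.strip()] = val.strip()
                    continue
                time_s, value_s, flag_s = (tok.strip() for tok in line.split(";"))
                rows.append((datetime.strptime(time_s, "%Y-%m-%d %H:%M"),
                             float(value_s), int(flag_s)))
        return header, rows
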
diff --git a/toardb/data/models.py b/toardb/data/models.py
index 4be7853..a8d9b14 100644
--- a/toardb/data/models.py
+++ b/toardb/data/models.py
@@ -5,6 +5,8 @@ from sqlalchemy.sql.sqltypes import NullType
 from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.ext.declarative import declarative_base
 
+from toardb.timeseries.models import Timeseries
+
 Base = declarative_base()
 metadata = Base.metadata
 
@@ -43,4 +45,7 @@ class Data(Base):
     datetime = Column(DateTime(True), nullable=False, index=True)
     value = Column(Float(53), nullable=False, index=True)
     flags = Column(Integer, nullable=False)
-    timeseries_id = Column(ForeignKey('timeseries.id', deferrable=True, initially='DEFERRED'), nullable=False, index=True)
+# Do not use the string declaration 'timeseries.id' here: it is resolved
+# against this module's own metadata (no 'timeseries' table there) and
+# fails under pytest; use the explicit class instead, see: https://groups.google.com/forum/#!topic/sqlalchemy/YjGhE4d6K4U
+    timeseries_id = Column(ForeignKey(Timeseries.id, deferrable=True, initially='DEFERRED'), nullable=False, index=True)
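
For context on the comment above: each subpackage declares its own
`declarative_base()`, so the string target 'timeseries.id' is looked up in this
module's metadata, where no 'timeseries' table is registered, and table
creation fails in the test setup. Referencing the mapped column directly
avoids the registry lookup. A minimal sketch of the two styles (toy models,
not the toardb ones):

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base

    BaseA = declarative_base()   # e.g. toardb.timeseries.models.Base
    BaseB = declarative_base()   # e.g. toardb.data.models.Base

    class Timeseries(BaseA):
        __tablename__ = 'timeseries'
        id = Column(Integer, primary_key=True)

    class Data(BaseB):
        __tablename__ = 'data'
        id = Column(Integer, primary_key=True)
        # string form: resolved against BaseB.metadata, where 'timeseries'
        # is unknown -> NoReferencedTableError on create_all
        #timeseries_id = Column(ForeignKey('timeseries.id'))
        # class form: bound to the column object itself, no registry lookup
        timeseries_id = Column(ForeignKey(Timeseries.id))
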
diff --git a/toardb/data/schemas.py b/toardb/data/schemas.py
index d5f48c4..c156157 100644
--- a/toardb/data/schemas.py
+++ b/toardb/data/schemas.py
@@ -9,7 +9,7 @@ from pydantic import BaseModel
 import datetime as dt
 
 class DataBase(BaseModel):
-    date_time: dt.datetime
+    datetime: dt.datetime
     value: float
     flags: int
     timeseries_id: int
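
The field is now called `datetime`, which is why the module is imported under
the alias `dt`. A quick usage sketch of the renamed schema (values taken from
the data.json fixture; assumes the package is importable):

    import datetime as dt
    from toardb.data.schemas import DataBase

    record = DataBase(
        datetime=dt.datetime(2012, 12, 16, 21, 0,
                             tzinfo=dt.timezone(dt.timedelta(hours=1))),
        value=21.581,
        flags=0,
        timeseries_id=1,
    )
    print(record.datetime.isoformat())  # 2012-12-16T21:00:00+01:00
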
diff --git a/toardb/data/test_base.py b/toardb/data/test_base.py
new file mode 100644
index 0000000..ad1ef70
--- /dev/null
+++ b/toardb/data/test_base.py
@@ -0,0 +1,124 @@
+#taken from: https://github.com/tiangolo/fastapi/issues/831
+import pytest
+from starlette.testclient import TestClient
+from typing import Optional, AsyncIterable
+from sqlalchemy import create_engine
+from sqlalchemy.engine import Engine as Database
+from sqlalchemy.orm import Session
+from sqlalchemy_utils import database_exists, create_database, drop_database
+
+from toardb.utils.database import DATABASE_URL
+from .models import Base
+from toardb.auth_user.models import Base as AuthUserBase
+from toardb.contacts.models import Base as ContactBase
+from toardb.variables.models import Base as VariableBase
+from toardb.stationmeta.models import Base as StationmetaBase
+from toardb.timeseries.models import Base as TimeseriesBase
+from toardb.toardb import app
+from toardb.data.data import get_db
+
+url = str(DATABASE_URL + "_test")
+_db_conn = create_engine(url)
+
+def get_test_db_conn() -> Database:
+    assert _db_conn is not None
+    return _db_conn
+
+
+def get_test_db() -> AsyncIterable[Session]:
+    sess = Session(bind=_db_conn)
+
+    try:
+        yield sess
+    finally:
+        sess.close()
+
+
+@pytest.fixture(scope="session", autouse=True)
+def create_test_database():
+    """
+    Create a clean test database once per test session.
+    We use the `sqlalchemy_utils` package for a few helpers to consistently
+    create and drop the database.
+    """
+    if database_exists(url):
+        drop_database(url)
+    create_database(url)  # Create the test database.
+    # 'create_all' alone does not work because the 'Geometry' type is undefined;
+    # the PostGIS extension has to be created first.
+    fake_conn = _db_conn.raw_connection()
+    fake_cur = fake_conn.cursor()
+    fake_cur.execute("CREATE EXTENSION postgis")
+    fake_conn.commit()
+    # the id sequences are also needed:
+    fake_cur.execute("CREATE SEQUENCE public.auth_user_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.variables_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.stationmeta_core_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.stationmeta_global_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.stationmeta_global_services_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.stationmeta_roles_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.stationmeta_annotations_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.stationmeta_aux_docs_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.stationmeta_aux_images_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.stationmeta_aux_urls_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.persons_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.organisations_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.timeseries_annotations_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.timeseries_roles_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.timeseries_programmes_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    fake_cur.execute("CREATE SEQUENCE public.timeseries_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
+    fake_conn.commit()
+    AuthUserBase.metadata.create_all(_db_conn)  # Create the tables.
+    ContactBase.metadata.create_all(_db_conn)  # Create the tables.
+    VariableBase.metadata.create_all(_db_conn)  # Create the tables.
+    StationmetaBase.metadata.create_all(_db_conn)  # Create the tables.
+    TimeseriesBase.metadata.create_all(_db_conn)  # Create the tables.
+    Base.metadata.create_all(_db_conn)  # Create the tables.
+    # finally, point the app at the test database:
+    app.dependency_overrides[get_db] = get_test_db  # Mock the Database Dependency
+    yield  # Run the tests.
+    drop_database(url)  # Drop the test database.
+
+
+@pytest.fixture
+def test_db_session():
+    """Returns an sqlalchemy session, and after the test tears down everything properly."""
+
+    session = Session(bind=_db_conn)
+
+    yield session
+    # Drop all data after each test
+    for tbl in reversed(AuthUserBase.metadata.sorted_tables +
+                        ContactBase.metadata.sorted_tables +
+                        VariableBase.metadata.sorted_tables +
+                        StationmetaBase.metadata.sorted_tables +
+                        TimeseriesBase.metadata.sorted_tables +
+                        Base.metadata.sorted_tables):
+        _db_conn.execute(tbl.delete())
+    # put back the connection to the connection pool
+    session.close()
+
+
+@pytest.fixture()
+def client():
+    """
+    Provide a TestClient for the app; together with the 'test_db_session'
+    fixture, every test case starts from an empty database:
+    """
+    with TestClient(app) as client:
+        yield client
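
The decisive line in `create_test_database` is the dependency override: every
route that declares `db: Session = Depends(get_db)` transparently receives a
session bound to the test engine. A minimal sketch of a test consuming these
fixtures (a hypothetical check; test_data.py below contains the real tests):

    def test_list_endpoint(client, test_db_session):
        response = client.get("/data/?limit=1")
        assert response.status_code == 200
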
diff --git a/toardb/data/test_data.py b/toardb/data/test_data.py
new file mode 100644
index 0000000..c2d4ac6
--- /dev/null
+++ b/toardb/data/test_data.py
@@ -0,0 +1,193 @@
+import pytest
+import json
+import io
+from fastapi import File, UploadFile
+from .models import Data
+from toardb.timeseries.models import Timeseries
+from toardb.stationmeta.models import StationmetaCore
+from toardb.stationmeta.schemas import get_geom_from_coordinates, Coordinates
+from toardb.variables.models import Variable
+from toardb.contacts.models import Person, Organisation
+from toardb.auth_user.models import AuthUser
+# importing 'create_test_database' is required so that pytest registers the fixture
+from .test_base import (
+    client,
+    get_test_db,
+    create_test_database,
+    url,
+    test_db_session as db,
+)
+from .test_base import _db_conn
+
+class TestApps:
+    def setup(self):
+        self.application_url = "/data/"
+
+    """Set up all the data before each test
+       If you want the setup only once (per test module),
+       the scope argument is not working in the expected way, as discussed here:
+       https://stackoverflow.com/questions/45817153/py-test-fixture-use-function-fixture-in-scope-fixture
+    """
+    @pytest.fixture(autouse=True)
+    def setup_db_data(self, db):
+        fake_conn = _db_conn.raw_connection()
+        fake_cur = fake_conn.cursor()
+        # the id sequences are not reset automatically between tests!
+        fake_cur.execute("ALTER SEQUENCE auth_user_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE variables_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE stationmeta_core_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE stationmeta_global_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE stationmeta_global_services_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE stationmeta_roles_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE stationmeta_annotations_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE stationmeta_aux_docs_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE stationmeta_aux_images_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE stationmeta_aux_urls_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE persons_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE organisations_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE timeseries_annotations_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE timeseries_roles_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        fake_cur.execute("ALTER SEQUENCE timeseries_id_seq RESTART WITH 1")
+        fake_conn.commit()
+        infilename = "toardb/auth_user/fixtures/auth.json"
+        with open(infilename) as f:
+            metajson = json.load(f)
+            for entry in metajson:
+                new_auth_user = AuthUser(**entry)
+                db.add(new_auth_user)
+                db.commit()
+                db.refresh(new_auth_user)
+        infilename = "toardb/contacts/fixtures/persons.json"
+        with open(infilename) as f:
+            metajson = json.load(f)
+            for entry in metajson:
+                new_person = Person(**entry)
+                db.add(new_person)
+                db.commit()
+                db.refresh(new_person)
+        infilename = "toardb/contacts/fixtures/organisations.json"
+        with open(infilename) as f:
+            metajson = json.load(f)
+            for entry in metajson:
+                new_organisation = Organisation(**entry)
+                db.add(new_organisation)
+                db.commit()
+                db.refresh(new_organisation)
+        infilename = "toardb/variables/fixtures/variables.json"
+        with open(infilename) as f:
+            metajson = json.load(f)
+            for entry in metajson:
+                new_variable = Variable(**entry)
+                db.add(new_variable)
+                db.commit()
+                db.refresh(new_variable)
+        infilename = "toardb/stationmeta/fixtures/stationmeta_core.json"
+        with open(infilename) as f:
+            metajson = json.load(f)
+            for entry in metajson:
+                new_stationmeta_core = StationmetaCore(**entry)
+                # there is a mismatch with coordinates --> how can this be switched back and forth automatically?
+                tmp_coordinates = new_stationmeta_core.coordinates
+                new_stationmeta_core.coordinates = get_geom_from_coordinates(Coordinates(**new_stationmeta_core.coordinates))
+                # there is also a mismatch with additional_metadata --> BUT: this one should not be switched back!
+                # the upload command now sends: "additional_metadata": "{}"
+                # but the return from this method gives: "additional_metadata": {}
+                # ==> there is a mismatch between the model (JSONB) and the schema (JSON)
+                new_stationmeta_core.additional_metadata = str(new_stationmeta_core.additional_metadata)
+                db.add(new_stationmeta_core)
+                db.commit()
+                db.refresh(new_stationmeta_core)
+        infilename = "toardb/timeseries/fixtures/timeseries.json"
+        with open(infilename) as f:
+            metajson = json.load(f)
+            for entry in metajson:
+                new_timeseries = Timeseries(**entry)
+                db.add(new_timeseries)
+                db.commit()
+                db.refresh(new_timeseries)
+        infilename = "toardb/data/fixtures/data.json"
+        with open(infilename) as f:
+            metajson = json.load(f)
+            for entry in metajson:
+                new_data = Data(**entry)
+                db.add(new_data)
+                db.commit()
+                db.refresh(new_data)
+
+
+    def test_get_data(self, client, db):
+        response = client.get("/data/?limit=4")
+        expected_status_code = 200
+        assert response.status_code == expected_status_code
+        expected_resp = [{'datetime': '2012-12-16T21:00:00+01:00', 'value': 21.581, 'flags': 0, 'timeseries_id': 1},
+                         {'datetime': '2012-12-16T22:00:00+01:00', 'value': 13.734, 'flags': 0, 'timeseries_id': 1},
+                         {'datetime': '2012-12-16T23:00:00+01:00', 'value': 13.734, 'flags': 0, 'timeseries_id': 1},
+                         {'datetime': '2012-12-17T00:00:00+01:00', 'value':  7.848, 'flags': 0, 'timeseries_id': 1}]
+        assert response.json() == expected_resp
+
+
+    def test_get_special(self, client, db):
+        response = client.get("/data/1")
+        expected_status_code = 200
+        assert response.status_code == expected_status_code
+        expected_resp = [{'datetime': '2012-12-16T21:00:00+01:00', 'value': 21.581, 'flags': 0, 'timeseries_id': 1},
+                         {'datetime': '2012-12-16T22:00:00+01:00', 'value': 13.734, 'flags': 0, 'timeseries_id': 1},
+                         {'datetime': '2012-12-16T23:00:00+01:00', 'value': 13.734, 'flags': 0, 'timeseries_id': 1},
+                         {'datetime': '2012-12-17T00:00:00+01:00', 'value':  7.848, 'flags': 0, 'timeseries_id': 1},
+                         {'datetime': '2012-12-17T01:00:00+01:00', 'value': 15.696, 'flags': 0, 'timeseries_id': 1},
+                         {'datetime': '2012-12-17T02:00:00+01:00', 'value': 11.772, 'flags': 0, 'timeseries_id': 1},
+                         {'datetime': '2012-12-17T03:00:00+01:00', 'value': 13.734, 'flags': 0, 'timeseries_id': 1},
+                         {'datetime': '2012-12-17T04:00:00+01:00', 'value': 19.62,  'flags': 0, 'timeseries_id': 1},
+                         {'datetime': '2012-12-17T05:00:00+01:00', 'value': 15.696, 'flags': 0, 'timeseries_id': 1},
+                         {'datetime': '2012-12-17T06:00:00+01:00', 'value':  5.886, 'flags': 0, 'timeseries_id': 1}]
+        assert response.json() == expected_resp
+
+
+#   def test_insert_new_without_credits(self):
+#?      response = client.post("/data/")
+#       expected_status_code=401
+#       assert response.status_code == expected_status_code
+#?      expected_resp = ...
+#   assert response.json() == expected_resp
+
+
+
+#   def test_insert_new_wrong_credits(self):
+#?      response = client.post("/data/")
+#       expected_status_code = 401
+#       assert response.status_code == expected_status_code
+#?      expected_resp = ...
+#   assert response.json() == expected_resp
+
+
+    def test_insert_new(self, client, db):
+        response = client.post("/data/", files={"file": open("toardb/data/fixtures/toluene_SDZ54421_2012_2012_v1-0.dat", "rb")})
+        print(response.json())
+        expected_status_code = 200
+        assert response.status_code == expected_status_code
+        # TODO: check the response body against a real expected value;
+        # so far only the status code is verified.
+
+
+#   def test_insert_duplicate(self, client, db):
+#       response = client.post("/data/",
+#                  ...
+#                  )
+#       expected_status_code = 400
+#       assert response.status_code == expected_status_code
+#       expected_resp = {'detail': 'Data for timeseries already registered.'}
+#       assert response.json() == expected_resp
diff --git a/toardb/timeseries/crud.py b/toardb/timeseries/crud.py
index 7abde34..f704291 100644
--- a/toardb/timeseries/crud.py
+++ b/toardb/timeseries/crud.py
@@ -27,13 +27,15 @@ def get_all_timeseries(db: Session, skip : int = 0, limit: int = None):
 
 
 def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_id: int, label: str):
-    db_object = db.query(models.Timeseries).filter(models.Timeseries.station_id == station_id) \
-                                      .filter(models.Timeseries.variable_id == variable_id) \
-                                      .filter(models.Timeseries.label == label.strip()) \
-                                      .first()
-    # there is a mismatch with additional_metadata
-    if db_object:
-        db_object.additional_metadata = str(db_object.additional_metadata)
+    with db.no_autoflush:
+        db_object = db.query(models.Timeseries).filter(models.Timeseries.station_id == station_id) \
+                                          .filter(models.Timeseries.variable_id == variable_id) \
+                                          .filter(models.Timeseries.label == label.strip()) \
+                                          .first()
+        # no_autoflush keeps this lookup from flushing pending (incomplete) objects
+        # there is a mismatch with additional_metadata
+        if db_object:
+            db_object.additional_metadata = str(db_object.additional_metadata)
     return db_object
 
 
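On the `no_autoflush` change: during an upload the session already holds
pending Data objects, and without the guard this SELECT would trigger an
autoflush that tries to INSERT those half-built rows before the timeseries
lookup completes. A self-contained sketch of the mechanism (toy model,
in-memory SQLite):

    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()

    class Thing(Base):
        __tablename__ = 'things'
        id = Column(Integer, primary_key=True)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(bind=engine)

    session.add(Thing())              # pending, not flushed yet
    with session.no_autoflush:
        session.query(Thing).all()    # SELECT without flushing the pending INSERT
    # outside the block, the same query would autoflush the pending object first
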
-- 
GitLab