From b3a7c5206e00d69f3f41e06b7f7b42418aae3b72 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Sabine=20Schr=C3=B6der?= <s.schroeder@fz-juelich.de>
Date: Mon, 5 Oct 2020 16:32:56 +0200
Subject: [PATCH] #17, #9: save changes to gitlab after crash

---
 toardb/data/crud.py            |  5 ++++-
 toardb/data/fixtures/data.json | 30 ++++++++++++++++++++----------
 toardb/data/models.py          |  5 ++++-
 toardb/data/schemas.py         |  1 +
 toardb/stationmeta/crud.py     | 13 +++++++++----
 toardb/timeseries/crud.py      |  6 ++++--
 6 files changed, 42 insertions(+), 18 deletions(-)

diff --git a/toardb/data/crud.py b/toardb/data/crud.py
index 461cc46..6afd7ff 100644
--- a/toardb/data/crud.py
+++ b/toardb/data/crud.py
@@ -58,6 +58,7 @@ def create_data(db: Session, engine: Engine, input_handle: UploadFile = File(...
     stationmeta_core = get_stationmeta_core(db=db,station_code=station_code)
     station_id = stationmeta_core.id
     timeseries = get_timeseries_by_unique_constraints(db=db,station_id=station_id,variable_id=variable_id,label=label)
+    version = '000001.000000.00000000000000'
     if timeseries:
         timeseries_id = timeseries.id
         # open SpooledTemporaryFile, skip header (and also try to insert timeseries_id!)
@@ -66,6 +67,8 @@ def create_data(db: Session, engine: Engine, input_handle: UploadFile = File(...
         df.index = df.index - timeoffset
         # now insert the timeseries_id to the end of the data frame
         df.insert(2, 'timeseries_id', timeseries_id)
+        # also insert version
+        df.insert(3, 'version', version)
         # datetime needs timezone information
         df = df.tz_localize('UTC')
         buf = StringIO()
@@ -75,7 +78,7 @@ def create_data(db: Session, engine: Engine, input_handle: UploadFile = File(...
         fake_conn = engine.raw_connection()
         fake_cur = fake_conn.cursor()
         try:
-            fake_cur.copy_from(buf, 'data', sep=',', columns=('datetime','value','flags','timeseries_id'))
+            fake_cur.copy_from(buf, 'data', sep=',', columns=('datetime','value','flags','timeseries_id', 'version'))
             fake_conn.commit()
             message = 'Data successfully inserted.'
             status_code = 200
diff --git a/toardb/data/fixtures/data.json b/toardb/data/fixtures/data.json
index ecf7cf4..5714280 100644
--- a/toardb/data/fixtures/data.json
+++ b/toardb/data/fixtures/data.json
@@ -3,60 +3,70 @@
     "datetime":"2012-12-16 21:00:00+01",
     "value":21.5809999999999995,
     "flags":0,
-    "timeseries_id":1
+    "timeseries_id":1,
+    "version":"000001.000000.00000000000000"
   },
   {
     "datetime":"2012-12-16 22:00:00+01",
     "value":13.7340000000000018,
     "flags":0,
-    "timeseries_id":1
+    "timeseries_id":1,
+    "version":"000001.000000.00000000000000"
   },
   {
     "datetime":"2012-12-16 23:00:00+01",
     "value":13.7340000000000018,
     "flags":0,
-    "timeseries_id":1
+    "timeseries_id":1,
+    "version":"000001.000000.00000000000000"
   },
   {
     "datetime":"2012-12-17 00:00:00+01",
     "value":7.84799999999999986,
     "flags":0,
-    "timeseries_id":1
+    "timeseries_id":1,
+    "version":"000001.000000.00000000000000"
   },
   {
     "datetime":"2012-12-17 01:00:00+01",
     "value":15.6959999999999997,
     "flags":0,
-    "timeseries_id":1
+    "timeseries_id":1,
+    "version":"000001.000000.00000000000000"
   },
   {
     "datetime":"2012-12-17 02:00:00+01",
     "value":11.7720000000000002,
     "flags":0,
-    "timeseries_id":1
+    "timeseries_id":1,
+    "version":"000001.000000.00000000000000"
   },
   {
     "datetime":"2012-12-17 03:00:00+01",
     "value":13.7340000000000018,
     "flags":0,
-    "timeseries_id":1
+    "timeseries_id":1,
+    "version":"000001.000000.00000000000000"
   },
   {
     "datetime":"2012-12-17 04:00:00+01",
     "value":19.620000000000001,
     "flags":0,
-    "timeseries_id":1
+    "timeseries_id":1,
+    "version":"000001.000000.00000000000000"
   },
   {
     "datetime":"2012-12-17 05:00:00+01",
     "value":15.6959999999999997,
     "flags":0,
-    "timeseries_id":1
+    "timeseries_id":1,
+    "version":"000001.000000.00000000000000"
   },
   {
     "datetime":"2012-12-17 06:00:00+01",
     "value":5.88600000000000012,
     "flags":0,
-    "timeseries_id":1
+    "timeseries_id":1,
+    "version":"000001.000000.00000000000000"
   }
 ]
diff --git a/toardb/data/models.py b/toardb/data/models.py
index db86f7d..f149ac8 100644
--- a/toardb/data/models.py
+++ b/toardb/data/models.py
@@ -1,6 +1,6 @@
 # coding: utf-8
 from sqlalchemy import PrimaryKeyConstraint, Column, DateTime, Float, ForeignKey, Integer, text, \
-                       Table, String
+                       Table, String, CHAR
 from sqlalchemy.orm import relationship
 from sqlalchemy.sql.sqltypes import NullType
 from sqlalchemy.dialects.postgresql import JSONB
@@ -21,6 +21,8 @@ class Data(Base):
     +---------------+--------------------------+-----------+----------+---------+
     | flags         | integer                  |           | not null |         |
     +---------------+--------------------------+-----------+----------+---------+
+    | version       | character(28)            |           | not null | '000001.000000.00000000000000'::bpchar |
+    +---------------+--------------------------+-----------+----------+---------+
     | timeseries_id | integer                  |           | not null |         |
     +---------------+--------------------------+-----------+----------+---------+

@@ -44,6 +46,7 @@ class Data(Base):
     datetime = Column(DateTime(True), nullable=False, index=True)
     value = Column(Float(53), nullable=False, index=True)
     flags = Column(ForeignKey('df_vocabulary.enum_val'), nullable=False)
+    version = Column(CHAR(28), nullable=False, server_default=text("'000001.000000.00000000000000'::bpchar"))
     # do not use string declaration here (not working for pytest)
     # use the explicit class name here,
     # see: https://groups.google.com/forum/#!topic/sqlalchemy/YjGhE4d6K4U
diff --git a/toardb/data/schemas.py b/toardb/data/schemas.py
index 3e125e6..ad1d908 100644
--- a/toardb/data/schemas.py
+++ b/toardb/data/schemas.py
@@ -13,6 +13,7 @@ class DataBase(BaseModel):
     datetime: dt.datetime = Field(..., description="date and time of observation")
     value: float = Field(..., description="variable value at datetime")
     flags: str = Field(..., description="flags")
+    version: str = Field(..., description="version")
     timeseries_id: int = Field(..., description="ID of timeseries")

     @validator('flags')
diff --git a/toardb/stationmeta/crud.py b/toardb/stationmeta/crud.py
index a2842f1..2c036cf 100644
--- a/toardb/stationmeta/crud.py
+++ b/toardb/stationmeta/crud.py
@@ -7,6 +7,7 @@ Create, Read, Update, Delete functionality
 from sqlalchemy import cast, Text, insert, update, delete
 from typing import List
 from geoalchemy2.types import Geometry
+from geoalchemy2.elements import WKBElement, WKTElement
 from sqlalchemy.orm import Session
 from sqlalchemy.dialects.postgresql import JSONB, ARRAY
 from fastapi import File, UploadFile
@@ -25,7 +26,8 @@ def get_stationmeta_core(db: Session, station_code: str):
     db_object = db.query(models.StationmetaCore).filter(cast(models.StationmetaCore.codes,Text).contains(station_code.strip())).first()
     # there is a mismatch with coordinates and additional_metadata
     if db_object:
-        db_object.coordinates = get_coordinates_from_geom(db_object.coordinates)
+        if isinstance(db_object.coordinates, (WKBElement, WKTElement)):
+            db_object.coordinates = get_coordinates_from_geom(db_object.coordinates)
         db_object.additional_metadata = str(db_object.additional_metadata).replace("'",'"')
     return db_object

@@ -34,7 +36,8 @@ def get_stationmeta(db: Session, station_code: str):
     db_object = db.query(models.StationmetaCore).filter(cast(models.StationmetaCore.codes,Text).contains(station_code.strip())).first()
     # there is a mismatch with coordinates and additional_metadata
     if db_object:
-        db_object.coordinates = get_coordinates_from_geom(db_object.coordinates)
+        if isinstance(db_object.coordinates, (WKBElement, WKTElement)):
+            db_object.coordinates = get_coordinates_from_geom(db_object.coordinates)
         db_object.additional_metadata = str(db_object.additional_metadata).replace("'",'"')
     return db_object

@@ -43,7 +46,8 @@ def get_all_stationmeta_core(db: Session, skip : int = 0, limit: int = None):
     db_objects = db.query(models.StationmetaCore).offset(skip).limit(limit).all()
     for db_object in db_objects:
         # there is a mismatch with coordinates and additional_metadata
-        db_object.coordinates = get_coordinates_from_geom(db_object.coordinates)
+        if isinstance(db_object.coordinates, (WKBElement, WKTElement)):
+            db_object.coordinates = get_coordinates_from_geom(db_object.coordinates)
         db_object.additional_metadata = str(db_object.additional_metadata).replace("'",'"')
     return db_objects

@@ -52,7 +56,8 @@ def get_all_stationmeta(db: Session, skip : int = 0, limit: int = None):
     db_objects = db.query(models.StationmetaCore).offset(skip).limit(limit).all()
     for db_object in db_objects:
         # there is a mismatch with coordinates and additional_metadata
-        db_object.coordinates = get_coordinates_from_geom(db_object.coordinates)
+        if isinstance(db_object.coordinates, (WKBElement, WKTElement)):
+            db_object.coordinates = get_coordinates_from_geom(db_object.coordinates)
         db_object.additional_metadata = str(db_object.additional_metadata).replace("'",'"')
     return db_objects

diff --git a/toardb/timeseries/crud.py b/toardb/timeseries/crud.py
index 36b57c7..526f63e 100644
--- a/toardb/timeseries/crud.py
+++ b/toardb/timeseries/crud.py
@@ -25,7 +25,8 @@ def get_timeseries(db: Session, timeseries_id: int):
     if db_object:
         db_object.additional_metadata = str(db_object.additional_metadata).replace("'",'"')
         # there is also a mismatch with coordinates and additional_metadata from station object
-        db_object.station.coordinates = get_coordinates_from_geom(db_object.station.coordinates)
+        if isinstance(db_object.station.coordinates, (WKBElement, WKTElement)):
+            db_object.station.coordinates = get_coordinates_from_geom(db_object.station.coordinates)
         db_object.station.additional_metadata = str(db_object.station.additional_metadata).replace("'",'"')
     return db_object

@@ -51,7 +52,8 @@ def get_timeseries_by_unique_constraints(db: Session, station_id: int, variable_
     if db_object:
         db_object.additional_metadata = str(db_object.additional_metadata).replace("'",'"')
         # there is also a mismatch with coordinates and additional_metadata from station object
-        db_object.station.coordinates = get_coordinates_from_geom(db_object.station.coordinates)
+        if isinstance(db_object.station.coordinates, (WKBElement, WKTElement)):
+            db_object.station.coordinates = get_coordinates_from_geom(db_object.station.coordinates)
         db_object.station.additional_metadata = str(db_object.station.additional_metadata).replace("'",'"')
     return db_object

--
GitLab
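
Note (illustration, not part of the patch): the sketch below shows how the new 'version' column travels from the parsed data frame into the COPY statement, mirroring the change to toardb/data/crud.py above. The helper name copy_with_version, the df argument (a pandas DataFrame indexed by datetime with the columns value and flags) and the SQLAlchemy engine are assumptions made for this example only.

import pandas as pd
from io import StringIO

def copy_with_version(df: pd.DataFrame, engine, timeseries_id: int,
                      version: str = '000001.000000.00000000000000') -> None:
    # assumed input: df indexed by naive datetimes, columns (value, flags)
    df = df.copy()
    df.insert(2, 'timeseries_id', timeseries_id)   # column order must match the COPY column list
    df.insert(3, 'version', version)               # constant version string for every row
    df = df.tz_localize('UTC')                     # datetime needs timezone information
    buf = StringIO()
    df.to_csv(buf, header=False)                   # index (datetime) is written as the first field
    buf.seek(0)
    conn = engine.raw_connection()                 # psycopg2 connection underneath SQLAlchemy
    cur = conn.cursor()
    try:
        cur.copy_from(buf, 'data', sep=',',
                      columns=('datetime', 'value', 'flags', 'timeseries_id', 'version'))
        conn.commit()
    finally:
        conn.close()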
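
Note (illustration, not part of the patch): the isinstance guard added in stationmeta/crud.py and timeseries/crud.py converts coordinates only while the attribute still holds a raw geoalchemy2 geometry element, presumably because an object already post-processed earlier in the same session holds a plain value and must not be converted twice. A minimal sketch of that pattern, with the converter passed in explicitly so no import path beyond geoalchemy2 is assumed:

from geoalchemy2.elements import WKBElement, WKTElement

def coordinates_to_plain(db_object, convert):
    # 'convert' stands in for get_coordinates_from_geom from the repository;
    # it is applied only if the attribute is still a raw WKB/WKT geometry.
    if isinstance(db_object.coordinates, (WKBElement, WKTElement)):
        db_object.coordinates = convert(db_object.coordinates)
    return db_object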