diff --git a/client/1.json b/client/1.json
new file mode 100644
index 0000000000000000000000000000000000000000..fa4df1bd0a80eeafcb8702ba2e8eca8a6d7ee087
--- /dev/null
+++ b/client/1.json
@@ -0,0 +1,34 @@
+[
+  {
+    "name": "ibtracs: Tropical Storm KOGUMA (2021163N18110)",
+    "url": "http://ibtracs.unca.edu/index.php?name=v04r00-2021163N18110#sources",
+    "metadata": {
+      "id": "2021163N18110",
+      "start": "Jun 12 00Z"
+    }
+  },
+  {
+    "name": "Australian Tropical Cyclone Database",
+    "url": "http://www.bom.gov.au/clim_data/IDCKMSTM0S.csv",
+    "metadata": {
+      "Last updated": "2021-06-15",
+      "author": "Australian Bureau of Meteorology"
+    }
+  },
+  {
+    "name": "ibtracs: IBTrACS version 4",
+    "url": "https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r00/access/csv/",
+    "metadata": {
+      "format": "csv",
+      "author": "World Data Center for Meteorology"
+    }
+  },
+  {
+    "name": "3D-CMCC FEM",
+    "url": "https://www.cmcc.it/models/3d-cmcc-fem-three-dimension-forest-ecosystem-model",
+    "metadata": {
+      "Division": "IAFES - Impacts on Agriculture, Forests and Ecosystem Services.",
+      "author": "CMCC Foundation"
+    }
+  }
+]
diff --git a/client/upload_client.py b/client/upload_client.py
new file mode 100755
index 0000000000000000000000000000000000000000..202aab7bb0529cd36bb3d92062aed0b8b4d9af59
--- /dev/null
+++ b/client/upload_client.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python3
+
+import argparse
+import getpass
+import json
+from urllib.parse import urljoin
+import sys
+
+import requests
+
+
+def login(server, user, password):
+    """Obtain a bearer token from the server and return the auth header."""
+    r = requests.post(urljoin(server, 'token'),
+                      data={'username': user, 'password': password})
+
+    if r.status_code != 200:
+        print("Unable to authenticate. Exiting.")
+        sys.exit(-1)
+
+    token = r.json()['access_token']
+    return {'Authorization': f"Bearer {token}"}
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description='Upload datasets to the apiserver')
+    parser.add_argument('-s', '--server', help='server url',
+                        default='http://localhost:8000/')
+    parser.add_argument('-u', '--user', help='user name', required=True)
+    parser.add_argument('-f', '--dataset',
+                        help='json file with datasets', required=True)
+    args = parser.parse_args()
+
+    password = getpass.getpass(prompt='Password: ')
+    auth_headers = login(args.server, args.user, password)
+
+    # Map of dataset name -> id for the datasets already on the server.
+    lst = requests.get(urljoin(args.server, 'dataset')).json()
+    datasets = {a[0]: a[1] for a in lst}
+
+    with open(args.dataset, 'r') as f:
+        ds = json.load(f)
+
+    # Update datasets that already exist on the server, create the rest.
+    for el in ds:
+        if el['name'] in datasets:
+            print(f"{el['name']} is already on the server (id={datasets[el['name']]}). Updating...")
+            r = requests.put(url=urljoin(args.server, f"dataset/{datasets[el['name']]}"),
+                             json=el, headers=auth_headers)
+        else:
+            r = requests.post(urljoin(args.server, 'dataset'),
+                              json=el, headers=auth_headers)
+        if r.status_code == 200:
+            print(f"Sent {el['name']} -> {r.json()[0]}")
+        else:
+            print(r.url, r.status_code, r.text)
+
+    print('Data sets on the server:')
+    r = requests.get(urljoin(args.server, 'dataset'))
+    for el in r.json():
+        print(el)
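The client assumes a small REST surface on the server: POST token takes form-encoded username/password and returns a JSON body with access_token; GET dataset returns a list of (name, id) pairs; POST dataset creates a record and PUT dataset/{id} updates one, both taking the dataset JSON as the request body plus a bearer token in the Authorization header. The snippet below is a minimal sketch (not part of the patch) of driving that same flow programmatically by reusing login() from upload_client.py; the server URL, credentials, and the example record are placeholders, not values from this repository.

# Sketch only: upload a single record against the endpoints upload_client.py
# targets. SERVER, the credentials, and `record` are placeholder values.
from urllib.parse import urljoin

import requests

from upload_client import login

SERVER = 'http://localhost:8000/'

record = {
    "name": "Example dataset",
    "url": "https://example.org/data.csv",
    "metadata": {"author": "Example author"},
}

# login() exits the process if authentication fails; otherwise it returns the
# Authorization header to attach to every write request.
headers = login(SERVER, 'alice', 'example-password')

# GET dataset is expected to return (name, id) pairs; build a name -> id map.
existing = {name: ds_id
            for name, ds_id in requests.get(urljoin(SERVER, 'dataset')).json()}

if record['name'] in existing:
    r = requests.put(urljoin(SERVER, f"dataset/{existing[record['name']]}"),
                     json=record, headers=headers)
else:
    r = requests.post(urljoin(SERVER, 'dataset'), json=record, headers=headers)
r.raise_for_status()
print('Server response:', r.json())

Run the sketch from the client/ directory so upload_client.py is importable; for the batch case, the script itself is the entry point, e.g. ./upload_client.py -u <user> -f 1.json.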