Commit d87f6b65 authored by Lukas Leufen

Merge branch 'develop' into 'master'

New App wheat production

See merge request toar/toar-location-services!8
parents 4760d475 c2320b19
1 merge request: !8 New App wheat production
Showing 308 additions and 10 deletions
@@ -7,7 +7,7 @@ Author: Martin Schultz, FZ Juelich (04 May 2016)
"""
import numpy as np
import os
from toar_location_services.settings import DATA_DIR, DEBUG, USE_DUMMY_POPULATION_DATA
from toar_location_services.settings import DATA_DIR, DEBUG, USE_DUMMY_POPULATION_DATA, PLOT_DATA
import matplotlib.pyplot as plt
@@ -58,7 +58,7 @@ def read_proxydata(filename, dummy=DEBUG and USE_DUMMY_POPULATION_DATA):
logdata = data.copy()
logdata[logdata <= 1.e-4] = 1.e-4
if DEBUG:
if DEBUG and PLOT_DATA:
plt.contourf(lonvec, latvec, np.log10(logdata))
plt.savefig('../plots/global_population_density.png')
plt.close()
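
The clamp applied to logdata above exists so that log10 never sees zero or negative values before contouring; a minimal sketch with hypothetical values:

# Sketch only: values at or below 1.e-4 are raised to 1.e-4 so np.log10 never returns -inf
import numpy as np
data = np.array([0.0, 5.e-5, 1.0, 250.0])
logdata = data.copy()
logdata[logdata <= 1.e-4] = 1.e-4
print(np.log10(logdata))  # [-4., -4., 0., ~2.4]
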
@@ -8,7 +8,7 @@ Author: Lukas Leufen, FZ Juelich (21th May 2019)
"""
import numpy as np
import os
from toar_location_services.settings import DATA_DIR, DEBUG, USE_DUMMY_STABLE_NIGHT_LIGHTS_DATA
from toar_location_services.settings import DATA_DIR, DEBUG, USE_DUMMY_STABLE_NIGHT_LIGHTS_DATA, PLOT_DATA
from django.contrib.gis.gdal import GDALRaster
import matplotlib.pyplot as plt
# import gdal
@@ -52,7 +52,7 @@ def read_proxydata(filename, dummy=DEBUG and USE_DUMMY_STABLE_NIGHT_LIGHTS_DATA)
boundingbox = [lon0, latvec.min(), lonvec.max(), lat0]
# plot data
if DEBUG:
if DEBUG and PLOT_DATA:
plt.contourf(lonvec, latvec, data)
plt.savefig('../plots/global_nighttime_lights.png')
plt.close()
@@ -10,6 +10,7 @@ from rest_framework.serializers import BaseSerializer
def get_provenance(obj):
"""construct provenance information on stable night lights dataset"""
# TODO: complete provenance information
prov = OrderedDict([
('dataset_name', 'night time lights'),
('dataset_description', 'Year 2013 Nighttime lights brightness values from NOAA DMSP. Resolution? Morde '
@@ -39,6 +40,7 @@ class AggSerializer(BaseSerializer):
vlength = len(val)
except TypeError:
vlength = 1
# TODO: check units
if vlength > 1:
properties = OrderedDict([
('agg_function', agg_function),
@@ -43,7 +43,8 @@ INSTALLED_APPS = [
'major-roads',
'population-density',
'topography-tandem-x',
'stable_night_lights'
'stable_night_lights',
'wheat_production'
]
MIDDLEWARE = [
@@ -145,4 +146,5 @@ USE_LOCAL_OVERPASS_DATA = False
USE_DUMMY_POPULATION_DATA = True
USE_DUMMY_TOPOGRAPHY_TANDEM_DATA = False
USE_DUMMY_STABLE_NIGHT_LIGHTS_DATA = False
USE_DUMMY_WHEAT_DATA = False
PLOT_DATA = False
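
One caveat worth noting for these flags: the services bind them into default arguments (e.g. dummy=DEBUG and USE_DUMMY_WHEAT_DATA), and Python evaluates defaults once at import time. A hedged illustration with hypothetical values:

DEBUG, USE_DUMMY_WHEAT_DATA = True, True      # hypothetical values for illustration

def read_proxydata(filename, dummy=DEBUG and USE_DUMMY_WHEAT_DATA):
    return "dummy data" if dummy else "full dataset"

USE_DUMMY_WHEAT_DATA = False                  # changing the flag after import has no effect
print(read_proxydata("wheat.txt"))            # still prints "dummy data"
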
@@ -8,5 +8,6 @@ urlpatterns = [
url(r'major-roads/', include('major-roads.urls'), name='major-roads'),
url(r'population-density/', include('population-density.urls'), name='population-density-density'),
url(r'topography-tandem-x/', include('topography-tandem-x.urls'), name='topography-tandem-x'),
url(r'stable_night_lights/', include('stable_night_lights.urls'), name='stable_night_lights')
url(r'stable_night_lights/', include('stable_night_lights.urls'), name='stable_night_lights'),
url(r'wheat_production/', include('wheat_production.urls'), name='wheat_production')
]
@@ -3,7 +3,6 @@ from rest_framework.views import APIView
from rest_framework.response import Response
class LocationServicesRootView(APIView):
"""Local services REST services"""
@@ -16,6 +15,7 @@ class LocationServicesRootView(APIView):
('population-density', request.build_absolute_uri()+'population-density/'),
('topography-tandem-x', request.build_absolute_uri()+'topography-tandem-x/'),
('stable_night_lights', request.build_absolute_uri()+'stable_night_lights/'),
('wheat_production', request.build_absolute_uri()+'wheat_production/'),
]),
]
# add admin view if staff user
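
For orientation, a hedged sketch of the endpoint map this root view builds once the new entry is added, for a hypothetical deployment at https://example.org/ (outer response structure omitted, URLs illustrative only):

{
    "major-roads": "https://example.org/major-roads/",
    "population-density": "https://example.org/population-density/",
    "topography-tandem-x": "https://example.org/topography-tandem-x/",
    "stable_night_lights": "https://example.org/stable_night_lights/",
    "wheat_production": "https://example.org/wheat_production/"
}
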
@@ -8,7 +8,7 @@ Author: Lukas Leufen, FZ Juelich (04 December 2018)
"""
import numpy as np
import os
from toar_location_services.settings import DATA_DIR, DEBUG, USE_DUMMY_TOPOGRAPHY_TANDEM_DATA
from toar_location_services.settings import DATA_DIR, DEBUG, USE_DUMMY_TOPOGRAPHY_TANDEM_DATA, PLOT_DATA
from django.contrib.gis.gdal import GDALRaster
import matplotlib.pyplot as plt
@@ -53,7 +53,7 @@ def read_proxydata(filename, dummy=DEBUG and USE_DUMMY_TOPOGRAPHY_TANDEM_DATA):
# set metadata
boundingbox = [lon0, latvec.min(), lonvec.max(), lat0]
if DEBUG:
if DEBUG and PLOT_DATA:
plt.contourf(lonvec, latvec, data)
plt.savefig('../plots/topography.png')
plt.close()
from django.contrib import admin
# Register your models here.
from django.apps import AppConfig
class WheatProductionConfig(AppConfig):
name = 'wheat_production'
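
Django ties the 'wheat_production' entry added to INSTALLED_APPS above to this AppConfig via the package name; a small hedged sketch of querying the app registry (assumes Django settings are configured):

from django.apps import apps
config = apps.get_app_config('wheat_production')   # label defaults to the package name
print(config.name)                                 # 'wheat_production'
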
from django.db import models
# Create your models here.
"""serializer for wheat production data
"""
from collections import OrderedDict
import datetime as dt
from rest_framework.serializers import BaseSerializer
# helper functions
def get_provenance(obj):
"""construct provenance information on wheat production dataset"""
# TODO: Fill right provenance information
prov = OrderedDict([
('dataset_name', '?'),
('dataset_description', """Wheat production values for the globe at 5 arc minute resolution. The data are in
units of production (irrigated + non-irrigated) in thousand tonnes. \nThe data were downloaded from the GAEZ
data portal ( http://gaez.fao.org/Main.html# ) and then output to ascii text format by:\nDr Katrina Sharps\n
Centre for Ecology & Hydrology, Environment Centre Wales, UK\nTel. + 44 (0)1248 374518 (direct)\n
Tel. + 44 (0)1248 374500 (reception)\nE-mail: katshar@ceh.ac.uk"""),
('data_source', """Dr Katrina Sharps\nCentre for Ecology & Hydrology, Environment Centre Wales, UK\n
Tel. + 44 (0)1248 374518 (direct)\nTel. + 44 (0)1248 374500 (reception)\nE-mail: katshar@ceh.ac.uk"""),
('datacenter_url', 'http://gaez.fao.org/Main.html'),
('download_date', '?'),
('timestamp', dt.datetime.now().isoformat())
])
return prov
# serializer classes
class AggSerializer(BaseSerializer):
""" see http://www.django-rest-framework.org/api-guide/serializers/#baseserializer """
def to_representation(self, obj):
"""takes dictionary-like obj and returns geojson compliant structure"""
agg_function = obj['agg_function']
val = obj[agg_function]
# build GeoJSON response with 'provenance' extension
# ToDo (probably in views): change content-type to application/vnd.geo+json
# ToDo: enable support for different output formats
# format properties depending on 'by_direction' (vector or not)
try:
vlength = len(val)
except TypeError:
vlength = 1
# TODO: check units below
if vlength > 1:
properties = OrderedDict([
('agg_function', agg_function),
('many', True),
(agg_function, OrderedDict([
(d, v) for d, v in zip(obj['direction'], val)
])),
('units', 'kt'),
('radius', obj['radius']),
])
if obj['direction'] is None:
properties.pop('direction')
else:
properties = OrderedDict([
('agg_function', agg_function),
('many', False),
(agg_function, val),
('units', 'kt'),
('radius', obj['radius']),
('direction', obj['direction']),
])
if obj['direction'] is None:
properties.pop('direction')
response = OrderedDict([
('type', 'Feature'),
('geometry', OrderedDict([
('type', 'Point'),
('coordinates', [obj['lon'], obj['lat']]),
])),
('properties', properties),
('provenance', get_provenance(obj)),
])
return response
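
A hedged example of the Feature this serializer emits for a scalar (non-directional) extraction; all coordinate and production values are hypothetical:

{
    "type": "Feature",
    "geometry": {"type": "Point", "coordinates": [6.4, 51.5]},
    "properties": {
        "agg_function": "mean",
        "many": False,
        "mean": 12.3,          # aggregated wheat production, units given below
        "units": "kt",
        "radius": 10000.0,
        "direction": "NW"
    },
    "provenance": {"dataset_name": "?", "...": "..."}
}
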
from django.test import TestCase
# Create your tests here.
from wheat_file_extraction import read_proxydata
FILENAME = "wheat.txt"
read_proxydata(FILENAME, dummy=False)
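
The three lines above run the extraction as a plain smoke test; a hedged sketch of how it could be phrased as a Django TestCase (class name and assertion are illustrative, not part of this merge request):

from django.test import TestCase
from .wheat_file_extraction import read_proxydata

class WheatExtractionTest(TestCase):
    def test_read_proxydata_dummy(self):
        # assumes DEBUG is enabled so the dummy branch does not raise UserWarning
        lonvec, latvec, data, datainfo = read_proxydata("wheat.txt", dummy=True)
        self.assertEqual(data.shape, datainfo['size'])
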
from django.conf.urls import url
from .views import WheatView
urlpatterns = [
url(r'^$', WheatView.as_view()),
]
import datetime as dt
import numpy as np
from collections import OrderedDict
from rest_framework.views import APIView
from rest_framework.response import Response
from toar_location_services.settings import DEBUG
from .wheat_file_extraction import read_proxydata
from .serializers import AggSerializer
from utils.views_commons import get_query_params, get_agg_function
from utils.geoutils import Directions
from utils.extraction_tools import extract_value, extract_value_stats
FILENAME = "wheat.txt"
lonvec, latvec, data, datainfo = read_proxydata(FILENAME)
if DEBUG:
print("File %s successfully loaded" % FILENAME, datainfo)
class WheatView(APIView):
def _extract(self, lat, lon, radius, agg, direction, by_direction):
"""perform actual extraction of desired quantity"""
print('**by_direction:', by_direction, '** radius, agg = ', radius, agg)
if agg is not None and radius is not None and radius > 0.:
agg_function = get_agg_function(agg)
min_angle = None
max_angle = None
if by_direction:
result = np.zeros((16,))
direction = Directions.LABELS
for i, d in enumerate(Directions.LABELS):
min_angle, max_angle = Directions.edges(d)
# ToDo: once we serve the data via rasdaman the calls with directions should use
# a polygon query for efficiency reasons.
result[i] = extract_value_stats(lonvec, latvec, data, lon, lat, default_value=-999.,
out_of_bounds_value=0., min_valid=0., max_valid=2.e6,
radius=radius, min_angle=min_angle, max_angle=max_angle,
agg=agg_function)
else:
if direction is not None:
min_angle, max_angle = Directions.edges(direction)
# ToDo: once we serve the data via rasdaman the calls with directions should use
# a polygon query for efficiency reasons.
result = extract_value_stats(lonvec, latvec, data, lon, lat, default_value=-999.,
out_of_bounds_value=0., min_valid=0., max_valid=2.e6,
radius=radius, min_angle=min_angle, max_angle=max_angle,
agg=agg_function)
else:
agg = 'value'
result = extract_value(lonvec, latvec, data, lon, lat, default_value=-999.,
out_of_bounds_value=0., min_valid=0., max_valid=2.e6)
# return data, also return agg and direction as they may have been overwritten
return result, agg, direction
def get(self, request, format=None):
"""process GET requests for wheat_production app
returns a Geo-JSON response with information about the wheat production at or
around a point location.
required arguments:
lat: latitude in degrees_north
lng: longitude in degrees_east (can be either -180 to 180 or 0 to 360)
optional arguments:
radius: search radius in m. See settings.py for default and max allowed values.
Without 'agg', the radius defaults to None and the wheat production at the
point location is returned.
agg: method of aggregation for data around point location. See settings.py for
default method. Only evaluated if radius > 0. Allowed methods are mean,
min, max, median, and NN-percentile (see views_commons.py)
direction: return data aggregation in one direction (wind sector) only.
Direction must be given as wind sector (e.g. 'N', 'NNE', 'NE', etc.).
by_direction: if True, data are returned as vector with one value aggregated
over each of 16 wind directions.
"""
lat, lon, radius, agg, direction, by_direction = get_query_params(request.query_params,
['lat', 'lon', 'radius', 'agg', 'direction', 'by_direction'])
result, agg, direction = self._extract(lat, lon, radius, agg, direction, by_direction)
rawdata = OrderedDict([
("lat", lat),
("lon", lon),
("radius", radius),
("direction", direction),
("agg_function", agg),
(agg, result),
])
response = AggSerializer(rawdata).data
return Response(response)
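
Taken together with the URL configuration above, a hypothetical client call against a local development server might look like this (host, port and query values are assumptions, not part of this merge request):

import requests

resp = requests.get(
    "http://localhost:8000/wheat_production/",
    params={"lat": 51.5, "lon": 6.4, "radius": 10000, "agg": "mean"},
)
print(resp.json()["properties"])   # agg_function, aggregated value in kt, radius, ...
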
#!/usr/bin/python
"""
Import wheat production from .txt-file
Author: Lukas Leufen, FZ Juelich (24th May 2019)
"""
import numpy as np
import os
from toar_location_services.settings import DATA_DIR, DEBUG, USE_DUMMY_WHEAT_DATA, PLOT_DATA
import matplotlib.pyplot as plt
def read_proxydata(filename, dummy=DEBUG and USE_DUMMY_WHEAT_DATA):
"""Read the ascii file and return the data array together with
some dataset properties for use in the extraction routines.
filename: name of data file (wheat.txt)
dummy: if true a small set of dummy data are returned to speed up development of other services
"""
if dummy:
return create_dummy_data()
if DEBUG:
print("DATA_DIR = ", DATA_DIR, "...")
print("Opening ", os.path.join(DATA_DIR, filename), "...")
with open(os.path.join(DATA_DIR, filename), "r") as dataset:
tok, cols = dataset.readline().split()
tok, rows = dataset.readline().split()
cols = int(cols)
rows = int(rows)
tok, lon0 = dataset.readline().split()
tok, lat0 = dataset.readline().split()
tok, dlon = dataset.readline().split()
lon0 = float(lon0)
lat0 = float(lat0)
dlon = float(dlon)
dlat = dlon
tok, missval = dataset.readline().split()
# construct data array and lonvec, latvec
data = np.zeros((rows, cols), dtype='f4')
lonvec = np.linspace(lon0, lon0 + cols * dlon, cols)
latvec = np.linspace(lat0, lat0 + rows * dlat, rows)
# data are flipped, therefore reverse latitudes
# trick from http://stackoverflow.com/questions/6771428/most-efficient-way-to-reverse-a-numpy-array
latvec = np.fliplr(np.atleast_2d((latvec)))[0]
# read actual data
for i, line in enumerate(dataset):
row = np.array([float(x) for x in line.split()], dtype='f4')
data[i, :] = row
# correct missing values
data[data == float(missval)] = np.nan
logdata = data.copy()
logdata[logdata <= 1.e-4] = 1.e-4
if DEBUG and PLOT_DATA:
plt.contourf(lonvec, latvec, np.log10(logdata))
plt.savefig('../plots/global_wheat_production.png')
plt.close()
# set metadata
boundingbox = [lon0, lat0, lonvec.max(), latvec.max()]
datainfo = {'size': (rows, cols), 'resolution': (np.abs(dlon), np.abs(dlat)),
'boundingbox': boundingbox}
return lonvec, latvec, data, datainfo
def create_dummy_data():
"""generate some small dummy data set for testing of other services
This avoids loading of a large file
Eventually this should become obsolete when we have rasdaman running..."""
lonvec = np.array([4., 5., 6.])
latvec = np.array([52., 53., 54.])
data = np.array([[1., 10., 11.], [0., 2.5, 3.], [6., 7., 8.]])
datainfo = {'size': (3, 3), 'resolution': (1., 1.),
'boundingbox': [4., 52., 6., 54.]}
msg = "#DEBUG: Using dummy data for wheat production"
if DEBUG:
print(msg)
else:
raise UserWarning(msg)
return lonvec, latvec, data, datainfo
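
The header parsing in read_proxydata reads six token/value pairs, which matches the common ESRI ASCII-grid layout; a hypothetical wheat.txt header for a global 5 arc-minute grid (values assumed, not taken from the actual file) would look like:

ncols         4320
nrows         2160
xllcorner     -180.0
yllcorner     -90.0
cellsize      0.0833333
NODATA_value  -9999
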