Commit 36905028 authored by leufen1

solved issue with zero division

parent 194fd5de
4 merge requests: !432 IOA works now also with xarray and on identical data, IOA is included in..., !431 Resolve "release v2.1.0", !430 update recent developments, !415 Resolve "add metric IOA"
Pipeline #99727 passed
@@ -217,7 +217,11 @@ def index_of_agreement(a, b, dim=None):
     b_mean = (b * np.ones(1)).mean(dim)
     den = (np.square(np.abs(b - b_mean) + np.abs(a - b_mean))).sum(dim)
     frac = num / den
-    frac[num == 0] = 0  # 0/0 division for exactly equal arrays
+    # issue with 0/0 division for exactly equal arrays
+    if isinstance(frac, (int, float)):
+        frac = 0 if num == 0 else frac
+    else:
+        frac[num == 0] = 0
     return 1 - frac
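Note: for two exactly equal arrays both the numerator and the denominator of the fraction are zero, so frac becomes NaN and the index would be NaN instead of 1; the scalar branch additionally covers plain numbers, where item assignment is not possible. The following is only a minimal standalone sketch of the fixed behaviour, not the project's full function; the numerator num is not part of the shown hunk and is assumed here to be the sum of squared differences, as in Willmott's index of agreement.

import numpy as np
import xarray as xr


def ioa_sketch(a, b, dim=None):
    # num is computed above the shown hunk in the real function; assumed here
    # to be the sum of squared differences (Willmott's index of agreement)
    num = (np.square(b - a)).sum(dim)
    b_mean = (b * np.ones(1)).mean(dim)
    den = (np.square(np.abs(b - b_mean) + np.abs(a - b_mean))).sum(dim)
    frac = num / den  # 0/0 -> NaN for exactly equal arrays without the guard
    if isinstance(frac, (int, float)):
        frac = 0 if num == 0 else frac
    else:
        frac[num == 0] = 0
    return 1 - frac


x = xr.DataArray(np.arange(6.0).reshape(2, 3), dims=("index", "value"))
print(ioa_sketch(x, x, dim="value").values)  # [1. 1.] instead of [nan nan]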
@@ -86,6 +86,7 @@ class PostProcessing(RunEnvironment):
         self.skill_scores = None
         self.feature_importance_skill_scores = None
         self.uncertainty_estimate = None
+        self.block_mse_per_station = None
         self.competitor_path = self.data_store.get("competitor_path")
         self.competitors = to_list(self.data_store.get_default("competitors", default=[]))
         self.forecast_indicator = "nn"
@@ -148,8 +149,10 @@ class PostProcessing(RunEnvironment):
         block_length = self.data_store.get_default("block_length", default="1m", scope="uncertainty_estimate")
         evaluate_competitors = self.data_store.get_default("evaluate_competitors", default=True,
                                                            scope="uncertainty_estimate")
-        block_mse = self.calculate_block_mse(evaluate_competitors=evaluate_competitors, separate_ahead=separate_ahead,
-                                             block_length=block_length)
+        block_mse, block_mse_per_station = self.calculate_block_mse(evaluate_competitors=evaluate_competitors,
+                                                                    separate_ahead=separate_ahead,
+                                                                    block_length=block_length)
+        self.block_mse_per_station = block_mse_per_station
         self.uncertainty_estimate = statistics.create_n_bootstrap_realizations(
             block_mse, dim_name_time=self.index_dim, dim_name_model=self.model_type_dim,
             dim_name_boots=self.uncertainty_estimate_boot_dim, n_boots=n_boots)
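Note: the block-wise MSE returned here feeds the bootstrap that produces the uncertainty estimate. The internals of statistics.create_n_bootstrap_realizations are not part of this diff; the sketch below is only a hypothetical illustration of a block bootstrap over monthly MSE blocks (function name, dimension names, and toy data are made up for the example): draw the blocks with replacement n_boots times and average each draw, giving one error realization per model.

import numpy as np
import xarray as xr


def bootstrap_block_mse(block_mse, dim_name_time, dim_name_boots="boots", n_boots=1000):
    # resample the MSE blocks with replacement and average each draw
    rng = np.random.default_rng(0)
    n_blocks = block_mse.sizes[dim_name_time]
    realizations = []
    for _ in range(n_boots):
        idx = rng.integers(0, n_blocks, size=n_blocks)  # blocks drawn with replacement
        realizations.append(block_mse.isel({dim_name_time: idx}).mean(dim_name_time))
    return xr.concat(realizations, dim=dim_name_boots)


# toy input: 24 monthly MSE blocks for two competing models
toy = xr.DataArray(np.random.default_rng(1).random((24, 2)),
                   dims=("index", "type"), coords={"type": ["nn", "ols"]})
boots = bootstrap_block_mse(toy, dim_name_time="index", n_boots=100)
print(boots.sizes)  # boots: 100, type: 2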
@@ -227,12 +230,16 @@ class PostProcessing(RunEnvironment):
                 # calc mse for each block (single station)
                 mse = errors.resample(indexer={index_dim: block_length}).mean(skipna=True)
                 collector.append(mse.assign_coords({coll_dim: station}))
+        # combine all mse blocks
+        mse_blocks_per_station = xr.concat(collector, dim=coll_dim)
         # calc mse for each block (average over all stations)
-        mse_blocks = xr.concat(collector, dim=coll_dim).mean(dim=coll_dim, skipna=True)
+        mse_blocks = mse_blocks_per_station.mean(dim=coll_dim, skipna=True)
         # average also on ahead steps
         if separate_ahead is False:
             mse_blocks = mse_blocks.mean(dim=self.ahead_dim, skipna=True)
-        return mse_blocks
+            mse_blocks_per_station = mse_blocks_per_station.mean(dim=self.ahead_dim, skipna=True)
+        return mse_blocks, mse_blocks_per_station
 
     def create_error_array(self, data):
         """Calculate squared error of all given time series in relation to observation."""
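Note: a simplified standalone version of the changed aggregation is sketched below, assuming the per-station squared errors are xarray DataArrays with a datetime index; the station ids, the dictionary input, and the default dimension names are made up for the example, while the aggregation steps and the "1m" block length follow the diff. The per-station blocks are now kept and returned alongside the station average.

import numpy as np
import pandas as pd
import xarray as xr


def block_mse_sketch(errors_per_station, index_dim="index", coll_dim="station",
                     ahead_dim="ahead", block_length="1m", separate_ahead=False):
    collector = []
    for station, errors in errors_per_station.items():
        # calc mse for each block (single station)
        mse = errors.resample(indexer={index_dim: block_length}).mean(skipna=True)
        collector.append(mse.assign_coords({coll_dim: station}))
    # combine all mse blocks, now also kept per station
    mse_blocks_per_station = xr.concat(collector, dim=coll_dim)
    # calc mse for each block (average over all stations)
    mse_blocks = mse_blocks_per_station.mean(dim=coll_dim, skipna=True)
    # average also on ahead steps
    if separate_ahead is False:
        mse_blocks = mse_blocks.mean(dim=ahead_dim, skipna=True)
        mse_blocks_per_station = mse_blocks_per_station.mean(dim=ahead_dim, skipna=True)
    return mse_blocks, mse_blocks_per_station


time_index = pd.date_range("2020-01-01", periods=120, freq="D")
errors = {station: xr.DataArray(np.random.default_rng(seed).random((120, 3)),
                                dims=("index", "ahead"),
                                coords={"index": time_index, "ahead": [1, 2, 3]})
          for seed, station in enumerate(["station_a", "station_b"])}
avg, per_station = block_mse_sketch(errors)
print(avg.dims, per_station.dims)  # ('index',) ('station', 'index')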
@@ -315,10 +315,12 @@ class TestIndexOfAgreement:
         coords = {'index': range(shape[0]), 'value': range(shape[1])}
         x_array1 = xr.DataArray(d1, coords=coords, dims=coords.keys())
         x_array2 = xr.DataArray(d2, coords=coords, dims=coords.keys())
+        expected = xr.DataArray(np.array([1, 1, 1]), coords={"index": [0, 1, 2]}, dims=["index"])
         res = index_of_agreement(x_array1, x_array1, dim="value")
-        assert np.testing.assert_almost_equal(res, 1) is None
+        assert xr.testing.assert_equal(res, expected) is None
+        expected = xr.DataArray(np.array([0.8478, 0.9333, 0.9629]), coords={"index": [0, 1, 2]}, dims=["index"])
         res = index_of_agreement(x_array1, x_array2, dim="value")
-        assert np.testing.assert_almost_equal(res, 0.919, 2) is None
+        assert xr.testing.assert_allclose(res, expected, atol=10**-2) is None
 
 
 class TestCalculateErrorMetrics:
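Note: since the xarray-aware IOA now returns one value per remaining dimension instead of a single scalar, the test compares against an expected DataArray with xr.testing.assert_equal / assert_allclose rather than np.testing.assert_almost_equal. A small usage example of that assertion style (the values here are made up):

import numpy as np
import xarray as xr

# assert_allclose raises on mismatch and returns None on success, which is why
# the test wraps it in "assert ... is None"
res = xr.DataArray(np.array([0.848, 0.933, 0.963]), coords={"index": [0, 1, 2]}, dims=["index"])
expected = xr.DataArray(np.array([0.85, 0.93, 0.96]), coords={"index": [0, 1, 2]}, dims=["index"])
assert xr.testing.assert_allclose(res, expected, atol=10**-2) is None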