Skip to content
Snippets Groups Projects
Commit e5616b9b authored by lukas leufen's avatar lukas leufen
Browse files

simple test for l_p_loss

parent feb1aa56
No related branches found
No related tags found
2 merge requests!9new version v0.2.0,!7l_p_loss and lrdecay implementation
Pipeline #25704 passed
...@@ -14,12 +14,10 @@ def to_list(arg): ...@@ -14,12 +14,10 @@ def to_list(arg):
return arg return arg
def l_p_loss(power):
    """Return a Keras loss function computing the mean L^p error.

    Args:
        power: exponent ``p`` applied to the absolute element-wise error.

    Returns:
        A callable ``loss(y_true, y_pred)`` suitable for ``model.compile``.
        For ``power=2`` this is the ordinary mean squared error.
    """
    def loss(y_true, y_pred):
        # Mean over the last axis of |y_pred - y_true|^p.
        # NOTE(review): K is assumed to be keras.backend, imported at module
        # level in src/helpers.py (not visible in this chunk).
        return K.mean(K.pow(K.abs(y_pred - y_true), power), axis=-1)
    return loss
class lrDecay(keras.callbacks.History): class lrDecay(keras.callbacks.History):
......
import pytest
from src.helpers import l_p_loss
import logging
import os
import keras
import keras.backend as K
import numpy as np
class TestLoss:
    """Tests for the l_p_loss factory in src.helpers."""

    def test_l_p_loss(self):
        """l_p_loss(2) reduces to mean squared error on an identity model."""
        model = keras.Sequential()
        # Identity Lambda layer: the model has no trainable weights, so the
        # loss reported by fit() is exactly the L^2 error of the raw inputs
        # against the targets, unaffected by the optimizer step.
        model.add(keras.layers.Lambda(lambda x: x, input_shape=(None, )))
        model.compile(optimizer=keras.optimizers.Adam(), loss=l_p_loss(2))
        hist = model.fit(np.array([1, 0]), np.array([1, 1]), epochs=1)
        # ((1-1)^2 + (0-1)^2) / 2 == 0.5 — exactly representable in float32,
        # so an exact equality comparison is safe here.
        assert hist.history['loss'][0] == 0.5
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment