import keras
import numpy as np
import pytest

from src.helpers import l_p_loss
from src.model_modules.keras_extensions import *


class TestLearningRateDecay:
    """Unit tests for the ``LearningRateDecay`` Keras callback."""

    def test_init(self):
        """Default construction exposes the documented default hyper-parameters."""
        lr_decay = LearningRateDecay()
        assert lr_decay.lr == {'lr': []}
        assert lr_decay.base_lr == 0.01
        assert lr_decay.drop == 0.96
        assert lr_decay.epochs_drop == 8

    def test_check_param(self):
        """``check_param`` returns in-range values unchanged and raises ``ValueError`` otherwise.

        Uses ``object.__new__`` to obtain an instance without running
        ``__init__`` so the validator is exercised in isolation.
        """
        lr_decay = object.__new__(LearningRateDecay)
        # values inside the default (0, 1] range pass through unchanged
        assert lr_decay.check_param(1, "tester") == 1
        assert lr_decay.check_param(0.5, "tester") == 0.5
        # lower bound is exclusive: 0 is rejected
        with pytest.raises(ValueError) as e:
            lr_decay.check_param(0, "tester")
        assert "tester is out of allowed range (0, 1]: tester=0" in e.value.args[0]
        # upper bound is inclusive at 1, so 1.5 is rejected
        with pytest.raises(ValueError) as e:
            lr_decay.check_param(1.5, "tester")
        assert "tester is out of allowed range (0, 1]: tester=1.5" in e.value.args[0]
        # upper=None lifts the upper bound; lower bound still enforced
        assert lr_decay.check_param(1.5, "tester", upper=None) == 1.5
        with pytest.raises(ValueError) as e:
            lr_decay.check_param(0, "tester", upper=None)
        assert "tester is out of allowed range (0, inf): tester=0" in e.value.args[0]
        # lower=None lifts the lower bound; upper bound still enforced
        assert lr_decay.check_param(0.5, "tester", lower=None) == 0.5
        with pytest.raises(ValueError) as e:
            lr_decay.check_param(0.5, "tester", lower=None, upper=0.2)
        assert "tester is out of allowed range (-inf, 0.2]: tester=0.5" in e.value.args[0]
        # fixed: assert the exact returned value rather than mere truthiness,
        # consistent with the other check_param assertions above
        assert lr_decay.check_param(10, "tester", upper=None, lower=None) == 10

    def test_on_epoch_begin(self):
        """The recorded learning rate drops by ``drop`` every ``epochs_drop`` epochs of ``fit``."""
        lr_decay = LearningRateDecay(base_lr=0.02, drop=0.95, epochs_drop=2)
        # minimal single-neuron model so fit() is fast; the data values are irrelevant,
        # only the per-epoch lr schedule recorded by the callback is checked
        model = keras.Sequential()
        model.add(keras.layers.Dense(1, input_dim=1))
        model.compile(optimizer=keras.optimizers.Adam(), loss=l_p_loss(2))
        model.fit(np.array([1, 0, 2, 0.5]), np.array([1, 1, 0, 0.5]), epochs=5, callbacks=[lr_decay])
        assert lr_decay.lr['lr'] == [0.02, 0.02, 0.02 * 0.95, 0.02 * 0.95, 0.02 * 0.95 * 0.95]