Skip to content
Snippets Groups Projects
Commit e7a4586d authored by lukas leufen's avatar lukas leufen
Browse files

Added tests for AbstractModelClass, /close #29

parent 763ddcb0
No related branches found
No related tags found
2 merge requests!24include recent development,!22model class
Pipeline #27449 passed
......@@ -18,13 +18,16 @@ class AbstractModelClass(ABC):
"""
def __init__(self) -> None:
    """
    Initialise the internal placeholders.

    Both attributes start as None; concrete subclasses are expected to
    populate `_model` with a keras model and `_loss` with a callable
    loss function through their own setters.
    """
    self._loss = None
    self._model = None
def __getattr__(self, name: str) -> Any:
    """
    Forward unresolved attribute lookups to the wrapped model.

    Python only calls this hook after normal attribute resolution has
    failed, so with `model = ModelClass()` an expression such as
    `model.compile(...)` transparently resolves against the underlying
    keras model, exactly as `model.model.compile(...)` would.

    :param name: name of the attribute or method to call
    :return: attribute or method from self.model namespace
    """
    return self.model.__getattribute__(name)
@property
def model(self) -> keras.Model:
    """
    Read-only access to the stored keras model.

    :return: the keras.Model instance held in `_model` (None until a
        subclass has built the model)
    """
    return self._model
@property
def loss(self) -> Callable:
    """
    Read-only access to the stored loss function.

    The loss may be any keras loss or a customised callable. A
    customised loss is expected to wrap an internal
    loss(y_true, y_pred) function and return it.

    :return: the loss function held in `_loss` (None until set)
    """
    return self._loss
......@@ -67,6 +75,7 @@ class MyLittleModel(AbstractModelClass):
"""
def __init__(self, activation, window_history_size, channels, regularizer, dropout_rate, window_lead_time):
    """
    Configure the instance: initialise the base class, then build the
    model and attach the loss from the given arguments.

    :param activation: activation function
    :param window_history_size: number of past time steps in the input window
    :param channels: number of input channels/variables
    :param regularizer: regularizer  # NOTE(review): accepted but not forwarded anywhere — confirm intent
    :param dropout_rate: dropout rate used in the model [0, 1)
    :param window_lead_time: number of time steps to forecast in the output layer
    """
    super().__init__()
    self.set_model(activation, window_history_size, channels, dropout_rate, window_lead_time)
    self.set_loss()
def set_model(self, activation, window_history_size, channels, dropout_rate, window_lead_time):
    """
    Build the network and store it in `self._model`.

    Architecture: a 1x1 Conv2D over the input window, flattened, then a
    shrinking stack of Dense layers (64 -> 32 -> 16) ending in a Dense
    output of size `window_lead_time`.

    Fix: the previous body constructed the entire graph twice (a stale
    camelCase copy followed by the snake_case copy); the first build was
    dead work whose result was immediately overwritten. Only the final
    construction is kept.

    :param activation: activation function (a keras layer class, instantiated per use)
    :param window_history_size: number of past time steps in the input window
    :param channels: number of input channels/variables
    :param dropout_rate: dropout rate used in the model [0, 1)
    :param window_lead_time: number of time steps to forecast in the output layer
    :return: None; the built keras model is stored in `self._model`
    """
    # add 1 to window_history_size to include the current time step t0
    x_input = keras.layers.Input(shape=(window_history_size + 1, 1, channels))
    x_in = keras.layers.Conv2D(32, (1, 1), padding='same', name='{}_Conv_1x1'.format("major"))(x_input)
    x_in = activation(name='{}_conv_act'.format("major"))(x_in)
    x_in = keras.layers.Flatten(name='{}'.format("major"))(x_in)
    x_in = keras.layers.Dropout(dropout_rate, name='{}_Dropout_1'.format("major"))(x_in)
    x_in = keras.layers.Dense(64, name='{}_Dense_64'.format("major"))(x_in)
    x_in = activation()(x_in)
    x_in = keras.layers.Dense(32, name='{}_Dense_32'.format("major"))(x_in)
    x_in = activation()(x_in)
    x_in = keras.layers.Dense(16, name='{}_Dense_16'.format("major"))(x_in)
    x_in = activation()(x_in)
    x_in = keras.layers.Dense(window_lead_time, name='{}_Dense'.format("major"))(x_in)
    out_main = activation()(x_in)
    self._model = keras.Model(inputs=x_input, outputs=[out_main])
def set_loss(self):
    """
    Attach mean squared error as this model's loss.

    Fix: the previous body assigned `self._loss` twice in a row (diff
    residue); the redundant duplicate assignment is removed.

    :return: None; the loss is stored in `self._loss`
    """
    self._loss = keras.losses.mean_squared_error
import pytest
import keras
from src.model_modules.model_class import AbstractModelClass
class TestAbstractModelClass:
    """Unit tests for the AbstractModelClass base class."""

    @pytest.fixture
    def amc(self):
        # fresh instance for every test
        return AbstractModelClass()

    def test_init(self, amc):
        # both internal slots start out empty
        assert amc._model is None
        assert amc._loss is None

    def test_model_property(self, amc):
        amc._model = keras.Model()
        assert isinstance(amc.model, keras.Model)

    def test_loss_property(self, amc):
        amc._loss = keras.losses.mean_absolute_error
        assert amc.loss == keras.losses.mean_absolute_error

    def test_getattr(self, amc):
        # attribute access falls through to the wrapped keras model
        amc._model = keras.Model()
        assert hasattr(amc, "compile")
        assert hasattr(amc.model, "compile")
        assert amc.compile == amc.model.compile
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment