Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
MLAir
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Wiki
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Snippets
Build
Pipelines
Jobs
Pipeline schedules
Artifacts
Deploy
Releases
Container registry
Model registry
Operate
Environments
Monitor
Incidents
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
esde
machine-learning
MLAir
Commits
e7a4586d
Commit
e7a4586d
authored
5 years ago
by
lukas leufen
Browse files
Options
Downloads
Patches
Plain Diff
added tests for AbstractModelClass, /close
#29
parent
763ddcb0
No related branches found
No related tags found
2 merge requests
!24
include recent development
,
!22
model class
Pipeline
#27449
passed
5 years ago
Stage: test
Stage: pages
Stage: deploy
Changes
2
Pipelines
1
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
src/model_modules/model_class.py
+29
-15
29 additions, 15 deletions
src/model_modules/model_class.py
test/test_model_modules/test_model_class.py
+29
-0
29 additions, 0 deletions
test/test_model_modules/test_model_class.py
with
58 additions
and
15 deletions
src/model_modules/model_class.py
+
29
−
15
View file @
e7a4586d
...
...
@@ -18,13 +18,16 @@ class AbstractModelClass(ABC):
"""
def
__init__
(
self
)
->
None
:
"""
Predefine internal attributes for model and loss.
"""
self
.
_model
=
None
self
.
_loss
=
None
def
__getattr__
(
self
,
name
:
str
)
->
Any
:
"""
Is called if __getattribute__ is not able to find requested attribute. Normally, the model class is saved into
a variable like `model = ModelClass()`. To bypass a call like `model.model` to access the _model attribute,
...
...
@@ -34,18 +37,22 @@ class AbstractModelClass(ABC):
:param name: name of the attribute or method to call
:return: attribute or method from self.model namespace
"""
return
self
.
model
.
__getattribute__
(
name
)
@property
def
model
(
self
)
->
keras
.
Model
:
"""
The model property containing a keras.Model instance.
:return: the keras model
"""
return
self
.
_model
@property
def
loss
(
self
)
->
Callable
:
"""
The loss property containing a callable loss function. The loss function can be any keras loss or a customised
function. If the loss is a customised function, it must contain the internal loss(y_true, y_pred) function:
...
...
@@ -55,6 +62,7 @@ class AbstractModelClass(ABC):
return loss
:return: the loss function
"""
return
self
.
_loss
...
...
@@ -67,6 +75,7 @@ class MyLittleModel(AbstractModelClass):
"""
def __init__(self, activation, window_history_size, channels, regularizer, dropout_rate, window_lead_time):
    """
    Build the concrete model and loss from the given hyper-parameters.

    :param activation: activation function
    :param window_history_size: number of past time steps in the input window
        (the current step t0 is added on top inside ``set_model``)
    :param channels: number of input channels (last axis of the input shape)
    :param regularizer: NOTE(review): accepted but not forwarded to
        ``set_model`` — appears unused here, confirm intent
    :param dropout_rate: dropout rate used in the model [0, 1)
    :param window_lead_time: number of time steps to forecast in the output layer
    """
    super().__init__()
    # The two setters are independent; loss first, then the network itself.
    self.set_loss()
    self.set_model(activation, window_history_size, channels, dropout_rate, window_lead_time)
def set_model(self, activation, window_history_size, channels, dropout_rate, window_lead_time):
    """
    Build the model and store it in ``self._model``.

    :param activation: activation function (called as a layer factory, with and
        without a ``name`` keyword — presumably a keras advanced-activation
        layer class; TODO confirm against callers)
    :param window_history_size: number of past time steps in the input window
    :param channels: number of input channels (last axis of the input shape)
    :param dropout_rate: dropout rate used in the model [0, 1)
    :param window_lead_time: number of time steps to forecast in the output layer
    :return: None — the built keras model is stored on ``self._model``
    """
    # Single branch prefix, hoisted out of every .format("major") call so the
    # layer names stay identical while the literal appears only once.
    prefix = "major"
    # add 1 to window_size to include current time step t0
    x_input = keras.layers.Input(shape=(window_history_size + 1, 1, channels))
    x_in = keras.layers.Conv2D(32, (1, 1), padding='same', name='{}_Conv_1x1'.format(prefix))(x_input)
    x_in = activation(name='{}_conv_act'.format(prefix))(x_in)
    x_in = keras.layers.Flatten(name='{}'.format(prefix))(x_in)
    x_in = keras.layers.Dropout(dropout_rate, name='{}_Dropout_1'.format(prefix))(x_in)
    x_in = keras.layers.Dense(64, name='{}_Dense_64'.format(prefix))(x_in)
    x_in = activation()(x_in)
    x_in = keras.layers.Dense(32, name='{}_Dense_32'.format(prefix))(x_in)
    x_in = activation()(x_in)
    x_in = keras.layers.Dense(16, name='{}_Dense_16'.format(prefix))(x_in)
    x_in = activation()(x_in)
    x_in = keras.layers.Dense(window_lead_time, name='{}_Dense'.format(prefix))(x_in)
    out_main = activation()(x_in)
    self._model = keras.Model(inputs=x_input, outputs=[out_main])
def set_loss(self):
    """
    Set the internal loss to keras' mean squared error.

    Stores the loss function on ``self._loss``; nothing is returned.
    """
    self._loss = keras.losses.mean_squared_error
This diff is collapsed.
Click to expand it.
test/test_model_modules/test_model_class.py
0 → 100644
+
29
−
0
View file @
e7a4586d
import
pytest
import
keras
from
src.model_modules.model_class
import
AbstractModelClass
class TestAbstractModelClass:
    """Unit tests for the AbstractModelClass wrapper."""

    @pytest.fixture
    def amc(self):
        """Provide a fresh AbstractModelClass instance for each test."""
        return AbstractModelClass()

    def test_init(self, amc):
        # both internal attributes start unset
        assert amc._model is None
        assert amc._loss is None

    def test_model_property(self, amc):
        amc._model = keras.Model()
        # redundant `is True` dropped: assert already checks truthiness
        assert isinstance(amc.model, keras.Model)

    def test_loss_property(self, amc):
        amc._loss = keras.losses.mean_absolute_error
        assert amc.loss == keras.losses.mean_absolute_error

    def test_getattr(self, amc):
        # unknown attributes must be delegated to the wrapped keras model
        amc._model = keras.Model()
        assert hasattr(amc, "compile")
        assert hasattr(amc.model, "compile")
        assert amc.compile == amc.model.compile
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment