esde / machine-learning / MLAir · Merge requests · !25

fixed bug: make prediction with correct dims
Merged · Ghost User requested to merge lukas_issue006_bug_make-prediction-correct-dims into develop · 5 years ago

Commits: 2 · Pipelines: 1 · Changes: 3
Merge request reports

Compare develop (base) and latest version (bf0f2d3c) · 2 commits, 5 years ago · 3 files · +105 −3

Files (3)
src/model_modules/model_class.py (+85 −0)
@@ -154,3 +154,88 @@ class MyLittleModel(AbstractModelClass):

```python
        """
        self.loss = keras.losses.mean_squared_error


class MyBranchedModel(AbstractModelClass):
    """
    A customised model with a 1x1 Conv, and 4 Dense layers (64, 32, 16, window_lead_time), where the last layer is
    the output layer depending on the window_lead_time parameter. Dropout is used between the Convolution and the
    first Dense layer.
    """

    def __init__(self, window_history_size, window_lead_time, channels):
        """
        Sets model and loss depending on the given arguments.
        :param activation: activation function
        :param window_history_size: number of historical time steps included in the input data
        :param channels: number of variables used in input data
        :param regularizer: <not used here>
        :param dropout_rate: dropout rate used in the model [0, 1)
        :param window_lead_time: number of time steps to forecast in the output layer
        """
        super().__init__()

        # settings
        self.window_history_size = window_history_size
        self.window_lead_time = window_lead_time
        self.channels = channels
        self.dropout_rate = 0.1
        self.regularizer = keras.regularizers.l2(0.1)
        self.initial_lr = 1e-2
        self.optimizer = keras.optimizers.SGD(lr=self.initial_lr, momentum=0.9)
        self.lr_decay = helpers.LearningRateDecay(base_lr=self.initial_lr, drop=.94, epochs_drop=10)
        self.epochs = 2
        self.batch_size = int(256)
        self.activation = keras.layers.PReLU

        # apply to model
        self.set_model()
        self.set_loss()

    def set_model(self):
        """
        Build the model.
        :param activation: activation function
        :param window_history_size: number of historical time steps included in the input data
        :param channels: number of variables used in input data
        :param dropout_rate: dropout rate used in the model [0, 1)
        :param window_lead_time: number of time steps to forecast in the output layer
        :return: built keras model
        """

        # add 1 to window_size to include current time step t0
        x_input = keras.layers.Input(shape=(self.window_history_size + 1, 1, self.channels))
        x_in = keras.layers.Conv2D(32, (1, 1), padding='same', name='{}_Conv_1x1'.format("major"))(x_input)
        x_in = self.activation(name='{}_conv_act'.format("major"))(x_in)
        x_in = keras.layers.Flatten(name='{}'.format("major"))(x_in)
        x_in = keras.layers.Dropout(self.dropout_rate, name='{}_Dropout_1'.format("major"))(x_in)
        x_in = keras.layers.Dense(64, name='{}_Dense_64'.format("major"))(x_in)
        x_in = self.activation()(x_in)
        out_minor_1 = keras.layers.Dense(self.window_lead_time, name='{}_Dense'.format("minor_1"))(x_in)
        out_minor_1 = self.activation()(out_minor_1)
        x_in = keras.layers.Dense(32, name='{}_Dense_32'.format("major"))(x_in)
        x_in = self.activation()(x_in)
        out_minor_2 = keras.layers.Dense(self.window_lead_time, name='{}_Dense'.format("minor_2"))(x_in)
        out_minor_2 = self.activation()(out_minor_2)
        x_in = keras.layers.Dense(16, name='{}_Dense_16'.format("major"))(x_in)
        x_in = self.activation()(x_in)
        x_in = keras.layers.Dense(self.window_lead_time, name='{}_Dense'.format("major"))(x_in)
        out_main = self.activation()(x_in)
        self.model = keras.Model(inputs=x_input, outputs=[out_minor_1, out_minor_2, out_main])

    def set_loss(self):
        """
        Set the loss
        :return: loss function
        """
        self.loss = [keras.losses.mean_absolute_error] + [keras.losses.mean_squared_error] + \
                    [keras.losses.mean_squared_error]
```
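The three-element list set by `set_loss` lines up positionally with the three outputs of `MyBranchedModel`: `mean_absolute_error` for `minor_1`, `mean_squared_error` for `minor_2` and the main output. Below is a minimal, hypothetical sketch of how this could be wired into `compile()`; it is not code from the MR. The import path assumes the repository root is on `PYTHONPATH`, the constructor values (13, 3, 9) are made-up illustration values, and MLAir's actual training workflow may compile the model elsewhere.

```python
# Hypothetical usage sketch -- not part of the MR. Assumes a Keras 2.x-era
# environment matching the diff (keras.optimizers.SGD(lr=...)) and that the
# repository root is importable.
from src.model_modules.model_class import MyBranchedModel

model_class = MyBranchedModel(window_history_size=13, window_lead_time=3, channels=9)  # illustration values

# Keras matches the loss list to the outputs positionally:
# mean_absolute_error -> minor_1, mean_squared_error -> minor_2 and main.
model_class.model.compile(optimizer=model_class.optimizer, loss=model_class.loss)
```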
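For context on the prediction dimensions the MR title refers to: a branched `keras.Model` like the one added here predicts a list of arrays, one per output branch, each shaped `(n_samples, window_lead_time)`. The standalone toy below mirrors that output structure and checks the per-branch shapes. It is a sketch, not code from the MR: it uses `tf.keras` rather than the repository's plain `keras` import, drops the activation/dropout layers for brevity, and the size values are made up.

```python
import numpy as np
from tensorflow import keras

window_history_size, window_lead_time, channels = 13, 3, 9  # illustrative values

# Same input layout and branch structure as MyBranchedModel.set_model().
x_input = keras.layers.Input(shape=(window_history_size + 1, 1, channels))
x = keras.layers.Conv2D(32, (1, 1), padding='same')(x_input)
x = keras.layers.Flatten()(x)
x = keras.layers.Dense(64)(x)
out_minor_1 = keras.layers.Dense(window_lead_time)(x)
x = keras.layers.Dense(32)(x)
out_minor_2 = keras.layers.Dense(window_lead_time)(x)
x = keras.layers.Dense(16)(x)
out_main = keras.layers.Dense(window_lead_time)(x)
model = keras.Model(inputs=x_input, outputs=[out_minor_1, out_minor_2, out_main])

# A multi-output model returns one array per branch, each shaped
# (n_samples, window_lead_time), rather than a single array.
predictions = model.predict(np.zeros((5, window_history_size + 1, 1, channels)))
assert len(predictions) == 3
assert all(p.shape == (5, window_lead_time) for p in predictions)
```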