Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
MLAir
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Wiki
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Snippets
Build
Pipelines
Jobs
Pipeline schedules
Artifacts
Deploy
Releases
Container registry
Model registry
Operate
Environments
Monitor
Incidents
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
esde
machine-learning
MLAir
Commits
573b3e19
Commit
573b3e19
authored
5 years ago
by
Felix Kleinert
Browse files
Options
Downloads
Patches
Plain Diff
update tests: check for BN layers; some standard Keras namings seem to have some inconsistencies.
#62
parent
73a5fdd5
Branches
Branches containing commit
Tags
Tags containing commit
2 merge requests
!59
Develop
,
!53
Felix issue062 apply advanced pooling in inception block feat
Pipeline
#31047
passed
5 years ago
Stage: test
Stage: pages
Stage: deploy
Changes
1
Pipelines
1
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
test/test_model_modules/test_inception_model.py
+62
-2
62 additions, 2 deletions
test/test_model_modules/test_inception_model.py
with
62 additions
and
2 deletions
test/test_model_modules/test_inception_model.py
+
62
−
2
View file @
573b3e19
...
@@ -44,6 +44,7 @@ class TestInceptionModelBase:
...
@@ -44,6 +44,7 @@ class TestInceptionModelBase:
tower
=
base
.
create_conv_tower
(
**
opts
)
tower
=
base
.
create_conv_tower
(
**
opts
)
# check last element of tower (activation)
# check last element of tower (activation)
assert
base
.
part_of_block
==
1
assert
base
.
part_of_block
==
1
# assert tower.name == 'Block_0a_act_2_1/Relu:0'
assert
tower
.
name
==
'
Block_0a_act_2/Relu:0
'
assert
tower
.
name
==
'
Block_0a_act_2/Relu:0
'
act_layer
=
tower
.
_keras_history
[
0
]
act_layer
=
tower
.
_keras_history
[
0
]
assert
isinstance
(
act_layer
,
keras
.
layers
.
advanced_activations
.
ReLU
)
assert
isinstance
(
act_layer
,
keras
.
layers
.
advanced_activations
.
ReLU
)
...
@@ -74,6 +75,48 @@ class TestInceptionModelBase:
...
@@ -74,6 +75,48 @@ class TestInceptionModelBase:
assert
conv_layer2
.
name
==
'
Block_0a_1x1
'
assert
conv_layer2
.
name
==
'
Block_0a_1x1
'
assert
conv_layer2
.
input
.
_keras_shape
==
(
None
,
32
,
32
,
3
)
assert
conv_layer2
.
input
.
_keras_shape
==
(
None
,
32
,
32
,
3
)
def test_create_conv_tower_3x3_batch_norm(self, base, input_x):
    """Build a 3x3 conv tower with batch normalisation enabled and walk it
    backwards (output -> input), checking each layer's type, configuration
    and name.

    Expected layer order (from output): ReLU -> BatchNormalization ->
    Conv2D(3x3) -> SymmetricPadding2D -> ReLU -> Conv2D(1x1 reduction).
    """
    # import keras
    opts = {'input_x': input_x, 'reduction_filter': 64, 'tower_filter': 32,
            'tower_kernel': (3, 3), 'padding': 'SymPad2D', 'batch_normalisation': True}
    tower = base.create_conv_tower(**opts)
    # check last element of tower (activation)
    assert base.part_of_block == 1
    # NOTE(review): the trailing "_1" suffix presumably comes from keras'
    # automatic unique naming across tests (see commit message about naming
    # inconsistencies) — confirm against the test-session layer graph.
    # assert tower.name == 'Block_0a_act_2/Relu:0'
    assert tower.name == 'Block_0a_act_2_1/Relu:0'
    act_layer = tower._keras_history[0]
    assert isinstance(act_layer, keras.layers.advanced_activations.ReLU)
    assert act_layer.name == "Block_0a_act_2"
    # check previous element of tower (batch_normal)
    batch_layer = self.step_in(act_layer)
    assert isinstance(batch_layer, keras.layers.BatchNormalization)
    assert batch_layer.name == 'Block_0a_BN'
    # check previous element of tower (conv2D)
    conv_layer = self.step_in(batch_layer)
    assert isinstance(conv_layer, keras.layers.Conv2D)
    assert conv_layer.filters == 32
    # padding is 'valid' because the explicit SymmetricPadding2D layer below
    # already provides the required border
    assert conv_layer.padding == 'valid'
    assert conv_layer.kernel_size == (3, 3)
    assert conv_layer.strides == (1, 1)
    assert conv_layer.name == "Block_0a_3x3"
    # check previous element of tower (padding)
    pad_layer = self.step_in(conv_layer)
    assert isinstance(pad_layer, SymmetricPadding2D)
    # (3, 3) kernel -> one pixel of symmetric padding on each side
    assert pad_layer.padding == ((1, 1), (1, 1))
    assert pad_layer.name == 'Block_0a_Pad'
    # check previous element of tower (activation)
    act_layer2 = self.step_in(pad_layer)
    assert isinstance(act_layer2, keras.layers.advanced_activations.ReLU)
    assert act_layer2.name == "Block_0a_act_1"
    # check previous element of tower (conv2D)
    conv_layer2 = self.step_in(act_layer2)
    assert isinstance(conv_layer2, keras.layers.Conv2D)
    # 1x1 reduction convolution with 'reduction_filter' filters
    assert conv_layer2.filters == 64
    assert conv_layer2.kernel_size == (1, 1)
    assert conv_layer2.padding == 'valid'
    assert conv_layer2.name == 'Block_0a_1x1'
    # tower input must be the untouched fixture input (batch, 32, 32, 3)
    assert conv_layer2.input._keras_shape == (None, 32, 32, 3)
def
test_create_conv_tower_3x3_activation
(
self
,
base
,
input_x
):
def
test_create_conv_tower_3x3_activation
(
self
,
base
,
input_x
):
# import keras
# import keras
opts
=
{
'
input_x
'
:
input_x
,
'
reduction_filter
'
:
64
,
'
tower_filter
'
:
32
,
'
tower_kernel
'
:
(
3
,
3
)}
opts
=
{
'
input_x
'
:
input_x
,
'
reduction_filter
'
:
64
,
'
tower_filter
'
:
32
,
'
tower_kernel
'
:
(
3
,
3
)}
...
@@ -96,7 +139,8 @@ class TestInceptionModelBase:
...
@@ -96,7 +139,8 @@ class TestInceptionModelBase:
tower
=
base
.
create_conv_tower
(
**
opts
)
tower
=
base
.
create_conv_tower
(
**
opts
)
# check last element of tower (activation)
# check last element of tower (activation)
assert
base
.
part_of_block
==
1
assert
base
.
part_of_block
==
1
assert
tower
.
name
==
'
Block_0a_act_1_1/Relu:0
'
assert
tower
.
name
==
'
Block_0a_act_1_2/Relu:0
'
# assert tower.name == 'Block_0a_act_1_1/Relu:0'
act_layer
=
tower
.
_keras_history
[
0
]
act_layer
=
tower
.
_keras_history
[
0
]
assert
isinstance
(
act_layer
,
keras
.
layers
.
advanced_activations
.
ReLU
)
assert
isinstance
(
act_layer
,
keras
.
layers
.
advanced_activations
.
ReLU
)
assert
act_layer
.
name
==
"
Block_0a_act_1
"
assert
act_layer
.
name
==
"
Block_0a_act_1
"
...
@@ -125,7 +169,7 @@ class TestInceptionModelBase:
...
@@ -125,7 +169,7 @@ class TestInceptionModelBase:
# check last element of tower (activation)
# check last element of tower (activation)
assert
base
.
part_of_block
==
1
assert
base
.
part_of_block
==
1
# assert tower.name == 'Block_0a_act_1/Relu:0'
# assert tower.name == 'Block_0a_act_1/Relu:0'
assert
tower
.
name
==
'
Block_0a_act_1_
3
/Relu:0
'
assert
tower
.
name
==
'
Block_0a_act_1_
4
/Relu:0
'
act_layer
=
tower
.
_keras_history
[
0
]
act_layer
=
tower
.
_keras_history
[
0
]
assert
isinstance
(
act_layer
,
keras
.
layers
.
advanced_activations
.
ReLU
)
assert
isinstance
(
act_layer
,
keras
.
layers
.
advanced_activations
.
ReLU
)
assert
act_layer
.
name
==
"
Block_0a_act_1
"
assert
act_layer
.
name
==
"
Block_0a_act_1
"
...
@@ -218,6 +262,22 @@ class TestInceptionModelBase:
...
@@ -218,6 +262,22 @@ class TestInceptionModelBase:
assert
self
.
step_in
(
block_pool
.
_keras_history
[
0
],
depth
=
3
).
name
==
'
Block_2c_Pad
'
assert
self
.
step_in
(
block_pool
.
_keras_history
[
0
],
depth
=
3
).
name
==
'
Block_2c_Pad
'
assert
isinstance
(
self
.
step_in
(
block_pool
.
_keras_history
[
0
],
depth
=
3
),
ReflectionPadding2D
)
assert
isinstance
(
self
.
step_in
(
block_pool
.
_keras_history
[
0
],
depth
=
3
),
ReflectionPadding2D
)
def test_inception_block_invalid_batchnorm(self, base, input_x):
    """Check that inception_block rejects an invalid 'max_pooling' entry.

    'max_pooling' must be a bool (or absent); passing the string 'yes' has
    to raise an AttributeError with a descriptive message.
    """
    conv = {'tower_1': {'reduction_filter': 64,
                        'tower_kernel': (3, 3),
                        'tower_filter': 64, },
            'tower_2': {'reduction_filter': 64,
                        'tower_kernel': (5, 5),
                        'tower_filter': 64,
                        'activation': 'tanh',
                        'padding': 'SymPad2D', },
            }
    # 'yes' is a string, not a bool -> inception_block must refuse it
    pool = {'pool_kernel': (3, 3), 'tower_filter': 64,
            'padding': ReflectionPadding2D, 'max_pooling': 'yes'}
    opts = {'input_x': input_x, 'tower_conv_parts': conv, 'tower_pool_parts': pool, }
    with pytest.raises(AttributeError) as einfo:
        block = base.inception_block(**opts)
    assert "max_pooling has to be either a bool or empty. Given was: yes" in str(einfo.value)
def
test_batch_normalisation
(
self
,
base
,
input_x
):
def
test_batch_normalisation
(
self
,
base
,
input_x
):
# import keras
# import keras
base
.
part_of_block
+=
1
base
.
part_of_block
+=
1
...
...
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment