From 7376d0c9d2fadaaa926e23f6120b34315a826d4f Mon Sep 17 00:00:00 2001
From: leufen1 <l.leufen@fz-juelich.de>
Date: Mon, 15 Mar 2021 12:52:16 +0100
Subject: [PATCH] use he init when using relu activations

---
 mlair/model_modules/fully_connected_networks.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/mlair/model_modules/fully_connected_networks.py b/mlair/model_modules/fully_connected_networks.py
index 007b8f0d..7108d9a3 100644
--- a/mlair/model_modules/fully_connected_networks.py
+++ b/mlair/model_modules/fully_connected_networks.py
@@ -67,7 +67,8 @@ class FCN(AbstractModelClass):
                    "sigmoid": partial(keras.layers.Activation, "sigmoid"),
                    "linear": partial(keras.layers.Activation, "linear"),
                    "selu": partial(keras.layers.Activation, "selu")}
-    _initializer = {"selu": keras.initializers.lecun_normal()}
+    _initializer = {"tanh": "glorot_uniform", "sigmoid": "glorot_uniform", "linear": "glorot_uniform",
+                    "relu": keras.initializers.he_normal(), "selu": keras.initializers.lecun_normal()}
     _optimizer = {"adam": keras.optimizers.adam, "sgd": keras.optimizers.SGD}
     _regularizer = {"l1": keras.regularizers.l1, "l2": keras.regularizers.l2, "l1_l2": keras.regularizers.l1_l2}
     _requirements = ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad", "momentum", "nesterov", "l1", "l2"]
-- 
GitLab
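
For context, here is a minimal sketch of how an activation-to-initializer mapping like the extended _initializer dict is typically consumed when stacking Dense layers: each layer's kernel_initializer is looked up from the activation name, so relu layers get He-normal weights and selu layers LeCun-normal weights. The hidden_layer helper and the tf.keras import are illustrative assumptions for a self-contained example, not MLAir's actual layer-construction code (MLAir imports standalone keras).

    from tensorflow import keras

    # Mirrors the mapping added by this patch: match the weight initializer
    # to the activation's recommended scheme.
    _initializer = {"tanh": "glorot_uniform", "sigmoid": "glorot_uniform", "linear": "glorot_uniform",
                    "relu": keras.initializers.he_normal(), "selu": keras.initializers.lecun_normal()}

    def hidden_layer(units: int, activation: str) -> keras.layers.Dense:
        """Create a Dense layer whose kernel init matches its activation.

        Falls back to Keras' default glorot_uniform for activations not
        listed in the mapping.
        """
        kernel_initializer = _initializer.get(activation, "glorot_uniform")
        return keras.layers.Dense(units, activation=activation,
                                  kernel_initializer=kernel_initializer)

    model = keras.Sequential([keras.layers.Input(shape=(10,)),
                              hidden_layer(64, "relu"),    # He-normal init
                              hidden_layer(32, "selu"),    # LeCun-normal init
                              hidden_layer(1, "linear")])  # glorot_uniform init

The rationale for the change itself: He initialization scales the weight variance by 2/fan_in, which compensates for ReLU zeroing out roughly half of its inputs, whereas the previous dict fell back to a single default regardless of activation.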