Commit 7376d0c9 authored by leufen1

use he init when using relu activations

parent 8b5f1346
8 merge requests: !319 add all changes of dev into release v1.4.0 branch, !318 Resolve "release v1.4.0", !283 Merge latest develop into falcos issue, !279 include Develop, !278 Felix issue295 transformation parameters in data handler, !275 include lazy preprocessing, !273 use he init when using relu activations, !259 Draft: Resolve "WRF-Datahandler should inherit from SingleStationDatahandler"
Pipeline #62974 passed
@@ -67,7 +67,8 @@ class FCN(AbstractModelClass):
                    "sigmoid": partial(keras.layers.Activation, "sigmoid"),
                    "linear": partial(keras.layers.Activation, "linear"),
                    "selu": partial(keras.layers.Activation, "selu")}
-    _initializer = {"selu": keras.initializers.lecun_normal()}
+    _initializer = {"tanh": "glorot_uniform", "sigmoid": "glorot_uniform", "linear": "glorot_uniform",
+                    "relu": keras.initializers.he_normal(), "selu": keras.initializers.lecun_normal()}
     _optimizer = {"adam": keras.optimizers.adam, "sgd": keras.optimizers.SGD}
     _regularizer = {"l1": keras.regularizers.l1, "l2": keras.regularizers.l2, "l1_l2": keras.regularizers.l1_l2}
     _requirements = ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad", "momentum", "nesterov", "l1", "l2"]
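For context: He initialization draws weights with variance 2/fan_in, which compensates for ReLU zeroing out roughly half of its inputs and keeps activation variance stable across layers; Glorot/Xavier (Keras' default, used here for tanh/sigmoid/linear) and LeCun initialization serve the same purpose for their respective activations. Below is a minimal sketch of how such an activation-to-initializer mapping can be applied when building a layer; the helper name and layer size are illustrative assumptions, not MLAir's actual API:

```python
import keras

# Same mapping as in the diff above: pick the initializer that matches the activation.
_initializer = {"tanh": "glorot_uniform", "sigmoid": "glorot_uniform", "linear": "glorot_uniform",
                "relu": keras.initializers.he_normal(), "selu": keras.initializers.lecun_normal()}


def dense_with_matching_init(units, activation):
    # Hypothetical helper: look up the initializer for the given activation and
    # fall back to Keras' default (glorot_uniform) for activations not listed.
    init = _initializer.get(activation, "glorot_uniform")
    return keras.layers.Dense(units, activation=activation, kernel_initializer=init)


layer = dense_with_matching_init(64, "relu")  # kernel weights drawn from He normal
```

Keeping the lookup in a single class-level dict, as the commit does, means every layer of the network picks up the matching initializer automatically instead of relying on Keras' glorot_uniform default, which is a poor fit for ReLU and SELU.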