diff --git a/mlair/model_modules/branched_input_networks.py b/mlair/model_modules/branched_input_networks.py
index d078afd55aa8fcecc1f844542076befa048d415b..af3a8bffa3169556d55af94192915e3a27f89cc1 100644
--- a/mlair/model_modules/branched_input_networks.py
+++ b/mlair/model_modules/branched_input_networks.py
@@ -74,10 +74,13 @@ class BranchedInputCNN(CNNfromConfig):  # pragma: no cover
 
     @staticmethod
     def _get_layer_name(layer: keras.layers, layer_kwargs: Union[dict, None], pos: int, branch: int = None):
-        name = layer.__name__
-        if "Conv" in layer.__name__ and isinstance(layer_kwargs, dict) and "kernel_size" in layer_kwargs:
+        if isinstance(layer, partial):
+            name = layer.args[0] if layer.func.__name__ == "Activation" else layer.func.__name__
+        else:
+            name = layer.__name__
+        if "Conv" in name and isinstance(layer_kwargs, dict) and "kernel_size" in layer_kwargs:
             name = name + "_" + "x".join(map(str, layer_kwargs["kernel_size"]))
-        if "Pooling" in layer.__name__ and isinstance(layer_kwargs, dict) and "pool_size" in layer_kwargs:
+        if "Pooling" in name and isinstance(layer_kwargs, dict) and "pool_size" in layer_kwargs:
             name = name + "_" + "x".join(map(str, layer_kwargs["pool_size"]))
         if branch is not None:
             name += f"_branch{branch + 1}"
diff --git a/mlair/model_modules/convolutional_networks.py b/mlair/model_modules/convolutional_networks.py
index cd0e87d54995c70408f4c9fd57a95cf4e368632f..2270c1ee2abf8b17913e6017181cffcde17bd923 100644
--- a/mlair/model_modules/convolutional_networks.py
+++ b/mlair/model_modules/convolutional_networks.py
@@ -17,7 +17,7 @@ class CNNfromConfig(AbstractModelClass):
                    "sigmoid": partial(keras.layers.Activation, "sigmoid"),
                    "linear": partial(keras.layers.Activation, "linear"),
                    "prelu": partial(keras.layers.PReLU, alpha_initializer=keras.initializers.constant(value=0.25)),
-                   "leakyrelu": partial(keras.layers.LeakyReLU)}
+                   "leakyrelu": keras.layers.LeakyReLU}
     _initializer = {"tanh": "glorot_uniform", "sigmoid": "glorot_uniform", "linear": "glorot_uniform",
                     "relu": keras.initializers.he_normal(), "prelu": keras.initializers.he_normal()}
     _optimizer = {"adam": keras.optimizers.Adam, "sgd": keras.optimizers.SGD}
@@ -99,10 +99,13 @@ class CNNfromConfig(AbstractModelClass):
 
     @staticmethod
     def _get_layer_name(layer: keras.layers, layer_kwargs: Union[dict, None], pos: int, *args):
-        name = layer.__name__
-        if "Conv" in layer.__name__ and isinstance(layer_kwargs, dict) and "kernel_size" in layer_kwargs:
+        if isinstance(layer, partial):
+            name = layer.args[0] if layer.func.__name__ == "Activation" else layer.func.__name__
+        else:
+            name = layer.__name__
+        if "Conv" in name and isinstance(layer_kwargs, dict) and "kernel_size" in layer_kwargs:
             name = name + "_" + "x".join(map(str, layer_kwargs["kernel_size"]))
-        if "Pooling" in layer.__name__ and isinstance(layer_kwargs, dict) and "pool_size" in layer_kwargs:
+        if "Pooling" in name and isinstance(layer_kwargs, dict) and "pool_size" in layer_kwargs:
             name = name + "_" + "x".join(map(str, layer_kwargs["pool_size"]))
         name += f"_{pos + 1}"
         return name