diff --git a/mlair/model_modules/fully_connected_networks.py b/mlair/model_modules/fully_connected_networks.py
index 940c9846bbaff1a3e3664169cf46de4f177169bc..dbcd3a9f41ca1b9a7435be95b93eb40c2b37c5a0 100644
--- a/mlair/model_modules/fully_connected_networks.py
+++ b/mlair/model_modules/fully_connected_networks.py
@@ -63,12 +63,13 @@ class FCN(AbstractModelClass):
     """
 
     _activation = {"relu": keras.layers.ReLU, "tanh": partial(keras.layers.Activation, "tanh"),
-                   "sigmoid": partial(keras.layers.Activation, "sigmoid")}
+                   "sigmoid": partial(keras.layers.Activation, "sigmoid"),
+                   "linear": partial(keras.layers.Activation, "linear")}
     _optimizer = {"adam": keras.optimizers.adam, "sgd": keras.optimizers.SGD}
     _requirements = ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad", "momentum", "nesterov"]
 
-    def __init__(self, input_shape: list, output_shape: list, activation="relu", optimizer="adam",
-                 n_layer=1, n_hidden=10, **kwargs):
+    def __init__(self, input_shape: list, output_shape: list, activation="relu", activation_output="linear",
+                 optimizer="adam", n_layer=1, n_hidden=10, **kwargs):
         """
         Sets model and loss depending on the given arguments.
 
@@ -82,6 +83,7 @@ class FCN(AbstractModelClass):
 
         # settings
         self.activation = self._set_activation(activation)
+        self.activation_output = self._set_activation(activation_output)
         self.optimizer = self._set_optimizer(optimizer, **kwargs)
         self.layer_configuration = (n_layer, n_hidden)
         self._update_model_name()
@@ -127,7 +129,7 @@ class FCN(AbstractModelClass):
             x_in = keras.layers.Dense(n_hidden)(x_in)
             x_in = self.activation()(x_in)
         x_in = keras.layers.Dense(self._output_shape)(x_in)
-        out = self.activation()(x_in)
+        out = self.activation_output()(x_in)
         self.model = keras.Model(inputs=x_input, outputs=[out])
 
     def set_compile_options(self):
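
With this change the hidden-layer activation and the output activation are configured independently: hidden layers keep using `activation`, while the final Dense layer is wrapped in the new `activation_output`, which defaults to "linear" (the usual choice for unbounded regression targets). A minimal usage sketch follows; it assumes MLAir's list-of-shape-tuples convention for `input_shape`/`output_shape`, and the concrete shape values are illustrative only, not taken from the repository:

    from mlair.model_modules.fully_connected_networks import FCN

    # Hidden layers use tanh; the output layer keeps the new "linear" default,
    # so predictions are not squashed into the hidden activation's range.
    model = FCN(input_shape=[(14, 1, 5)], output_shape=[(4,)],
                activation="tanh", activation_output="linear",
                n_layer=2, n_hidden=16)

Because `activation_output` is resolved through the same `_activation` lookup table as `activation`, any of the registered names ("relu", "tanh", "sigmoid", "linear") can also be used on the output layer, e.g. "sigmoid" for targets normalised to [0, 1]; names outside that table are presumably rejected by `_set_activation`.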