diff --git a/mlair/model_modules/residual_networks.py b/mlair/model_modules/residual_networks.py
new file mode 100644
index 0000000000000000000000000000000000000000..5542c1da4820d3869e5637a1debc68812d6b671f
--- /dev/null
+++ b/mlair/model_modules/residual_networks.py
@@ -0,0 +1,107 @@
+__author__ = "Lukas Leufen"
+__date__ = "2021-08-23"
+
+
+from mlair.model_modules.branched_input_networks import BranchedInputCNN
+
+import tensorflow.keras as keras
+
+
+class BranchedInputResNet(BranchedInputCNN):
+    """
+    A convolutional neural network with multiple input branches and residual blocks (skip connections).
+
+    Example of a layer configuration and the corresponding model creation:
+
+    ```python
+    input_shape = [(65, 1, 9)]
+    output_shape = [(4, )]
+
+    # layer configuration
+    layer_configuration = [
+        {"type": "Conv2D", "activation": "relu", "kernel_size": (7, 1), "filters": 32, "padding": "same"},
+        {"type": "MaxPooling2D", "pool_size": (2, 1), "strides": (2, 1)},
+        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 32, "strides": (1, 1), "kernel_regularizer": "l2"},
+        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 32, "strides": (1, 1), "kernel_regularizer": "l2"},
+        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 64, "strides": (1, 1), "kernel_regularizer": "l2", "use_1x1conv": True},
+        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 64, "strides": (1, 1), "kernel_regularizer": "l2"},
+        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 128, "strides": (1, 1), "kernel_regularizer": "l2", "use_1x1conv": True},
+        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 128, "strides": (1, 1), "kernel_regularizer": "l2"},
+        {"type": "MaxPooling2D", "pool_size": (2, 1), "strides": (2, 1)},
+        {"type": "Dropout", "rate": 0.25},
+        {"type": "Flatten"},
+        {"type": "Concatenate"},
+        {"type": "Dense", "units": 128, "activation": "relu"}
+    ]
+
+    model = BranchedInputResNet(input_shape, output_shape, layer_configuration)
+    ```
+
+    """
+
+    def __init__(self, input_shape: list, output_shape: list, layer_configuration: list, optimizer="adam", **kwargs):
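+        """Set up the branched-input ResNet; all arguments are passed on to BranchedInputCNN."""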
+
+        super().__init__(input_shape, output_shape, layer_configuration, optimizer=optimizer, **kwargs)
+
+    @staticmethod
+    def residual_block(**layer_kwargs):
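+        """
+        Build a residual block from two convolutional layers sharing ``layer_kwargs``.
+
+        The popped ``name`` is used as a template for the names of the inner layers. If ``use_1x1conv`` is
+        set, the first convolution downsamples with stride 2 and the shortcut is passed through a strided
+        1x1 convolution so that both paths have matching shapes before they are added.
+        """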
+        layer_name = layer_kwargs.pop("name").split("_")
+        layer_name = "_".join([*layer_name[0:2], "%s", *layer_name[2:]])
+        act = layer_kwargs.pop("activation")
+        act_name = act.__name__
+        use_1x1conv = layer_kwargs.pop("use_1x1conv", False)
+
+        def block(x):
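+            # main path: two convolutions; the first one downsamples (stride 2) when a 1x1 shortcut convolution is used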
+            layer_kwargs.update({"strides": 2 if use_1x1conv else 1})
+            y = keras.layers.Conv2D(**layer_kwargs, padding='same', name=layer_name % "Conv1")(x)
+            y = act(name=layer_name % f"{act_name}1")(y)
+            layer_kwargs.update({"strides": 1})
+            y = keras.layers.Conv2D(**layer_kwargs, padding='same', name=layer_name % "Conv2")(y)
+            y = keras.layers.BatchNormalization(name=layer_name % "BN2")(y)
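+            # shortcut path: match the shape of the main path with a strided 1x1 convolution when downsampling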
+            if use_1x1conv is True:
+                layer_kwargs.update({"strides": 2})
+                layer_kwargs.update({"kernel_size": 1})
+                x = keras.layers.Conv2D(**layer_kwargs, padding='same', name=layer_name % "Conv1x1")(x)
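+            # skip connection: add shortcut and main path, then apply the activation a second time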
+            out = keras.layers.Add(name=layer_name % "Add")([x, y])
+            out = act(name=layer_name % f"{act_name}2")(out)
+            return out
+        return block
+
+    def _extract_layer_conf(self, layer_opts):
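+        """
+        Resolve a single layer configuration entry into the layer callable, its kwargs, and a follow-up layer.
+
+        Extends the parent method by the layer type "residual_block": its activation is not returned as a
+        separate follow-up layer but passed into the block, which applies it internally.
+        """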
+        follow_up_layer = None
+        layer_type = layer_opts.pop("type")
+        activation_type = layer_opts.pop("activation", None)
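+        # an activation selects a matching kernel initializer and becomes the follow-up layer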
+        if activation_type is not None:
+            activation = self._activation.get(activation_type)
+            kernel_initializer = self._initializer.get(activation_type, "glorot_uniform")
+            layer_opts["kernel_initializer"] = kernel_initializer
+            follow_up_layer = activation
+        regularizer_type = layer_opts.pop("kernel_regularizer", None)
+        if regularizer_type is not None:
+            layer_opts["kernel_regularizer"] = self._set_regularizer(regularizer_type, **self.kwargs)
+        if layer_type.lower() == "residual_block":
+            layer = self.residual_block
+            layer_opts["activation"] = follow_up_layer
+            follow_up_layer = None
+        else:
+            layer = getattr(keras.layers, layer_type, None)
+        return layer, layer_opts, follow_up_layer