
Resolve "release v2.3.0"

Merged Ghost User requested to merge release_v2.3.0 into master
2 files changed  +39  −6
@@ -10,6 +10,7 @@ from mlair.helpers import select_from_dict, to_list
from mlair.model_modules.loss import var_loss
from mlair.model_modules.recurrent_networks import RNN
from mlair.model_modules.convolutional_networks import CNNfromConfig
from mlair.model_modules.residual_networks import ResNet
from mlair.model_modules.u_networks import UNet
@@ -522,3 +523,36 @@ class BranchedInputUNet(UNet, BranchedInputCNN): # pragma: no cover
print(self.model.summary())
class BranchedInputResNet(ResNet, BranchedInputCNN): # pragma: no cover
"""
A convolutional neural network with multiple input branches and residual blocks (skip connections).
```python
input_shape = [(65,1,9), (65,1,9)]
output_shape = [(4, )]
# model
layer_configuration=[
{"type": "Conv2D", "activation": "relu", "kernel_size": (7, 1), "filters": 32, "padding": "same"},
{"type": "MaxPooling2D", "pool_size": (2, 1), "strides": (2, 1)},
{"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 32, "strides": (1, 1), "kernel_regularizer": "l2"},
{"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 32, "strides": (1, 1), "kernel_regularizer": "l2"},
{"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 64, "strides": (1, 1), "kernel_regularizer": "l2", "use_1x1conv": True},
{"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 64, "strides": (1, 1), "kernel_regularizer": "l2"},
{"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 128, "strides": (1, 1), "kernel_regularizer": "l2", "use_1x1conv": True},
{"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 128, "strides": (1, 1), "kernel_regularizer": "l2"},
{"type": "MaxPooling2D", "pool_size": (2, 1), "strides": (2, 1)},
{"type": "Dropout", "rate": 0.25},
{"type": "Flatten"},
{"type": "Concatenate"},
{"type": "Dense", "units": 128, "activation": "relu"}
]
model = BranchedInputResNet(input_shape, output_shape, layer_configuration)
```
"""
def __init__(self, input_shape: list, output_shape: list, layer_configuration: list, optimizer="adam", **kwargs):
super().__init__(input_shape, output_shape, layer_configuration, optimizer=optimizer, **kwargs)
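Note on the `"type": "residual_block"` entries in the docstring configuration above: the ResNet base class expands each entry into a small stack of regularized convolutions plus a skip connection, and `"use_1x1conv": True` requests a 1×1 projection on the shortcut so it matches an increased filter count. The following standalone Keras sketch only illustrates that pattern; the function name, layer order, and batch-normalization placement are assumptions for illustration, not the exact expansion in `mlair.model_modules.residual_networks`.

```python
from tensorflow import keras


def residual_block(x, filters, kernel_size=(3, 1), strides=(1, 1),
                   activation="relu", kernel_regularizer="l2", use_1x1conv=False):
    """Illustrative residual unit: two regularized convolutions plus a skip connection.

    With use_1x1conv=True the shortcut is projected by a 1x1 convolution so its
    channel count matches `filters` (needed whenever the filter count changes).
    """
    shortcut = x
    y = keras.layers.Conv2D(filters, kernel_size, strides=strides, padding="same",
                            kernel_regularizer=kernel_regularizer)(x)
    y = keras.layers.BatchNormalization()(y)
    y = keras.layers.Activation(activation)(y)
    y = keras.layers.Conv2D(filters, kernel_size, padding="same",
                            kernel_regularizer=kernel_regularizer)(y)
    y = keras.layers.BatchNormalization()(y)
    if use_1x1conv:
        # projection shortcut: align channel count with the main branch
        shortcut = keras.layers.Conv2D(filters, (1, 1), strides=strides, padding="same")(shortcut)
    y = keras.layers.Add()([y, shortcut])
    return keras.layers.Activation(activation)(y)


# Minimal usage on a single branch shaped like the docstring example (65, 1, 9).
inputs = keras.Input(shape=(65, 1, 9))
hidden = residual_block(inputs, filters=32)
hidden = residual_block(hidden, filters=64, use_1x1conv=True)  # filter count changes -> projection
outputs = keras.layers.Dense(4)(keras.layers.Flatten()(hidden))
model = keras.Model(inputs, outputs)
```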