__author__ = "Lukas Leufen"
__date__ = "2022-08-23"

from functools import partial

from mlair.model_modules.convolutional_networks import CNNfromConfig

import tensorflow.keras as keras
class ResNet(CNNfromConfig):  # pragma: no cover
    """
    A convolutional neural network with residual blocks (skip connections).

    ```python
    input_shape = [(65, 1, 9)]
    output_shape = [(4, )]

    # model
    layer_configuration = [
        {"type": "Conv2D", "activation": "relu", "kernel_size": (7, 1), "filters": 32, "padding": "same"},
        {"type": "MaxPooling2D", "pool_size": (2, 1), "strides": (2, 1)},
        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 32, "strides": (1, 1), "kernel_regularizer": "l2"},
        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 32, "strides": (1, 1), "kernel_regularizer": "l2"},
        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 64, "strides": (1, 1), "kernel_regularizer": "l2", "use_1x1conv": True},
        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 64, "strides": (1, 1), "kernel_regularizer": "l2"},
        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 128, "strides": (1, 1), "kernel_regularizer": "l2", "use_1x1conv": True},
        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 128, "strides": (1, 1), "kernel_regularizer": "l2"},
        {"type": "MaxPooling2D", "pool_size": (2, 1), "strides": (2, 1)},
        {"type": "Dropout", "rate": 0.25},
        {"type": "Flatten"},
        {"type": "Dense", "units": 128, "activation": "relu"}
    ]

    model = ResNet(input_shape, output_shape, layer_configuration)
    ```
    """
    def __init__(self, input_shape: list, output_shape: list, layer_configuration: list, optimizer="adam", **kwargs):
        super().__init__(input_shape, output_shape, layer_configuration, optimizer=optimizer, **kwargs)
    @staticmethod
    def residual_block(**layer_kwargs):
        # Insert a "%s" placeholder into the layer name so every sub-layer of
        # the block (Conv1, Conv2, Add, ...) gets a unique, readable name.
        layer_name = layer_kwargs.pop("name").split("_")
        layer_name = "_".join([*layer_name[0:2], "%s", *layer_name[2:]])
        # Derive a readable activation name, whether the activation arrives as a
        # partial (e.g. partial(keras.layers.Activation, "relu")) or as a plain
        # layer class.
        act = layer_kwargs.pop("activation")
        if isinstance(act, partial):
            act_name = act.args[0] if act.func.__name__ == "Activation" else act.func.__name__
        else:
            act_name = act.__name__
        use_1x1conv = layer_kwargs.pop("use_1x1conv", False)

        def block(x):
            # Main path: two convolutions; the first one downsamples (strides=2)
            # whenever the shortcut is projected with a 1x1 convolution.
            layer_kwargs.update({"strides": 2 if use_1x1conv else 1})
            y = keras.layers.Conv2D(**layer_kwargs, padding='same', name=layer_name % "Conv1")(x)
            y = act(name=layer_name % f"{act_name}1")(y)
            layer_kwargs.update({"strides": 1})
            y = keras.layers.Conv2D(**layer_kwargs, padding='same', name=layer_name % "Conv2")(y)
            y = keras.layers.BatchNormalization(name=layer_name % "BN2")(y)
            if use_1x1conv is True:
                # Shortcut path: project x with a strided 1x1 convolution so its
                # shape matches the main path before the element-wise addition.
                layer_kwargs.update({"strides": 2})
                layer_kwargs.update({"kernel_size": 1})
                x = keras.layers.Conv2D(**layer_kwargs, padding='same', name=layer_name % "Conv1x1")(x)
            out = keras.layers.Add(name=layer_name % "Add")([x, y])
            out = act(name=layer_name % f"{act_name}2")(out)
            return out

        return block
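
    # Illustrative shape trace for one projected block (an addition for
    # clarity, not part of the original module; the numbers assume the
    # configuration from the class docstring): with kernel_size=(3, 1),
    # filters=64 and use_1x1conv=True, an input of shape (batch, 32, 1, 32) is
    # downsampled by Conv1 (strides=2) to (batch, 16, 1, 64) and kept at that
    # shape by Conv2 (strides=1), while the shortcut Conv1x1 (strides=2)
    # projects x to (batch, 16, 1, 64) so the final Add can sum both paths
    # element-wise.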
    def _extract_layer_conf(self, layer_opts):
        follow_up_layer = None
        layer_type = layer_opts.pop("type")
        activation_type = layer_opts.pop("activation", None)
        if activation_type is not None:
            activation = self._activation.get(activation_type)
            kernel_initializer = self._initializer.get(activation_type, "glorot_uniform")
            layer_opts["kernel_initializer"] = kernel_initializer
            follow_up_layer = activation
            if self.bn is True and layer_type.lower() != "residual_block":
                # Order batch normalization before the activation for the listed
                # activations, otherwise after it.
                another_layer = keras.layers.BatchNormalization
                if activation_type in ["relu", "linear", "prelu", "leakyrelu"]:
                    follow_up_layer = (another_layer, follow_up_layer)
                else:
                    follow_up_layer = (follow_up_layer, another_layer)
        regularizer_type = layer_opts.pop("kernel_regularizer", None)
        if regularizer_type is not None:
            layer_opts["kernel_regularizer"] = self._set_regularizer(regularizer_type, **self.kwargs)
        if layer_type.lower() == "residual_block":
            # Residual blocks apply their activation internally, so pass it in
            # via layer_opts instead of returning it as a follow-up layer.
            layer = self.residual_block
            layer_opts["activation"] = follow_up_layer
            follow_up_layer = None
        else:
            layer = getattr(keras.layers, layer_type, None)
        return layer, layer_opts, follow_up_layer
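
# A minimal usage sketch (an assumption added for illustration, not part of the
# original module). It mirrors the example from the class docstring and needs a
# working MLAir installation so CNNfromConfig can resolve activations,
# initializers, and regularizers.
if __name__ == "__main__":
    input_shape = [(65, 1, 9)]
    output_shape = [(4, )]
    layer_configuration = [
        {"type": "Conv2D", "activation": "relu", "kernel_size": (7, 1), "filters": 32, "padding": "same"},
        {"type": "MaxPooling2D", "pool_size": (2, 1), "strides": (2, 1)},
        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 32, "strides": (1, 1),
         "kernel_regularizer": "l2"},
        {"type": "residual_block", "activation": "relu", "kernel_size": (3, 1), "filters": 64, "strides": (1, 1),
         "kernel_regularizer": "l2", "use_1x1conv": True},
        {"type": "Flatten"},
        {"type": "Dense", "units": 128, "activation": "relu"},
    ]
    model = ResNet(input_shape, output_shape, layer_configuration)
    # `model.model` assumes MLAir's model classes keep the compiled keras model
    # under the `model` attribute.
    model.model.summary()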