From c937d9be09a7c0ff7266005074b0a63f28c80d57 Mon Sep 17 00:00:00 2001 From: Felix Kleinert <f.kleinert@fz-juelich.de> Date: Tue, 28 Apr 2020 13:00:57 +0200 Subject: [PATCH] update docstrings --- src/model_modules/advanced_paddings.py | 22 ++++++++++++++++++---- src/model_modules/flatten.py | 11 +++++++++++ 2 files changed, 29 insertions(+), 4 deletions(-) diff --git a/src/model_modules/advanced_paddings.py b/src/model_modules/advanced_paddings.py index d9e55c78..ea16e5b8 100644 --- a/src/model_modules/advanced_paddings.py +++ b/src/model_modules/advanced_paddings.py @@ -254,10 +254,24 @@ class SymmetricPadding2D(_ZeroPadding): class Padding2D: - ''' - This class combines the implemented padding methods. You can call this method by defining a specific padding type. - The __call__ method will return the corresponding Padding layer. - ''' + """ + Combine all implemented padding methods. + + You can call this method by defining a specific padding type. The __call__ method will return the corresponding + Padding layer. + + .. code-block:: python + + input_x = ... # your input data + kernel_size = (5, 1) + padding_size = PadUtils.get_padding_for_same(kernel_size) + + tower = layers.Conv2D(...)(input_x) + tower = layers.Activation(...)(tower) + tower = Padding2D('ZeroPad2D')(padding=padding_size, name=f'Custom_Pad')(tower) + + Padding type can either be set by a string or directly by using an instance of a valid padding class. + """ allowed_paddings = { **dict.fromkeys(("RefPad2D", "ReflectionPadding2D"), ReflectionPadding2D), diff --git a/src/model_modules/flatten.py b/src/model_modules/flatten.py index efb0e977..e2dde448 100644 --- a/src/model_modules/flatten.py +++ b/src/model_modules/flatten.py @@ -47,6 +47,17 @@ def flatten_tail(input_x: keras.layers, inner_neurons: int, activation: Union[Ca :param kernel_regularizer: :return: + + .. code-block:: python + + input_x = ... 
# your input data + conv_out = Conv2D(*args)(input_x) # your convolutional stack + out = flatten_tail(conv_out, inner_neurons=64, activation=keras.layers.advanced_activations.ELU, + output_neurons=4, + output_activation='linear', reduction_filter=64, + name='Main', bound_weight=False, dropout_rate=.3, + kernel_regularizer=keras.regularizers.l2() + ) """ # compression layer if reduction_filter is None: -- GitLab