flatten.py
    __author__ = "Felix Kleinert, Lukas Leufen"
    __date__ = '2019-12-02'
    
    from typing import Union, Callable
    
    import keras
    
    
    def get_activation(input_to_activate: keras.layers, activation: Union[Callable, str], **kwargs):
        """
        Apply activation on a given input layer.
    
        This helper function is able to handle advanced keras activations as well as strings for standard activations.
    
        :param input_to_activate: keras layer to apply activation on
        :param activation: activation to apply on `input_to_activate`. Can be a standard keras activation string or an advanced activation layer
        :param kwargs: keyword arguments used inside activation layer
    
        :return: the activated output of `input_to_activate`
    
        .. code-block:: python
    
            input_x = ... # your input data
            x_in = keras.layers.Dense(10)(input_x)  # any layer without a built-in activation
    
            # get activation via string
            x_act_string = get_activation(x_in, 'relu')
            # or get activation via layer callable
            x_act_layer = get_activation(x_in, keras.layers.advanced_activations.ELU)
    
        """
        if isinstance(activation, str):
            # append the activation's identifier to the given layer name; skip the
            # renaming if no name was provided (avoids layers named 'None_relu')
            name = kwargs.pop('name', None)
            if name is not None:
                kwargs['name'] = f'{name}_{activation}'
            act = keras.layers.Activation(activation, **kwargs)(input_to_activate)
        else:
            act = activation(**kwargs)(input_to_activate)
        return act
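
    # Note (an illustrative sketch, not part of the original module): the two
    # branches above treat the `name` keyword differently. Assuming a pre-built
    # keras tensor `x_in`:
    #
    #     get_activation(x_in, 'relu', name='tail')            # Activation layer named 'tail_relu'
    #     get_activation(x_in, keras.layers.ELU, name='tail')  # ELU layer named 'tail'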
    
    
    def flatten_tail(input_x: keras.layers, inner_neurons: int, activation: Union[Callable, str],
                     output_neurons: int, output_activation: Union[Callable, str],
                     reduction_filter: int = None,
                     name: str = None,
                     bound_weight: bool = False,
                     dropout_rate: float = None,
                     kernel_regularizer: keras.regularizers = None
                     ):
        """
        Flatten output of convolutional layers.
    
        :param input_x: Multidimensional keras layer (ConvLayer)
        :param inner_neurons: Number of neurons in the inner dense layer
        :param activation: activation to apply after conv and dense layers
        :param output_neurons: Number of neurons in the last layer (must fit the shape of labels)
        :param output_activation: final activation function
        :param reduction_filter: number of filters used for information compression on `input_x` before flatten()
        :param name: Name of the flatten tail.
        :param bound_weight: Use `tanh` as inner activation if set to True, otherwise `activation`
        :param dropout_rate: Dropout rate to be applied between trainable layers
        :param kernel_regularizer: regularizer to apply on conv and dense layers
    
        :return: flattened branch with size n=output_neurons
    
        .. code-block:: python
    
            input_x = keras.layers.Input(shape=(...))  # your input
            conv_out = keras.layers.Conv2D(*args)(input_x)  # your convolution stack
            out = flatten_tail(conv_out, inner_neurons=64, activation=keras.layers.advanced_activations.ELU,
                               output_neurons=4,
                               output_activation='linear', reduction_filter=64,
                               name='Main', bound_weight=False, dropout_rate=.3,
                               kernel_regularizer=keras.regularizers.l2()
                               )
            model = keras.Model(inputs=input_x, outputs=[out])
    
        """
        # optional 1x1 convolution to compress the filter dimension before flattening
        if reduction_filter is None:
            x_in = input_x
        else:
            x_in = keras.layers.Conv2D(reduction_filter, (1, 1), name=f'{name}_Conv_1x1',
                                       kernel_regularizer=kernel_regularizer)(input_x)
            x_in = get_activation(x_in, activation, name=f'{name}_conv_act')
    
        x_in = keras.layers.Flatten(name=f'{name}')(x_in)
    
        if dropout_rate is not None:
            x_in = keras.layers.Dropout(dropout_rate, name=f'{name}_Dropout_1')(x_in)
        x_in = keras.layers.Dense(inner_neurons, kernel_regularizer=kernel_regularizer,
                                  name=f'{name}_inner_Dense')(x_in)
        if bound_weight:
            x_in = keras.layers.Activation('tanh')(x_in)
        else:
            x_in = get_activation(x_in, activation, name=f'{name}_act')
    
        if dropout_rate is not None:
            x_in = keras.layers.Dropout(dropout_rate, name=f'{name}_Dropout_2')(x_in)
        out = keras.layers.Dense(output_neurons, kernel_regularizer=kernel_regularizer,
                                 name=f'{name}_out_Dense')(x_in)
        out = get_activation(out, output_activation, name=f'{name}_final_act')
        return out
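

    if __name__ == '__main__':
        # Minimal smoke test (an illustrative sketch, not part of the original
        # module). The input shape and all hyperparameters below are arbitrary
        # assumptions, chosen only to exercise both code paths.
        input_x = keras.layers.Input(shape=(8, 8, 16))
        out = flatten_tail(input_x, inner_neurons=64, activation=keras.layers.ELU,
                           output_neurons=4, output_activation='linear',
                           reduction_filter=8, name='Main', bound_weight=False,
                           dropout_rate=0.3, kernel_regularizer=keras.regularizers.l2())
        model = keras.Model(inputs=input_x, outputs=[out])
        model.summary()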