
Resolve "release v1.4.0"

Merged Ghost User requested to merge release_v1.4.0 into master
3 files changed: +69 -6
@@ -5,6 +5,7 @@ from functools import reduce, partial
 from mlair.model_modules import AbstractModelClass
 from mlair.helpers import select_from_dict
 
+from mlair.model_modules.loss import var_loss, custom_loss
 import keras
@@ -64,7 +65,9 @@ class FCN(AbstractModelClass):
     _activation = {"relu": keras.layers.ReLU, "tanh": partial(keras.layers.Activation, "tanh"),
                    "sigmoid": partial(keras.layers.Activation, "sigmoid"),
-                   "linear": partial(keras.layers.Activation, "linear")}
+                   "linear": partial(keras.layers.Activation, "linear"),
+                   "selu": partial(keras.layers.Activation, "selu")}
+    _initializer = {"selu": keras.initializers.lecun_normal()}
     _optimizer = {"adam": keras.optimizers.adam, "sgd": keras.optimizers.SGD}
     _requirements = ["lr", "beta_1", "beta_2", "epsilon", "decay", "amsgrad", "momentum", "nesterov"]
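
Note on the new "selu" option: SELU activations are usually paired with LeCun-normal weight initialisation so that their self-normalising property holds, which is why an _initializer lookup is added alongside the activation map. A minimal standalone sketch of that pairing (layer sizes and names chosen here for illustration, not taken from MLAir):

    import keras

    # illustrative only: a tiny dense stack using the "selu" / lecun_normal pairing
    x_input = keras.layers.Input(shape=(10,))
    x = keras.layers.Dense(16, kernel_initializer=keras.initializers.lecun_normal())(x_input)
    x = keras.layers.Activation("selu")(x)
    out = keras.layers.Dense(1)(x)
    model = keras.Model(inputs=x_input, outputs=[out])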
@@ -87,6 +90,7 @@ class FCN(AbstractModelClass):
         self.optimizer = self._set_optimizer(optimizer, **kwargs)
         self.layer_configuration = (n_layer, n_hidden)
         self._update_model_name()
+        self.kernel_initializer = self._initializer.get(activation, "glorot_uniform")
 
         # apply to model
         self.set_model()
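
The lookup above keeps Keras' default for everything except SELU: self._initializer.get(activation, "glorot_uniform") returns the LeCun-normal initializer object only when the model was built with activation="selu"; any other activation falls back to the string "glorot_uniform", which keras.layers.Dense accepts as the name of its default initializer. A quick check of that behaviour:

    import keras

    _initializer = {"selu": keras.initializers.lecun_normal()}

    print(_initializer.get("selu", "glorot_uniform"))   # LeCun-normal initializer object
    print(_initializer.get("relu", "glorot_uniform"))   # "glorot_uniform" (Keras default)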
@@ -126,11 +130,12 @@ class FCN(AbstractModelClass):
         x_in = keras.layers.Flatten()(x_input)
         n_layer, n_hidden = self.layer_configuration
         for layer in range(n_layer):
-            x_in = keras.layers.Dense(n_hidden)(x_in)
+            x_in = keras.layers.Dense(n_hidden, kernel_initializer=self.kernel_initializer)(x_in)
             x_in = self.activation()(x_in)
         x_in = keras.layers.Dense(self._output_shape)(x_in)
         out = self.activation_output()(x_in)
         self.model = keras.Model(inputs=x_input, outputs=[out])
 
     def set_compile_options(self):
-        self.compile_options = {"loss": [keras.losses.mean_squared_error], "metrics": ["mse", "mae"]}
+        self.compile_options = {"loss": [custom_loss([keras.losses.mean_squared_error, var_loss])],
+                                "metrics": ["mse", "mae", var_loss]}