diff --git a/src/model_modules/model_class.py b/src/model_modules/model_class.py
index 7e76145e0445df0384462974c88bdf797880ce43..85e5b6ecadb28428e1e78a00ce2da4dc690a3604 100644
--- a/src/model_modules/model_class.py
+++ b/src/model_modules/model_class.py
@@ -1,20 +1,21 @@
 import src.model_modules.keras_extensions
 
-__author__ = "Lukas Leufen"
-__date__ = '2019-12-12'
+__author__ = "Lukas Leufen, Felix Kleinert"
+__date__ = '2020-05-12'
 
 
 from abc import ABC
 from typing import Any, Callable, Dict
 
 import keras
+import tensorflow as tf
 import logging
 from src.model_modules.inception_model import InceptionModelBase
 from src.model_modules.flatten import flatten_tail
 from src.model_modules.advanced_paddings import PadUtils, Padding2D
 
 
-
 class AbstractModelClass(ABC):
 
     """
@@ -31,7 +32,6 @@ class AbstractModelClass(ABC):
         """
 
         self.__model = None
-        self.__loss = None
         self.model_name = self.__class__.__name__
         self.__custom_objects = {}
         self.__allowed_compile_options = {'optimizer': None,
@@ -72,25 +72,6 @@ class AbstractModelClass(ABC):
     def model(self, value):
         self.__model = value
 
-    @property
-    def loss(self) -> Callable:
-
-        """
-        The loss property containing a callable loss function. The loss function can be any keras loss or a customised
-        function. If the loss is a customised function, it must contain the internal loss(y_true, y_pred) function:
-            def customised_loss(args):
-                def loss(y_true, y_pred):
-                    return actual_function(y_true, y_pred, args)
-            return loss
-        :return: the loss function
-        """
-
-        return self.__loss
-
-    @loss.setter
-    def loss(self, value) -> None:
-        self.__loss = value
-
     @property
     def custom_objects(self) -> Dict:
         """
@@ -161,15 +142,33 @@ class AbstractModelClass(ABC):
             elif value is None:
                 new_v_dic = None
             else:
-                raise TypeError(f'compile_options must be dict or None, but is {type(value)}.')
-            if (new_v_attr == new_v_dic) or ((new_v_attr is None) ^ (new_v_dic is None)):
+                raise TypeError(f"`compile_options' must be `dict' or `None', but is {type(value)}.")
+            # accept if both values are equal (or equal keras optimizers), or if only one of them is set
+            if (new_v_attr == new_v_dic or self.__compare_keras_optimizers(new_v_attr, new_v_dic)) or (
+                    (new_v_attr is None) ^ (new_v_dic is None)):
                 if new_v_attr is not None:
                     self.__compile_options[allow_k] = new_v_attr
                 else:
                     self.__compile_options[allow_k] = new_v_dic
             else:
-                raise SyntaxError(
-                    f"Got different values for same argument: self.{allow_k}={new_v_attr} and '{allow_k}': {new_v_dic}")
+                raise ValueError(
+                    f"Got different values or arguments for the same argument: self.{allow_k}={new_v_attr.__class__}"
+                    f" and '{allow_k}': {new_v_dic.__class__}")
+
+    @staticmethod
+    def __compare_keras_optimizers(first, second):
+        """
+        Compare two keras optimizers: return True only if both are optimizers of the same class from
+        `keras.optimizers' and all their parameters (e.g. lr, momentum, decay) are equal.
+        """
+        if first.__class__ == second.__class__ and first.__module__ == 'keras.optimizers':
+            res = True
+            init = tf.global_variables_initializer()
+            with tf.Session() as sess:
+                sess.run(init)
+                for k, v in first.__dict__.items():
+                    try:
+                        # tensorflow variables (e.g. lr, decay) must be evaluated inside a session
+                        res *= sess.run(v) == sess.run(second.__dict__[k])
+                    except TypeError:
+                        # plain python attributes can be compared directly
+                        res *= v == second.__dict__[k]
+        else:
+            res = False
+        return bool(res)
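+
+    # Illustrative semantics of the helper above (a sketch mirroring the unit tests, not executed here):
+    #     __compare_keras_optimizers(SGD(), SGD())                # -> True  (same class, same parameters)
+    #     __compare_keras_optimizers(SGD(lr=0.1), SGD(lr=0.01))   # -> False (same class, different parameters)
+    #     __compare_keras_optimizers('NoOptimizer', SGD())        # -> False (not a keras optimizer at all)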
 
     def get_settings(self) -> Dict:
         """
@@ -181,14 +180,13 @@ class AbstractModelClass(ABC):
     def set_model(self):
         pass
 
-    def set_loss(self):
-        pass
-
     def set_compile_options(self):
         """
-        This method only has to be defined in child class, when additional compile options should be used ()
-        (other options than optimizer and loss)
-        Has to be set as dictionary: {'metrics': None,
+        This method has to be defined in the child class and sets the compile options, at least the optimizer
+        and the loss.
+        Has to be set as a dictionary: {'optimizer': None,
+                                      'loss': None,
+                                      'metrics': None,
                                       'loss_weights': None,
                                       'sample_weight_mode': None,
                                       'weighted_metrics': None,
@@ -246,9 +244,8 @@ class MyLittleModel(AbstractModelClass):
 
         # apply to model
         self.set_model()
-        self.set_loss()
         self.set_compile_options()
-        self.set_custom_objects(loss=self.loss)
+        self.set_custom_objects(loss=self.compile_options["loss"])
 
     def set_model(self):
 
@@ -278,15 +275,6 @@ class MyLittleModel(AbstractModelClass):
         out_main = self.activation()(x_in)
         self.model = keras.Model(inputs=x_input, outputs=[out_main])
 
-    def set_loss(self):
-
-        """
-        Set the loss
-        :return: loss function
-        """
-
-        # self.loss = keras.losses.mean_squared_error
-
     def set_compile_options(self):
         self.initial_lr = 1e-2
         self.optimizer = keras.optimizers.SGD(lr=self.initial_lr, momentum=0.9)
@@ -326,17 +314,14 @@ class MyBranchedModel(AbstractModelClass):
         self.channels = channels
         self.dropout_rate = 0.1
         self.regularizer = keras.regularizers.l2(0.1)
-        self.initial_lr = 1e-2
-        self.optimizer = keras.optimizers.SGD(lr=self.initial_lr, momentum=0.9)
-        self.lr_decay = src.model_modules.keras_extensions.LearningRateDecay(base_lr=self.initial_lr, drop=.94, epochs_drop=10)
         self.epochs = 20
         self.batch_size = int(256)
         self.activation = keras.layers.PReLU
 
         # apply to model
         self.set_model()
-        self.set_loss()
-        self.set_custom_objects(loss=self.loss)
+        self.set_compile_options()
+        self.set_custom_objects(loss=self.compile_options["loss"])
 
     def set_model(self):
 
@@ -370,15 +355,13 @@ class MyBranchedModel(AbstractModelClass):
         out_main = self.activation(name="main")(x_in)
         self.model = keras.Model(inputs=x_input, outputs=[out_minor_1, out_minor_2, out_main])
 
-    def set_loss(self):
-
-        """
-        Set the loss
-        :return: loss function
-        """
-
-        self.loss = [keras.losses.mean_absolute_error] + [keras.losses.mean_squared_error] + \
-                    [keras.losses.mean_squared_error]
+    def set_compile_options(self):
+        self.initial_lr = 1e-2
+        self.optimizer = keras.optimizers.SGD(lr=self.initial_lr, momentum=0.9)
+        self.lr_decay = src.model_modules.keras_extensions.LearningRateDecay(base_lr=self.initial_lr, drop=.94,
+                                                                             epochs_drop=10)
+        self.compile_options = {"loss": [keras.losses.mean_absolute_error] + [keras.losses.mean_squared_error] + [
+            keras.losses.mean_squared_error], "metrics": ["mse", "mae"]}
 
 
 class MyTowerModel(AbstractModelClass):
@@ -404,7 +387,6 @@ class MyTowerModel(AbstractModelClass):
         self.dropout_rate = 1e-2
         self.regularizer = keras.regularizers.l2(0.1)
         self.initial_lr = 1e-2
-        self.optimizer = keras.optimizers.adam(lr=self.initial_lr)
         self.lr_decay = src.model_modules.keras_extensions.LearningRateDecay(base_lr=self.initial_lr, drop=.94, epochs_drop=10)
         self.epochs = 20
         self.batch_size = int(256*4)
@@ -412,9 +394,8 @@ class MyTowerModel(AbstractModelClass):
 
         # apply to model
         self.set_model()
-        self.set_loss()
         self.set_compile_options()
-        self.set_custom_objects(loss=self.loss)
+        self.set_custom_objects(loss=self.compile_options["loss"])
 
     def set_model(self):
 
@@ -488,17 +469,9 @@ class MyTowerModel(AbstractModelClass):
 
         self.model = keras.Model(inputs=X_input, outputs=[out_main])
 
-    def set_loss(self):
-
-        """
-        Set the loss
-        :return: loss function
-        """
-
-        self.loss = [keras.losses.mean_squared_error]
-
     def set_compile_options(self):
-        self.compile_options = {"metrics": ["mse"]}
+        self.optimizer = keras.optimizers.Adam(lr=self.initial_lr)
+        self.compile_options = {"loss": [keras.losses.mean_squared_error], "metrics": ["mse"]}
 
 
 class MyPaperModel(AbstractModelClass):
@@ -524,8 +497,6 @@ class MyPaperModel(AbstractModelClass):
         self.dropout_rate = .3
         self.regularizer = keras.regularizers.l2(0.001)
         self.initial_lr = 1e-3
-        # self.optimizer = keras.optimizers.adam(lr=self.initial_lr, amsgrad=True)
-        self.optimizer = keras.optimizers.SGD(lr=self.initial_lr, momentum=0.9)
         self.lr_decay = src.model_modules.keras_extensions.LearningRateDecay(base_lr=self.initial_lr, drop=.94, epochs_drop=10)
         self.epochs = 150
         self.batch_size = int(256 * 2)
@@ -534,8 +505,8 @@ class MyPaperModel(AbstractModelClass):
 
         # apply to model
         self.set_model()
-        self.set_loss()
-        self.set_custom_objects(loss=self.loss, Padding2D=Padding2D)
+        self.set_compile_options()
+        self.set_custom_objects(loss=self.compile_options["loss"], Padding2D=Padding2D)
 
     def set_model(self):
 
@@ -633,11 +604,7 @@ class MyPaperModel(AbstractModelClass):
 
         self.model = keras.Model(inputs=X_input, outputs=[out_minor1, out_main])
 
-    def set_loss(self):
-
-        """
-        Set the loss
-        :return: loss function
-        """
-
-        self.loss = [keras.losses.mean_squared_error, keras.losses.mean_squared_error]
+    def set_compile_options(self):
+        self.optimizer = keras.optimizers.SGD(lr=self.initial_lr, momentum=0.9)
+        self.compile_options = {"loss": [keras.losses.mean_squared_error, keras.losses.mean_squared_error],
+                                "metrics": ['mse', 'mea']}
diff --git a/test/test_model_modules/test_model_class.py b/test/test_model_modules/test_model_class.py
index 147c92532465574b625907d13c814c5cfcbaeac9..a5dbb35ee213a8b4b1f538f6fa1d7e8dcc688dea 100644
--- a/test/test_model_modules/test_model_class.py
+++ b/test/test_model_modules/test_model_class.py
@@ -28,7 +28,7 @@ class TestAbstractModelClass:
 
     def test_init(self, amc):
         assert amc.model is None
-        assert amc.loss is None
         assert amc.model_name == "AbstractModelClass"
         assert amc.custom_objects == {}
 
@@ -36,19 +36,136 @@ class TestAbstractModelClass:
         amc.model = keras.Model()
         assert isinstance(amc.model, keras.Model) is True
 
-    def test_loss_property(self, amc):
+    def test_compile_options_setter_all_empty(self, amc):
+        amc.compile_options = None
+        assert amc.compile_options == {'optimizer': None,
+                                       'loss': None,
+                                       'metrics': None,
+                                       'loss_weights': None,
+                                       'sample_weight_mode': None,
+                                       'weighted_metrics': None,
+                                       'target_tensors': None
+                                       }
+
+    def test_compile_options_setter_as_dict(self, amc):
+        amc.compile_options = {"optimizer": keras.optimizers.SGD(),
+                               "loss": keras.losses.mean_absolute_error,
+                               "metrics": ["mse", "mae"]}
+        assert isinstance(amc.compile_options["optimizer"], keras.optimizers.SGD)
+        assert amc.compile_options["loss"] == keras.losses.mean_absolute_error
+        assert amc.compile_options["metrics"] == ["mse", "mae"]
+        assert amc.compile_options["loss_weights"] is None
+        assert amc.compile_options["sample_weight_mode"] is None
+        assert amc.compile_options["target_tensors"] is None
+        assert amc.compile_options["weighted_metrics"] is None
+
+    def test_compile_options_setter_as_attr(self, amc):
+        amc.optimizer = keras.optimizers.SGD()
         amc.loss = keras.losses.mean_absolute_error
+        amc.compile_options = None  # this assignment is required to merge the attributes set above into compile_options
+        # optimizer check
+        assert isinstance(amc.optimizer, keras.optimizers.SGD)
+        assert isinstance(amc.compile_options["optimizer"], keras.optimizers.SGD)
+        # loss check
         assert amc.loss == keras.losses.mean_absolute_error
-
-    def test_compile_options_property(self, amc):
-        amc.compile_options = {"metrics": ["mse", "mae"]}
-        assert amc.compile_options == {'loss_weights': None, 'metrics': ['mse', 'mae'], 'sample_weight_mode': None,
-                                       'target_tensors': None, 'weighted_metrics': None}
+        assert amc.compile_options["loss"] == keras.losses.mean_absolute_error
+        # check rest (all None as not set)
+        assert amc.compile_options["metrics"] is None
+        assert amc.compile_options["loss_weights"] is None
+        assert amc.compile_options["sample_weight_mode"] is None
+        assert amc.compile_options["target_tensors"] is None
+        assert amc.compile_options["weighted_metrics"] is None
+
+    def test_compile_options_setter_as_mix_attr_dict_no_duplicates(self, amc):
+        amc.optimizer = keras.optimizers.SGD()
+        amc.compile_options = {"loss": keras.losses.mean_absolute_error,
+                               "loss_weights": [0.2, 0.8]}
+        # check setting by attribute
+        assert isinstance(amc.optimizer, keras.optimizers.SGD)
+        assert isinstance(amc.compile_options["optimizer"], keras.optimizers.SGD)
+        # check setting by dict
+        assert amc.compile_options["loss"] == keras.losses.mean_absolute_error
+        assert amc.compile_options["loss_weights"] == [0.2, 0.8]
+        # check rest (all None as not set)
+        assert amc.compile_options["metrics"] is None
+        assert amc.compile_options["sample_weight_mode"] is None
+        assert amc.compile_options["target_tensors"] is None
+        assert amc.compile_options["weighted_metrics"] is None
+
+    def test_compile_options_setter_as_mix_attr_dict_valid_duplicates_optimizer(self, amc):
+        amc.optimizer = keras.optimizers.SGD()
+        amc.metrics = ['mse']
+        amc.compile_options = {"optimizer": keras.optimizers.SGD(),
+                               "loss": keras.losses.mean_absolute_error}
+        # check duplicate (attr and dic)
+        assert isinstance(amc.optimizer, keras.optimizers.SGD)
+        assert isinstance(amc.compile_options["optimizer"], keras.optimizers.SGD)
+        # check setting by dict
+        assert amc.compile_options["loss"] == keras.losses.mean_absolute_error
+        # check setting by attr
+        assert amc.metrics == ['mse']
+        assert amc.compile_options["metrics"] == ['mse']
+        # check rest (all None as not set)
+        assert amc.compile_options["loss_weights"] is None
+        assert amc.compile_options["sample_weight_mode"] is None
+        assert amc.compile_options["target_tensors"] is None
+        assert amc.compile_options["weighted_metrics"] is None
+
+    def test_compile_options_setter_as_mix_attr_dict_valid_duplicates_none_optimizer(self, amc):
+        amc.optimizer = keras.optimizers.SGD()
+        amc.metrics = ['mse']
+        amc.compile_options = {"metrics": ['mse'],
+                               "loss": keras.losses.mean_absolute_error}
+        # check duplicate (attr and dic)
+        assert amc.metrics == ['mse']
+        assert amc.compile_options["metrics"] == ['mse']
+        # check setting by dict
+        assert amc.compile_options["loss"] == keras.losses.mean_absolute_error
+        # check setting by attr
+        assert isinstance(amc.optimizer, keras.optimizers.SGD)
+        assert isinstance(amc.compile_options["optimizer"], keras.optimizers.SGD)
+        # check rest (all None as not set)
+        assert amc.compile_options["loss_weights"] is None
+        assert amc.compile_options["sample_weight_mode"] is None
+        assert amc.compile_options["target_tensors"] is None
+        assert amc.compile_options["weighted_metrics"] is None
 
     def test_compile_options_property_type_error(self, amc):
         with pytest.raises(TypeError) as einfo:
             amc.compile_options = 'hello world'
-        assert "`value' has to be a dictionary. But it is <class 'str'>" in str(einfo.value)
+        assert "`compile_options' must be `dict' or `None', but is <class 'str'>." in str(einfo.value)
+
+    def test_compile_options_setter_as_mix_attr_dict_invalid_duplicates_other_optimizer(self, amc):
+        amc.optimizer = keras.optimizers.SGD()
+        with pytest.raises(ValueError) as einfo:
+            amc.compile_options = {"optimizer": keras.optimizers.Adam()}
+        assert "Got different values or arguments for same argument: self.optimizer=<class" \
+               " 'keras.optimizers.SGD'> and 'optimizer': <class 'keras.optimizers.Adam'>" in str(einfo.value)
+
+    def test_compile_options_setter_as_mix_attr_dict_invalid_duplicates_same_optimizer_other_args(self, amc):
+        amc.optimizer = keras.optimizers.SGD(lr=0.1)
+        with pytest.raises(ValueError) as einfo:
+            amc.compile_options = {"optimizer": keras.optimizers.SGD(lr=0.001)}
+        assert "Got different values or arguments for same argument: self.optimizer=<class" \
+               " 'keras.optimizers.SGD'> and 'optimizer': <class 'keras.optimizers.SGD'>" in str(einfo.value)
+
+    def test_compare_keras_optimizers_equal(self, amc):
+        assert amc._AbstractModelClass__compare_keras_optimizers(keras.optimizers.SGD(), keras.optimizers.SGD()) is True
+
+    def test_compare_keras_optimizers_no_optimizer(self, amc):
+        assert amc._AbstractModelClass__compare_keras_optimizers('NoOptimizer', keras.optimizers.SGD()) is False
+
+    def test_compare_keras_optimizers_other_parameters_run_sess(self, amc):
+        assert amc._AbstractModelClass__compare_keras_optimizers(keras.optimizers.SGD(lr=0.1),
+                                                                 keras.optimizers.SGD(lr=0.01)) is False
+
+    def test_compare_keras_optimizers_other_parameters_none_sess(self, amc):
+        assert amc._AbstractModelClass__compare_keras_optimizers(keras.optimizers.SGD(decay=1),
+                                                                 keras.optimizers.SGD(decay=0.01)) is False
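+
+    # Note: `_AbstractModelClass__compare_keras_optimizers' is the name-mangled access path to the
+    # private static method `__compare_keras_optimizers' exercised in the four tests above.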
 
     def test_getattr(self, amc):
         amc.model = keras.Model()
@@ -84,9 +201,10 @@ class TestMyPaperModel:
-        # check if loss number of loss functions fit to model outputs
-        #       same loss fkts. for all tails               or different fkts. per tail
+        # check if the number of loss functions fits the number of model outputs
+        #       (same loss fct. for all tails or a different fct. per tail)
         if isinstance(mpm.model.output_shape, list):
-            assert (callable(mpm.loss) or (len(mpm.loss) == 1)) or (len(mpm.loss) == len(mpm.model.output_shape))
+            assert (callable(mpm.compile_options["loss"]) or (len(mpm.compile_options["loss"]) == 1)) or (
+                        len(mpm.compile_options["loss"]) == len(mpm.model.output_shape))
         elif isinstance(mpm.model.output_shape, tuple):
-            assert callable(mpm.loss) or (len(mpm.loss) == 1)
+            assert callable(mpm.compile_options["loss"]) or (len(mpm.compile_options["loss"]) == 1)
 
     def test_set_model(self, mpm):
         assert isinstance(mpm.model, keras.Model)
@@ -101,6 +219,9 @@ class TestMyPaperModel:
             raise TypeError(f"Type of model.output_shape as to be a tuple (one tail)"
                             f" or a list of tuples (multiple tails). Received: {type(mpm.model.output_shape)}")
 
-    def test_set_loss(self, mpm):
-        assert callable(mpm.loss) or (len(mpm.loss) > 0)
+    def test_set_compile_options(self, mpm):
+        assert callable(mpm.compile_options["loss"]) or (len(mpm.compile_options["loss"]) > 0)