diff --git a/src/data_handling/data_generator.py b/src/data_handling/data_generator.py
index 732a7efdf8f360b49823dfb6ca5ca3239cc774af..92ff8b718dbb7bbbcbebe4e80fb82e0e2a7886c6 100644
--- a/src/data_handling/data_generator.py
+++ b/src/data_handling/data_generator.py
@@ -90,8 +90,7 @@ class DataGenerator(keras.utils.Sequence):
         :return: The generator's time series of history data and its labels
         """
         data = self.get_data_generator(key=item)
-        return data.history.transpose("datetime", "window", "Stations", "variables"), \
-            data.label.squeeze("Stations").transpose("datetime", "window")
+        return data.get_transposed_history(), data.label.squeeze("Stations").transpose("datetime", "window")
 
     def get_data_generator(self, key: Union[str, int] = None, local_tmp_storage: bool = True) -> DataPrep:
         """
@@ -124,7 +123,10 @@ class DataGenerator(keras.utils.Sequence):
-        Save given data locally as .pickle in self.data_path_tmp with name '<station>_<var1>_<var2>_..._<varX>.pickle'
+        Save given data locally as .pickle in self.data_path_tmp with name '<station>_<var1>_<var2>_..._<varX>_<start>_<end>.pickle'
         :param data: any data, that should be saved
         """
-        file = os.path.join(self.data_path_tmp, f"{''.join(data.station)}_{'_'.join(sorted(data.variables))}.pickle")
+        date = f"{self.kwargs.get('start')}_{self.kwargs.get('end')}"
+        vars = '_'.join(sorted(data.variables))
+        station = ''.join(data.station)
+        file = os.path.join(self.data_path_tmp, f"{station}_{vars}_{date}_.pickle")
         with open(file, "wb") as f:
             pickle.dump(data, f)
         logging.debug(f"save pickle data to {file}")
@@ -136,7 +138,10 @@ class DataGenerator(keras.utils.Sequence):
         :param variables: list of variables to load
         :return: loaded data
         """
-        file = os.path.join(self.data_path_tmp, f"{''.join(station)}_{'_'.join(sorted(variables))}.pickle")
+        date = f"{self.kwargs.get('start')}_{self.kwargs.get('end')}"
+        vars = '_'.join(sorted(variables))
+        station = ''.join(station)
+        file = os.path.join(self.data_path_tmp, f"{station}_{vars}_{date}_.pickle")
         with open(file, "rb") as f:
             data = pickle.load(f)
         logging.debug(f"load pickle data from {file}")
diff --git a/src/data_handling/data_preparation.py b/src/data_handling/data_preparation.py
index c39625b1e02506696ee5b4c13ac86c7e73420acf..81ce5cddf05cc0158f81a7666cd7a4956bf0a400 100644
--- a/src/data_handling/data_preparation.py
+++ b/src/data_handling/data_preparation.py
@@ -385,6 +385,11 @@ class DataPrep(object):
         data.loc[..., used_chem_vars] = data.loc[..., used_chem_vars].clip(min=minimum)
         return data
 
+    def get_transposed_history(self):
+        """Return history data transposed to (datetime, window, Stations, variables), or None if history is not set."""
+        if self.history is not None:
+            return self.history.transpose("datetime", "window", "Stations", "variables")
+
 
 if __name__ == "__main__":
     dp = DataPrep('data/', 'dummy', 'DEBW107', ['o3', 'temp'], statistics_per_var={'o3': 'dma8eu', 'temp': 'maximum'})
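
For reference, the transpose moved into get_transposed_history only reorders named xarray dimensions, and __getitem__ additionally drops the length-one Stations dimension from the labels. A small self-contained illustration with made-up sizes follows; only the dimension names are taken from the diff, the arrays and shapes are illustrative.

import numpy as np
import xarray as xr

# Toy history block: 1 station, 100 time steps, 7 window lags, 2 variables.
history = xr.DataArray(np.random.rand(1, 100, 7, 2),
                       dims=("Stations", "datetime", "window", "variables"))

# Same reordering as get_transposed_history(): time-like dimensions first.
transposed = history.transpose("datetime", "window", "Stations", "variables")
print(transposed.dims)  # ('datetime', 'window', 'Stations', 'variables')

# Labels: squeeze out the length-one Stations dimension, as in __getitem__.
label = xr.DataArray(np.random.rand(1, 100, 3),
                     dims=("Stations", "datetime", "window"))
print(label.squeeze("Stations").transpose("datetime", "window").dims)  # ('datetime', 'window')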