Commit f9c10fe3 authored by leufen1

replace all pickle calls with dill calls

parent b5f3f9ec
Pipeline #63265 passed
This commit is part of merge request !318.
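The commit message does not state why pickle is swapped for dill; a common reason (assumed here) is that dill can serialize objects plain pickle refuses, such as lambdas or locally defined functions that may end up inside the stored data. A minimal, self-contained sketch of that difference:

    import pickle
    import dill

    transform = lambda x: x ** 2  # e.g. a transformation function attached to the data

    try:
        pickle.dumps(transform)  # plain pickle cannot serialize a lambda by value
    except (pickle.PicklingError, AttributeError) as err:
        print(f"pickle failed: {err}")

    restored = dill.loads(dill.dumps(transform))  # dill serializes the function itself
    print(restored(3))  # -> 9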
@@ -8,6 +8,7 @@ import gc
 import logging
 import os
 import pickle
+import dill
 import shutil
 from functools import reduce
 from typing import Tuple, Union, List
@@ -86,7 +87,7 @@ class DefaultDataHandler(AbstractDataHandler):
         data = {"X": self._X, "Y": self._Y, "X_extreme": self._X_extreme, "Y_extreme": self._Y_extreme}
         data = self._force_dask_computation(data)
         with open(self._save_file, "wb") as f:
-            pickle.dump(data, f)
+            dill.dump(data, f)
         logging.debug(f"save pickle data to {self._save_file}")
         self._reset_data()
@@ -101,7 +102,7 @@ class DefaultDataHandler(AbstractDataHandler):
     def _load(self):
         try:
             with open(self._save_file, "rb") as f:
-                data = pickle.load(f)
+                data = dill.load(f)
             logging.debug(f"load pickle data from {self._save_file}")
             self._X, self._Y = data["X"], data["Y"]
             self._X_extreme, self._Y_extreme = data["X_extreme"], data["Y_extreme"]
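As a standalone illustration of the store/load round trip changed above (the file name and payload below are made up for the example, not taken from the handler):

    import dill

    def store_state(data: dict, save_file: str) -> None:
        # Same pattern as the save hunk above: open in binary mode and dump with dill.
        with open(save_file, "wb") as f:
            dill.dump(data, f)

    def load_state(save_file: str) -> dict:
        # Same pattern as _load above: read the whole dict back with dill.
        with open(save_file, "rb") as f:
            return dill.load(f)

    state = {"X": [1, 2, 3], "Y": [4, 5, 6], "X_extreme": None, "Y_extreme": None}
    store_state(state, "data_handler.pickle")
    assert load_state("data_handler.pickle")["X"] == [1, 2, 3]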
@@ -9,6 +9,7 @@ import math
 import os
 import shutil
 import pickle
+import dill
 from typing import Tuple, List
@@ -109,7 +110,7 @@ class KerasIterator(keras.utils.Sequence):
         """Load pickle data from disk."""
         file = self._path % index
         with open(file, "rb") as f:
-            data = pickle.load(f)
+            data = dill.load(f)
         return data["X"], data["Y"]

     @staticmethod
@@ -167,7 +168,7 @@ class KerasIterator(keras.utils.Sequence):
         data = {"X": X, "Y": Y}
         file = self._path % index
         with open(file, "wb") as f:
-            pickle.dump(data, f)
+            dill.dump(data, f)

     def _get_number_of_mini_batches(self, number_of_samples: int) -> int:
         """Return number of mini batches as the floored ration of number of samples to batch size."""
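The same swap in KerasIterator writes and reads one file per mini-batch through a printf-style path template (self._path % index). A small sketch under that assumption; the template name below is illustrative, not taken from the repository:

    import dill

    def save_batch(path_template: str, index: int, X, Y) -> None:
        # Write one mini-batch to its indexed file, as the KerasIterator hunk above does.
        with open(path_template % index, "wb") as f:
            dill.dump({"X": X, "Y": Y}, f)

    def load_batch(path_template: str, index: int):
        # Read the indexed mini-batch back and return it as an (X, Y) tuple.
        with open(path_template % index, "rb") as f:
            data = dill.load(f)
        return data["X"], data["Y"]

    template = "mini_batch_%i.pickle"  # illustrative file name pattern
    save_batch(template, 0, X=[[0.1, 0.2]], Y=[1])
    print(load_batch(template, 0))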