diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/PKG-INFO b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/PKG-INFO new file mode 100644 index 0000000000000000000000000000000000000000..5f8214504c72f2cfb7307cf8259de678fba12236 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/PKG-INFO @@ -0,0 +1,207 @@ +Metadata-Version: 2.1 +Name: hickle +Version: 3.4.3 +Summary: Hickle - a HDF5 based version of pickle +Home-page: http://github.com/telegraphic/hickle +Author: Danny Price +Author-email: dan@thetelegraphic.com +License: UNKNOWN +Download-URL: https://github.com/telegraphic/hickle/archive/3.4.3.tar.gz +Description: [](https://travis-ci.org/telegraphic/hickle) + [](http://joss.theoj.org/papers/0c6638f84a1a574913ed7c6dd1051847) + + + Hickle + ====== + + Hickle is a [HDF5](https://www.hdfgroup.org/solutions/hdf5/) based clone of `pickle`, with a twist: instead of serializing to a pickle file, + Hickle dumps to a HDF5 file (Hierarchical Data Format). It is designed to be a "drop-in" replacement for pickle (for common data objects), but is + really an amalgam of `h5py` and `dill`/`pickle` with extended functionality. + + That is: `hickle` is a neat little way of dumping python variables to HDF5 files that can be read in most programming + languages, not just Python. Hickle is fast, and allows for transparent compression of your data (LZF / GZIP). + + Why use Hickle? + --------------- + + While `hickle` is designed to be a drop-in replacement for `pickle` (or something like `json`), it works very differently. + Instead of serializing / json-izing, it instead stores the data using the excellent [h5py](https://www.h5py.org/) module. + + The main reasons to use hickle are: + + 1. It's faster than pickle and cPickle. + 2. It stores data in HDF5. + 3. You can easily compress your data. + + The main reasons not to use hickle are: + + 1. You don't want to store your data in HDF5. While hickle can serialize arbitrary python objects, this functionality is provided only for convenience, and you're probably better off just using the pickle module. + 2. You want to convert your data in human-readable JSON/YAML, in which case, you should do that instead. + + So, if you want your data in HDF5, or if your pickling is taking too long, give hickle a try. + Hickle is particularly good at storing large numpy arrays, thanks to `h5py` running under the hood. + + Documentation + ------------- + + Documentation for hickle can be found at [telegraphic.github.io/hickle/](http://telegraphic.github.io/hickle/). + + + Usage example + ------------- + + Hickle is nice and easy to use, and should look very familiar to those of you who have pickled before. + + In short, `hickle` provides two methods: a [hickle.load](http://telegraphic.github.io/hickle/toc.html#hickle.load) + method, for loading hickle files, and a [hickle.dump](http://telegraphic.github.io/hickle/toc.html#hickle.dump) + method, for dumping data into HDF5. 
Here's a complete example: + + ```python + import os + import hickle as hkl + import numpy as np + + # Create a numpy array of data + array_obj = np.ones(32768, dtype='float32') + + # Dump to file + hkl.dump(array_obj, 'test.hkl', mode='w') + + # Dump data, with compression + hkl.dump(array_obj, 'test_gzip.hkl', mode='w', compression='gzip') + + # Compare filesizes + print('uncompressed: %i bytes' % os.path.getsize('test.hkl')) + print('compressed: %i bytes' % os.path.getsize('test_gzip.hkl')) + + # Load data + array_hkl = hkl.load('test_gzip.hkl') + + # Check the loaded data matches the original + assert array_hkl.dtype == array_obj.dtype + assert np.allclose(array_hkl, array_obj) + ``` + + ### HDF5 compression options + + A major benefit of `hickle` over `pickle` is that it allows fancy HDF5 features to + be applied, by passing keyword arguments on to `h5py`. So, you can do things like: + ```python + hkl.dump(array_obj, 'test_lzf.hkl', mode='w', compression='lzf', scaleoffset=0, + chunks=(100, 100), shuffle=True, fletcher32=True) + ``` + A detailed explanation of these keywords is given at http://docs.h5py.org/en/latest/high/dataset.html, + but we give a quick rundown below. + + In HDF5, chunked datasets are indexed using B-trees, a tree data structure that has speed benefits over contiguous + blocks of data. In the B-tree, data are split into [chunks](http://docs.h5py.org/en/latest/high/dataset.html#chunked-storage), + which is leveraged to allow [dataset resizing](http://docs.h5py.org/en/latest/high/dataset.html#resizable-datasets) and + compression via [filter pipelines](http://docs.h5py.org/en/latest/high/dataset.html#filter-pipeline). Filters such as + `shuffle` and `scaleoffset` move your data around to improve compression ratios, and `fletcher32` computes a checksum. + These file-level options are abstracted away from the data model. + + Recent changes + -------------- + + * December 2018: Accepted to Journal of Open-Source Software (JOSS). + * June 2018: Major refactor and support for Python 3. + * Aug 2016: Added support for scipy sparse matrices `bsr_matrix`, `csr_matrix` and `csc_matrix`. + + Performance comparison + ---------------------- + + Hickle runs a lot faster than pickle with its default settings, and a little faster than pickle with `protocol=2` set: + + ```Python + In [1]: import numpy as np + + In [2]: x = np.random.random((2000, 2000)) + + In [3]: import pickle + + In [4]: f = open('foo.pkl', 'w') + + In [5]: %time pickle.dump(x, f) # slow by default + CPU times: user 2 s, sys: 274 ms, total: 2.27 s + Wall time: 2.74 s + + In [6]: f = open('foo.pkl', 'w') + + In [7]: %time pickle.dump(x, f, protocol=2) # actually very fast + CPU times: user 18.8 ms, sys: 36 ms, total: 54.8 ms + Wall time: 55.6 ms + + In [8]: import hickle + + In [9]: f = open('foo.hkl', 'w') + + In [10]: %time hickle.dump(x, f) # a bit faster + dumping <type 'numpy.ndarray'> to file <HDF5 file "foo.hkl" (mode r+)> + CPU times: user 764 us, sys: 35.6 ms, total: 36.4 ms + Wall time: 36.2 ms + ``` + + So if you do continue to use pickle, add the `protocol=2` keyword (thanks @mrocklin for pointing this out). + + For storing python dictionaries of lists, hickle beats the python json encoder, but is slower than uJson. 
For a dictionary with 64 entries, each containing a 4096 length list of random numbers, the times are: + + + json took 2633.263 ms + uJson took 138.482 ms + hickle took 232.181 ms + + + It should be noted that these comparisons are of course not fair: storing in HDF5 will not help you convert something into JSON, nor will it help you serialize a string. But for quick storage of the contents of a python variable, it's a pretty good option. + + Installation guidelines (for Linux and Mac OS). + ----------------------------------------------- + + ### Easy method + Install with `pip` by running `pip install hickle` from the command line. + + ### Manual install + + 1. You should have Python 2.7 and above installed + + 2. Install h5py + (Official page: http://docs.h5py.org/en/latest/build.html) + + 3. Install hdf5 + (Official page: http://www.hdfgroup.org/ftp/HDF5/current/src/unpacked/release_docs/INSTALL) + + 4. Download `hickle`: + via terminal: git clone https://github.com/telegraphic/hickle.git + via manual download: Go to https://github.com/telegraphic/hickle and on right hand side you will find `Download ZIP` file + + 5. cd to your downloaded `hickle` directory + + 6. Then run the following command in the `hickle` directory: + `python setup.py install` + + ### Testing + + Once installed from source, run `python setup.py test` to check it's all working. + + + Bugs & contributing + -------------------- + + Contributions and bugfixes are very welcome. Please check out our [contribution guidelines](https://github.com/telegraphic/hickle/blob/master/CONTRIBUTING.md) + for more details on how to contribute to development. + + + Referencing hickle + ------------------ + + If you use `hickle` in academic research, we would be grateful if you could reference [our paper](http://joss.theoj.org/papers/0c6638f84a1a574913ed7c6dd1051847) in the [Journal of Open-Source Software (JOSS)](http://joss.theoj.org/about). + + ``` + Price et al., (2018). Hickle: A HDF5-based python pickle replacement. 
Journal of Open Source Software, 3(32), 1115, https://doi.org/10.21105/joss.01115 + ``` + +Keywords: pickle,hdf5,data storage,data export +Platform: Cross platform (Linux +Platform: Mac OSX +Platform: Windows) +Requires-Python: >=2.7 +Description-Content-Type: text/markdown diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/SOURCES.txt b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/SOURCES.txt new file mode 100644 index 0000000000000000000000000000000000000000..bf56f059f14d80d641efba6de75e401b4410786f --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/SOURCES.txt @@ -0,0 +1,52 @@ +.gitignore +.nojekyll +.pylintrc +.travis.yml +CODE_OF_CONDUCT.md +CONTRIBUTING.md +LICENSE +README.md +_config.yml +paper.bib +paper.md +requirements.txt +setup.cfg +setup.py +docs/Makefile +docs/make_docs.sh +docs/source/conf.py +docs/source/index.md +docs/source/toc.rst +docs/source/_static/empty.txt +docs/source/_templates/empty.txt +hickle/__init__.py +hickle/helpers.py +hickle/hickle.py +hickle/hickle_legacy.py +hickle/hickle_legacy2.py +hickle/lookup.py +hickle.egg-info/PKG-INFO +hickle.egg-info/SOURCES.txt +hickle.egg-info/dependency_links.txt +hickle.egg-info/not-zip-safe +hickle.egg-info/requires.txt +hickle.egg-info/top_level.txt +hickle/loaders/__init__.py +hickle/loaders/load_astropy.py +hickle/loaders/load_numpy.py +hickle/loaders/load_pandas.py +hickle/loaders/load_python.py +hickle/loaders/load_python3.py +hickle/loaders/load_scipy.py +tests/__init__.py +tests/test_astropy.py +tests/test_hickle.py +tests/test_hickle_helpers.py +tests/test_legacy_load.py +tests/test_scipy.py +tests/legacy_hkls/generate_test_hickle.py +tests/legacy_hkls/hickle_1_1_0.hkl +tests/legacy_hkls/hickle_1_3_2.hkl +tests/legacy_hkls/hickle_1_4_0.hkl +tests/legacy_hkls/hickle_2_0_5.hkl +tests/legacy_hkls/hickle_2_1_0.hkl \ No newline at end of file diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/dependency_links.txt b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/dependency_links.txt new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/not-zip-safe b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/not-zip-safe new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/not-zip-safe @@ -0,0 +1 @@ + diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/requires.txt b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/requires.txt new file mode 100644 index 0000000000000000000000000000000000000000..8ccd55587b619ea766f8d1a76bc06739e176f552 --- /dev/null +++ 
b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/requires.txt @@ -0,0 +1,2 @@ +numpy +h5py diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/top_level.txt b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..ce3b9fb874814125f842378fab0204ff0e9184a3 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/EGG-INFO/top_level.txt @@ -0,0 +1,2 @@ +hickle +tests diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/__init__.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..46e2ea2c6d0f5578529b3e40e060b1a244420772 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/__init__.py @@ -0,0 +1,4 @@ +from .hickle import dump, load +from .hickle import __version__ + + diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/helpers.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..6c3d7f9f3853101723380f4658487978605f0cf3 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/helpers.py @@ -0,0 +1,113 @@ +import re +import six + +def get_type_and_data(h_node): + """ Helper function to return the py_type and data block for a HDF node """ + py_type = h_node.attrs["type"][0] + data = h_node[()] +# if h_node.shape == (): +# data = h_node.value +# else: +# data = h_node[:] + return py_type, data + +def get_type(h_node): + """ Helper function to return the py_type for a HDF node """ + py_type = h_node.attrs["type"][0] + return py_type + +def sort_keys(key_list): + """ Take a list of strings and sort it by integer value within string + + Args: + key_list (list): List of keys + + Returns: + key_list_sorted (list): List of keys, sorted by integer + """ + + # Py3 h5py returns an irritating KeysView object + # Py3 also complains about bytes and strings, convert all keys to bytes + if six.PY3: + key_list2 = [] + for key in key_list: + if isinstance(key, str): + key = bytes(key, 'ascii') + key_list2.append(key) + key_list = key_list2 + + # Check which keys contain a number + numbered_keys = [re.search(b'\d+', key) for key in key_list] + + # Sort the keys on number if they have it, or normally if not + if(len(key_list) and not numbered_keys.count(None)): + to_int = lambda x: int(re.search(b'\d+', x).group(0)) + return(sorted(key_list, key=to_int)) + else: + return(sorted(key_list)) + + +def check_is_iterable(py_obj): + """ Check whether a python object is iterable. 
+ + Note: this treats unicode and string as NON ITERABLE + + Args: + py_obj: python object to test + + Returns: + iter_ok (bool): True if item is iterable, False is item is not + """ + if six.PY2: + string_types = (str, unicode) + else: + string_types = (str, bytes, bytearray) + if isinstance(py_obj, string_types): + return False + try: + iter(py_obj) + return True + except TypeError: + return False + + +def check_is_hashable(py_obj): + """ Check if a python object is hashable + + Note: this function is currently not used, but is useful for future + development. + + Args: + py_obj: python object to test + """ + + try: + py_obj.__hash__() + return True + except TypeError: + return False + + +def check_iterable_item_type(iter_obj): + """ Check if all items within an iterable are the same type. + + Args: + iter_obj: iterable object + + Returns: + iter_type: type of item contained within the iterable. If + the iterable has many types, a boolean False is returned instead. + + References: + http://stackoverflow.com/questions/13252333/python-check-if-all-elements-of-a-list-are-the-same-type + """ + iseq = iter(iter_obj) + + try: + first_type = type(next(iseq)) + except StopIteration: + return False + except Exception as ex: + return False + else: + return first_type if all((type(x) is first_type) for x in iseq) else False diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/hickle.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/hickle.py new file mode 100644 index 0000000000000000000000000000000000000000..24b38c3e1283618c9ce2c4d97b6960334cc08530 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/hickle.py @@ -0,0 +1,611 @@ +# encoding: utf-8 +""" +# hickle.py + +Created by Danny Price 2016-02-03. + +Hickle is a HDF5 based clone of Pickle. Instead of serializing to a pickle +file, Hickle dumps to a HDF5 file. It is designed to be as similar to pickle in +usage as possible, providing a load() and dump() function. + +## Notes + +Hickle has two main advantages over Pickle: +1) LARGE PICKLE HANDLING. Unpickling a large pickle is slow, as the Unpickler +reads the entire pickle thing and loads it into memory. In comparison, HDF5 +files are designed for large datasets. Things are only loaded when accessed. + +2) CROSS PLATFORM SUPPORT. Attempting to unpickle a pickle pickled on Windows +on Linux and vice versa is likely to fail with errors like "Insecure string +pickle". HDF5 files will load fine, as long as both machines have +h5py installed. 
+ +""" + +from __future__ import absolute_import, division, print_function +import sys +import os +from pkg_resources import get_distribution, DistributionNotFound +from ast import literal_eval + +import numpy as np +import h5py as h5 + + +from .helpers import get_type, sort_keys, check_is_iterable, check_iterable_item_type +from .lookup import types_dict, hkl_types_dict, types_not_to_sort, \ + container_types_dict, container_key_types_dict +from .lookup import check_is_ndarray_like + + +try: + from exceptions import Exception + from types import NoneType +except ImportError: + pass # above imports will fail in python3 + +from six import PY2, PY3, string_types, integer_types +import io + +# Make several aliases for Python2/Python3 compatibility +if PY3: + file = io.TextIOWrapper + +# Import a default 'pickler' +# Not the nicest import code, but should work on Py2/Py3 +try: + import dill as pickle +except ImportError: + try: + import cPickle as pickle + except ImportError: + import pickle + +import warnings + +try: + __version__ = get_distribution('hickle').version +except DistributionNotFound: + __version__ = '0.0.0 - please install via pip/setup.py' + +################## +# Error handling # +################## + +class FileError(Exception): + """ An exception raised if the file is fishy """ + def __init__(self): + return + + def __str__(self): + return ("Cannot open file. Please pass either a filename " + "string, a file object, or a h5py.File") + + +class ClosedFileError(Exception): + """ An exception raised if the file is fishy """ + def __init__(self): + return + + def __str__(self): + return ("HDF5 file has been closed. Please pass either " + "a filename string, a file object, or an open h5py.File") + + +class NoMatchError(Exception): + """ An exception raised if the object type is not understood (or + supported)""" + def __init__(self): + return + + def __str__(self): + return ("Error: this type of python object cannot be converted into a " + "hickle.") + + +class ToDoError(Exception): + """ An exception raised for non-implemented functionality""" + def __init__(self): + return + + def __str__(self): + return "Error: this functionality hasn't been implemented yet." + + +class SerializedWarning(UserWarning): + """ An object type was not understood + + The data will be serialized using pickle. + """ + pass + + +###################### +# H5PY file wrappers # +###################### + +class H5GroupWrapper(h5.Group): + """ Group wrapper that provides a track_times kwarg. + + track_times is a boolean flag that can be set to False, so that two + files created at different times will have identical MD5 hashes. + """ + def create_dataset(self, *args, **kwargs): + kwargs['track_times'] = getattr(self, 'track_times', True) + return super(H5GroupWrapper, self).create_dataset(*args, **kwargs) + + def create_group(self, *args, **kwargs): + group = super(H5GroupWrapper, self).create_group(*args, **kwargs) + group.__class__ = H5GroupWrapper + group.track_times = getattr(self, 'track_times', True) + return group + + +class H5FileWrapper(h5.File): + """ Wrapper for h5py File that provides a track_times kwarg. + + track_times is a boolean flag that can be set to False, so that two + files created at different times will have identical MD5 hashes. 
+ """ + def create_dataset(self, *args, **kwargs): + kwargs['track_times'] = getattr(self, 'track_times', True) + return super(H5FileWrapper, self).create_dataset(*args, **kwargs) + + def create_group(self, *args, **kwargs): + group = super(H5FileWrapper, self).create_group(*args, **kwargs) + group.__class__ = H5GroupWrapper + group.track_times = getattr(self, 'track_times', True) + return group + + +def file_opener(f, mode='r', track_times=True): + """ A file opener helper function with some error handling. This can open + files through a file object, a h5py file, or just the filename. + + Args: + f (file, h5py.File, or string): File-identifier, e.g. filename or file object. + mode (str): File open mode. Only required if opening by filename string. + track_times (bool): Track time in HDF5; turn off if you want hickling at + different times to produce identical files (e.g. for MD5 hash check). + + """ + + # Assume that we will have to close the file after dump or load + close_flag = True + + # Were we handed a file object or just a file name string? + if isinstance(f, (file, io.TextIOWrapper)): + filename, mode = f.name, f.mode + f.close() + h5f = h5.File(filename, mode) + elif isinstance(f, string_types): + filename = f + h5f = h5.File(filename, mode) + elif isinstance(f, (H5FileWrapper, h5._hl.files.File)): + try: + filename = f.filename + except ValueError: + raise ClosedFileError + h5f = f + # Since this file was already open, do not close the file afterward + close_flag = False + else: + print(f.__class__) + raise FileError + + h5f.__class__ = H5FileWrapper + h5f.track_times = track_times + return(h5f, close_flag) + + +########### +# DUMPERS # +########### + + +def _dump(py_obj, h_group, call_id=0, **kwargs): + """ Dump a python object to a group within a HDF5 file. + + This function is called recursively by the main dump() function. + + Args: + py_obj: python object to dump. + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + + # Get list of dumpable dtypes + dumpable_dtypes = [] + for lst in [[bool, complex, bytes, float], string_types, integer_types]: + dumpable_dtypes.extend(lst) + + # Firstly, check if item is a numpy array. If so, just dump it. + if check_is_ndarray_like(py_obj): + create_hkl_dataset(py_obj, h_group, call_id, **kwargs) + + # Next, check if item is a dict + elif isinstance(py_obj, dict): + create_hkl_dataset(py_obj, h_group, call_id, **kwargs) + + # If not, check if item is iterable + elif check_is_iterable(py_obj): + item_type = check_iterable_item_type(py_obj) + + # item_type == False implies multiple types. Create a dataset + if item_type is False: + h_subgroup = create_hkl_group(py_obj, h_group, call_id) + for ii, py_subobj in enumerate(py_obj): + _dump(py_subobj, h_subgroup, call_id=ii, **kwargs) + + # otherwise, subitems have same type. Check if subtype is an iterable + # (e.g. list of lists), or not (e.g. list of ints, which should be treated + # as a single dataset). 
+ else: + if item_type in dumpable_dtypes: + create_hkl_dataset(py_obj, h_group, call_id, **kwargs) + else: + h_subgroup = create_hkl_group(py_obj, h_group, call_id) + for ii, py_subobj in enumerate(py_obj): + _dump(py_subobj, h_subgroup, call_id=ii, **kwargs) + + # item is not iterable, so create a dataset for it + else: + create_hkl_dataset(py_obj, h_group, call_id, **kwargs) + + +def dump(py_obj, file_obj, mode='w', track_times=True, path='/', **kwargs): + """ Write a pickled representation of obj to the open file object file. + + Args: + obj (object): python object o store in a Hickle + file: file object, filename string, or h5py.File object + file in which to store the object. A h5py.File or a filename is also + acceptable. + mode (str): optional argument, 'r' (read only), 'w' (write) or 'a' (append). + Ignored if file is a file object. + compression (str): optional argument. Applies compression to dataset. Options: None, gzip, + lzf (+ szip, if installed) + track_times (bool): optional argument. If set to False, repeated hickling will produce + identical files. + path (str): path within hdf5 file to save data to. Defaults to root / + """ + + # Make sure that file is not closed unless modified + # This is to avoid trying to close a file that was never opened + close_flag = False + + try: + # Open the file + h5f, close_flag = file_opener(file_obj, mode, track_times) + h5f.attrs["CLASS"] = b'hickle' + h5f.attrs["VERSION"] = get_distribution('hickle').version + h5f.attrs["type"] = [b'hickle'] + # Log which version of python was used to generate the hickle file + pv = sys.version_info + py_ver = "%i.%i.%i" % (pv[0], pv[1], pv[2]) + h5f.attrs["PYTHON_VERSION"] = py_ver + + h_root_group = h5f.get(path) + + if h_root_group is None: + h_root_group = h5f.create_group(path) + h_root_group.attrs["type"] = [b'hickle'] + + _dump(py_obj, h_root_group, **kwargs) + except NoMatchError: + fname = h5f.filename + h5f.close() + try: + os.remove(fname) + except OSError: + warnings.warn("Dump failed. Could not remove %s" % fname) + finally: + raise NoMatchError + finally: + # Close the file if requested. + # Closing a file twice will not cause any problems + if close_flag: + h5f.close() + + +def create_dataset_lookup(py_obj): + """ What type of object are we trying to pickle? This is a python + dictionary based equivalent of a case statement. It returns the correct + helper function for a given data type. + + Args: + py_obj: python object to look-up what function to use to dump to disk + + Returns: + match: function that should be used to dump data to a new dataset + """ + t = type(py_obj) + types_lookup = {dict: create_dict_dataset} + types_lookup.update(types_dict) + + match = types_lookup.get(t, no_match) + + return match + + + +def create_hkl_dataset(py_obj, h_group, call_id=0, **kwargs): + """ Create a dataset within the hickle HDF5 file + + Args: + py_obj: python object to dump. + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + + """ + #lookup dataset creator type based on python object type + create_dataset = create_dataset_lookup(py_obj) + + # do the creation + create_dataset(py_obj, h_group, call_id, **kwargs) + + +def create_hkl_group(py_obj, h_group, call_id=0): + """ Create a new group within the hickle file + + Args: + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. 
+ + """ + h_subgroup = h_group.create_group('data_%i' % call_id) + h_subgroup.attrs['type'] = [str(type(py_obj)).encode('ascii', 'ignore')] + return h_subgroup + + +def create_dict_dataset(py_obj, h_group, call_id=0, **kwargs): + """ Creates a data group for each key in dictionary + + Notes: + This is a very important function which uses the recursive _dump + method to build up hierarchical data models stored in the HDF5 file. + As this is critical to functioning, it is kept in the main hickle.py + file instead of in the loaders/ directory. + + Args: + py_obj: python object to dump; should be dictionary + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + h_dictgroup = h_group.create_group('data_%i' % call_id) + h_dictgroup.attrs['type'] = [str(type(py_obj)).encode('ascii', 'ignore')] + + for key, py_subobj in py_obj.items(): + if isinstance(key, string_types): + h_subgroup = h_dictgroup.create_group("%r" % (key)) + else: + h_subgroup = h_dictgroup.create_group(str(key)) + h_subgroup.attrs["type"] = [b'dict_item'] + + h_subgroup.attrs["key_type"] = [str(type(key)).encode('ascii', 'ignore')] + + _dump(py_subobj, h_subgroup, call_id=0, **kwargs) + + +def no_match(py_obj, h_group, call_id=0, **kwargs): + """ If no match is made, raise an exception + + Args: + py_obj: python object to dump; default if item is not matched. + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + pickled_obj = pickle.dumps(py_obj) + d = h_group.create_dataset('data_%i' % call_id, data=[pickled_obj]) + d.attrs["type"] = [b'pickle'] + + warnings.warn("%s type not understood, data have been serialized" % type(py_obj), + SerializedWarning) + + + +############# +## LOADERS ## +############# + +class PyContainer(list): + """ A group-like object into which to load datasets. + + In order to build up a tree-like structure, we need to be able + to load datasets into a container with an append() method. + Python tuples and sets do not allow this. This class provides + a list-like object that be converted into a list, tuple, set or dict. + """ + def __init__(self): + super(PyContainer, self).__init__() + self.container_type = None + self.name = None + self.key_type = None + + def convert(self): + """ Convert from PyContainer to python core data type. + + Returns: self, either as a list, tuple, set or dict + (or other type specified in lookup.py) + """ + + if self.container_type in container_types_dict.keys(): + convert_fn = container_types_dict[self.container_type] + return convert_fn(self) + if self.container_type == str(dict).encode('ascii', 'ignore'): + keys = [] + for item in self: + key = item.name.split('/')[-1] + key_type = item.key_type[0] + if key_type in container_key_types_dict.keys(): + to_type_fn = container_key_types_dict[key_type] + key = to_type_fn(key) + keys.append(key) + + items = [item[0] for item in self] + return dict(zip(keys, items)) + else: + return self + +def no_match_load(key): + """ If no match is made when loading, need to raise an exception + """ + raise RuntimeError("Cannot load %s data type" % key) + #pass + +def load_dataset_lookup(key): + """ What type of object are we trying to unpickle? This is a python + dictionary based equivalent of a case statement. It returns the type + a given 'type' keyword in the hickle file. 
+ + Args: + py_obj: python object to look-up what function to use to dump to disk + + Returns: + match: function that should be used to dump data to a new dataset + """ + + match = hkl_types_dict.get(key, no_match_load) + + return match + +def load(fileobj, path='/', safe=True): + """ Load a hickle file and reconstruct a python object + + Args: + fileobj: file object, h5py.File, or filename string + safe (bool): Disable automatic depickling of arbitrary python objects. + DO NOT set this to False unless the file is from a trusted source. + (see http://www.cs.jhu.edu/~s/musings/pickle.html for an explanation) + + path (str): path within hdf5 file to save data to. Defaults to root / + """ + + # Make sure that the file is not closed unless modified + # This is to avoid trying to close a file that was never opened + close_flag = False + + try: + h5f, close_flag = file_opener(fileobj) + h_root_group = h5f.get(path) + try: + assert 'CLASS' in h5f.attrs.keys() + assert 'VERSION' in h5f.attrs.keys() + VER = h5f.attrs['VERSION'] + try: + VER_MAJOR = int(VER) + except ValueError: + VER_MAJOR = int(VER[0]) + if VER_MAJOR == 1: + if PY2: + warnings.warn("Hickle file versioned as V1, attempting legacy loading...") + from . import hickle_legacy + return hickle_legacy.load(fileobj, safe) + else: + raise RuntimeError("Cannot open file. This file was likely" + " created with Python 2 and an old hickle version.") + elif VER_MAJOR == 2: + if PY2: + warnings.warn("Hickle file appears to be old version (v2), attempting " + "legacy loading...") + from . import hickle_legacy2 + return hickle_legacy2.load(fileobj, path=path, safe=safe) + else: + raise RuntimeError("Cannot open file. This file was likely" + " created with Python 2 and an old hickle version.") + # There is an unfortunate period of time where hickle 2.1.0 claims VERSION = int(3) + # For backward compatibility we really need to catch this. + # Actual hickle v3 files are versioned as A.B.C (e.g. 3.1.0) + elif VER_MAJOR == 3 and VER == VER_MAJOR: + if PY2: + warnings.warn("Hickle file appears to be old version (v2.1.0), attempting " + "legacy loading...") + from . import hickle_legacy2 + return hickle_legacy2.load(fileobj, path=path, safe=safe) + else: + raise RuntimeError("Cannot open file. This file was likely" + " created with Python 2 and an old hickle version.") + elif VER_MAJOR >= 3: + py_container = PyContainer() + py_container.container_type = 'hickle' + py_container = _load(py_container, h_root_group) + return py_container[0][0] + + except AssertionError: + if PY2: + warnings.warn("Hickle file is not versioned, attempting legacy loading...") + from . import hickle_legacy + return hickle_legacy.load(fileobj, safe) + else: + raise RuntimeError("Cannot open file. This file was likely" + " created with Python 2 and an old hickle version.") + finally: + # Close the file if requested. + # Closing a file twice will not cause any problems + if close_flag: + h5f.close() + +def load_dataset(h_node): + """ Load a dataset, converting into its correct python type + + Args: + h_node (h5py dataset): h5py dataset object to read + + Returns: + data: reconstructed python object from loaded data + """ + py_type = get_type(h_node) + + try: + load_fn = load_dataset_lookup(py_type) + return load_fn(h_node) + except: + raise + #raise RuntimeError("Hickle type %s not understood." 
% py_type) + +def _load(py_container, h_group): + """ Load a hickle file + + Recursive funnction to load hdf5 data into a PyContainer() + + Args: + py_container (PyContainer): Python container to load data into + h_group (h5 group or dataset): h5py object, group or dataset, to spider + and load all datasets. + """ + + group_dtype = h5._hl.group.Group + dataset_dtype = h5._hl.dataset.Dataset + + #either a file, group, or dataset + if isinstance(h_group, (H5FileWrapper, group_dtype)): + + py_subcontainer = PyContainer() + try: + py_subcontainer.container_type = bytes(h_group.attrs['type'][0]) + except KeyError: + raise + #py_subcontainer.container_type = '' + py_subcontainer.name = h_group.name + + if py_subcontainer.container_type == b'dict_item': + py_subcontainer.key_type = h_group.attrs['key_type'] + + if py_subcontainer.container_type not in types_not_to_sort: + h_keys = sort_keys(h_group.keys()) + else: + h_keys = h_group.keys() + + for h_name in h_keys: + h_node = h_group[h_name] + py_subcontainer = _load(py_subcontainer, h_node) + + sub_data = py_subcontainer.convert() + py_container.append(sub_data) + + else: + # must be a dataset + subdata = load_dataset(h_group) + py_container.append(subdata) + + return py_container diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/hickle_legacy.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/hickle_legacy.py new file mode 100644 index 0000000000000000000000000000000000000000..61a171fde3d39304d78d1ddede9656dd7ad50940 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/hickle_legacy.py @@ -0,0 +1,535 @@ +# encoding: utf-8 +""" +# hickle_legacy.py + +Created by Danny Price 2012-05-28. + +Hickle is a HDF5 based clone of Pickle. Instead of serializing to a +pickle file, Hickle dumps to a HDF5 file. It is designed to be as similar +to pickle in usage as possible. + +## Notes + +This is a legacy handler, for hickle v1 files. +If V2 reading fails, this will be called as a fail-over. + +""" + +import os +import sys +import numpy as np +import h5py as h5 + +if sys.version_info.major == 3: + NoneType = type(None) +else: + from types import NoneType + +__version__ = "1.3.0" +__author__ = "Danny Price" + +#################### +## Error handling ## +#################### + + +class FileError(Exception): + """ An exception raised if the file is fishy""" + + def __init__(self): + return + + def __str__(self): + print("Error: cannot open file. 
Please pass either a filename string, a file object, " + "or a h5py.File") + + +class NoMatchError(Exception): + """ An exception raised if the object type is not understood (or supported)""" + + def __init__(self): + return + + def __str__(self): + print("Error: this type of python object cannot be converted into a hickle.") + + +class ToDoError(Exception): + """ An exception raised for non-implemented functionality""" + + def __init__(self): + return + + def __str__(self): + print("Error: this functionality hasn't been implemented yet.") + + +class H5GroupWrapper(h5.Group): + def create_dataset(self, *args, **kwargs): + kwargs['track_times'] = getattr(self, 'track_times', True) + return super(H5GroupWrapper, self).create_dataset(*args, **kwargs) + + def create_group(self, *args, **kwargs): + group = super(H5GroupWrapper, self).create_group(*args, **kwargs) + group.__class__ = H5GroupWrapper + group.track_times = getattr(self, 'track_times', True) + return group + + +class H5FileWrapper(h5.File): + def create_dataset(self, *args, **kwargs): + kwargs['track_times'] = getattr(self, 'track_times', True) + return super(H5FileWrapper, self).create_dataset(*args, **kwargs) + + def create_group(self, *args, **kwargs): + group = super(H5FileWrapper, self).create_group(*args, **kwargs) + group.__class__ = H5GroupWrapper + group.track_times = getattr(self, 'track_times', True) + return group + + +def file_opener(f, mode='r', track_times=True): + """ A file opener helper function with some error handling. + + This can open files through a file object, a h5py file, or just the filename. + """ + # Were we handed a file object or just a file name string? + if isinstance(f, file): + filename, mode = f.name, f.mode + f.close() + h5f = h5.File(filename, mode) + + elif isinstance(f, h5._hl.files.File): + h5f = f + elif isinstance(f, str): + filename = f + h5f = h5.File(filename, mode) + else: + raise FileError + + h5f.__class__ = H5FileWrapper + h5f.track_times = track_times + return h5f + + +############# +## dumpers ## +############# + +def dump_ndarray(obj, h5f, **kwargs): + """ dumps an ndarray object to h5py file""" + h5f.create_dataset('data', data=obj, **kwargs) + h5f.create_dataset('type', data=['ndarray']) + + +def dump_np_dtype(obj, h5f, **kwargs): + """ dumps an np dtype object to h5py file""" + h5f.create_dataset('data', data=obj) + h5f.create_dataset('type', data=['np_dtype']) + + +def dump_np_dtype_dict(obj, h5f, **kwargs): + """ dumps an np dtype object within a group""" + h5f.create_dataset('data', data=obj) + h5f.create_dataset('_data', data=['np_dtype']) + + +def dump_masked(obj, h5f, **kwargs): + """ dumps an ndarray object to h5py file""" + h5f.create_dataset('data', data=obj, **kwargs) + h5f.create_dataset('mask', data=obj.mask, **kwargs) + h5f.create_dataset('type', data=['masked']) + + +def dump_list(obj, h5f, **kwargs): + """ dumps a list object to h5py file""" + + # Check if there are any numpy arrays in the list + contains_numpy = any(isinstance(el, np.ndarray) for el in obj) + + if contains_numpy: + _dump_list_np(obj, h5f, **kwargs) + else: + h5f.create_dataset('data', data=obj, **kwargs) + h5f.create_dataset('type', data=['list']) + + +def _dump_list_np(obj, h5f, **kwargs): + """ Dump a list of numpy objects to file """ + + np_group = h5f.create_group('data') + h5f.create_dataset('type', data=['np_list']) + + ii = 0 + for np_item in obj: + np_group.create_dataset("%s" % ii, data=np_item, **kwargs) + ii += 1 + + +def dump_tuple(obj, h5f, **kwargs): + """ dumps a list object to 
h5py file""" + + # Check if there are any numpy arrays in the list + contains_numpy = any(isinstance(el, np.ndarray) for el in obj) + + if contains_numpy: + _dump_tuple_np(obj, h5f, **kwargs) + else: + h5f.create_dataset('data', data=obj, **kwargs) + h5f.create_dataset('type', data=['tuple']) + + +def _dump_tuple_np(obj, h5f, **kwargs): + """ Dump a tuple of numpy objects to file """ + + np_group = h5f.create_group('data') + h5f.create_dataset('type', data=['np_tuple']) + + ii = 0 + for np_item in obj: + np_group.create_dataset("%s" % ii, data=np_item, **kwargs) + ii += 1 + + +def dump_set(obj, h5f, **kwargs): + """ dumps a set object to h5py file""" + obj = list(obj) + h5f.create_dataset('data', data=obj, **kwargs) + h5f.create_dataset('type', data=['set']) + + +def dump_string(obj, h5f, **kwargs): + """ dumps a list object to h5py file""" + h5f.create_dataset('data', data=[obj], **kwargs) + h5f.create_dataset('type', data=['string']) + + +def dump_none(obj, h5f, **kwargs): + """ Dump None type to file """ + h5f.create_dataset('data', data=[0], **kwargs) + h5f.create_dataset('type', data=['none']) + + +def dump_unicode(obj, h5f, **kwargs): + """ dumps a list object to h5py file""" + dt = h5.special_dtype(vlen=unicode) + ll = len(obj) + dset = h5f.create_dataset('data', shape=(ll, ), dtype=dt, **kwargs) + dset[:ll] = obj + h5f.create_dataset('type', data=['unicode']) + + +def _dump_dict(dd, hgroup, **kwargs): + for key in dd: + if type(dd[key]) in (str, int, float, unicode, bool): + # Figure out type to be stored + types = {str: 'str', int: 'int', float: 'float', + unicode: 'unicode', bool: 'bool', NoneType: 'none'} + _key = types.get(type(dd[key])) + + # Store along with dtype info + if _key == 'unicode': + dd[key] = str(dd[key]) + + hgroup.create_dataset("%s" % key, data=[dd[key]], **kwargs) + hgroup.create_dataset("_%s" % key, data=[_key]) + + elif type(dd[key]) in (type(np.array([1])), type(np.ma.array([1]))): + + if hasattr(dd[key], 'mask'): + hgroup.create_dataset("_%s" % key, data=["masked"]) + hgroup.create_dataset("%s" % key, data=dd[key].data, **kwargs) + hgroup.create_dataset("_%s_mask" % key, data=dd[key].mask, **kwargs) + else: + hgroup.create_dataset("_%s" % key, data=["ndarray"]) + hgroup.create_dataset("%s" % key, data=dd[key], **kwargs) + + elif type(dd[key]) is list: + hgroup.create_dataset("%s" % key, data=dd[key], **kwargs) + hgroup.create_dataset("_%s" % key, data=["list"]) + + elif type(dd[key]) is tuple: + hgroup.create_dataset("%s" % key, data=dd[key], **kwargs) + hgroup.create_dataset("_%s" % key, data=["tuple"]) + + elif type(dd[key]) is set: + hgroup.create_dataset("%s" % key, data=list(dd[key]), **kwargs) + hgroup.create_dataset("_%s" % key, data=["set"]) + + elif isinstance(dd[key], dict): + new_group = hgroup.create_group("%s" % key) + _dump_dict(dd[key], new_group, **kwargs) + + elif type(dd[key]) is NoneType: + hgroup.create_dataset("%s" % key, data=[0], **kwargs) + hgroup.create_dataset("_%s" % key, data=["none"]) + + else: + if type(dd[key]).__module__ == np.__name__: + #print type(dd[key]) + hgroup.create_dataset("%s" % key, data=dd[key]) + hgroup.create_dataset("_%s" % key, data=["np_dtype"]) + #new_group = hgroup.create_group("%s" % key) + #dump_np_dtype_dict(dd[key], new_group) + else: + raise NoMatchError + + +def dump_dict(obj, h5f='', **kwargs): + """ dumps a dictionary to h5py file """ + h5f.create_dataset('type', data=['dict']) + hgroup = h5f.create_group('data') + _dump_dict(obj, hgroup, **kwargs) + + +def no_match(obj, h5f, *args, **kwargs): + 
""" If no match is made, raise an exception """ + try: + import dill as cPickle + except ImportError: + import cPickle + + pickled_obj = cPickle.dumps(obj) + h5f.create_dataset('type', data=['pickle']) + h5f.create_dataset('data', data=[pickled_obj]) + + print("Warning: %s type not understood, data have been serialized" % type(obj)) + #raise NoMatchError + + +def dumper_lookup(obj): + """ What type of object are we trying to pickle? + + This is a python dictionary based equivalent of a case statement. + It returns the correct helper function for a given data type. + """ + t = type(obj) + + types = { + list: dump_list, + tuple: dump_tuple, + set: dump_set, + dict: dump_dict, + str: dump_string, + unicode: dump_unicode, + NoneType: dump_none, + np.ndarray: dump_ndarray, + np.ma.core.MaskedArray: dump_masked, + np.float16: dump_np_dtype, + np.float32: dump_np_dtype, + np.float64: dump_np_dtype, + np.int8: dump_np_dtype, + np.int16: dump_np_dtype, + np.int32: dump_np_dtype, + np.int64: dump_np_dtype, + np.uint8: dump_np_dtype, + np.uint16: dump_np_dtype, + np.uint32: dump_np_dtype, + np.uint64: dump_np_dtype, + np.complex64: dump_np_dtype, + np.complex128: dump_np_dtype, + } + + match = types.get(t, no_match) + return match + + +def dump(obj, file, mode='w', track_times=True, **kwargs): + """ Write a pickled representation of obj to the open file object file. + + Parameters + ---------- + obj: object + python object o store in a Hickle + file: file object, filename string, or h5py.File object + file in which to store the object. A h5py.File or a filename is also acceptable. + mode: string + optional argument, 'r' (read only), 'w' (write) or 'a' (append). Ignored if file + is a file object. + compression: str + optional argument. Applies compression to dataset. Options: None, gzip, lzf (+ szip, + if installed) + track_times: bool + optional argument. If set to False, repeated hickling will produce identical files. + """ + + try: + # See what kind of object to dump + dumper = dumper_lookup(obj) + # Open the file + h5f = file_opener(file, mode, track_times) + print("dumping %s to file %s" % (type(obj), repr(h5f))) + dumper(obj, h5f, **kwargs) + h5f.close() + except NoMatchError: + fname = h5f.filename + h5f.close() + try: + os.remove(fname) + except: + print("Warning: dump failed. Could not remove %s" % fname) + finally: + raise NoMatchError + + +############# +## loaders ## +############# + +def load(file, safe=True): + """ Load a hickle file and reconstruct a python object + + Parameters + ---------- + file: file object, h5py.File, or filename string + + safe (bool): Disable automatic depickling of arbitrary python objects. + DO NOT set this to False unless the file is from a trusted source. 
+ (see http://www.cs.jhu.edu/~s/musings/pickle.html for an explanation) + """ + + try: + h5f = file_opener(file) + dtype = h5f["type"][0] + + if dtype == 'dict': + group = h5f["data"] + data = load_dict(group) + elif dtype == 'pickle': + data = load_pickle(h5f, safe) + elif dtype == 'np_list': + group = h5f["data"] + data = load_np_list(group) + elif dtype == 'np_tuple': + group = h5f["data"] + data = load_np_tuple(group) + elif dtype == 'masked': + data = np.ma.array(h5f["data"][:], mask=h5f["mask"][:]) + elif dtype == 'none': + data = None + else: + if dtype in ('string', 'unicode'): + data = h5f["data"][0] + else: + try: + data = h5f["data"][:] + except ValueError: + data = h5f["data"] + types = { + 'list': list, + 'set': set, + 'unicode': unicode, + 'string': str, + 'ndarray': load_ndarray, + 'np_dtype': load_np_dtype + } + + mod = types.get(dtype, no_match) + data = mod(data) + finally: + if 'h5f' in locals(): + h5f.close() + return data + + +def load_pickle(h5f, safe=True): + """ Deserialize and load a pickled object within a hickle file + + WARNING: Pickle has + + Parameters + ---------- + h5f: h5py.File object + + safe (bool): Disable automatic depickling of arbitrary python objects. + DO NOT set this to False unless the file is from a trusted source. + (see http://www.cs.jhu.edu/~s/musings/pickle.html for an explanation) + """ + + if not safe: + try: + import dill as cPickle + except ImportError: + import cPickle + + data = h5f["data"][:] + data = cPickle.loads(data[0]) + return data + else: + print("\nWarning: Object is of an unknown type, and has not been loaded") + print(" for security reasons (it could be malicious code). If") + print(" you wish to continue, manually set safe=False\n") + + +def load_np_list(group): + """ load a numpy list """ + np_list = [] + for key in sorted(group.keys()): + data = group[key][:] + np_list.append(data) + return np_list + + +def load_np_tuple(group): + """ load a tuple containing numpy arrays """ + return tuple(load_np_list(group)) + + +def load_ndarray(arr): + """ Load a numpy array """ + # Nothing to be done! 
+ return arr + + +def load_np_dtype(arr): + """ Load a numpy array """ + # Just return first value + return arr.value + + +def load_dict(group): + """ Load dictionary """ + + dd = {} + for key in group.keys(): + if isinstance(group[key], h5._hl.group.Group): + new_group = group[key] + dd[key] = load_dict(new_group) + elif not key.startswith("_"): + _key = "_%s" % key + + if group[_key][0] == 'np_dtype': + dd[key] = group[key].value + elif group[_key][0] in ('str', 'int', 'float', 'unicode', 'bool'): + dd[key] = group[key][0] + elif group[_key][0] == 'masked': + key_ma = "_%s_mask" % key + dd[key] = np.ma.array(group[key][:], mask=group[key_ma]) + else: + dd[key] = group[key][:] + + # Convert numpy constructs back to string + dtype = group[_key][0] + types = {'str': str, 'int': int, 'float': float, + 'unicode': unicode, 'bool': bool, 'list': list, 'none' : NoneType} + try: + mod = types.get(dtype) + if dtype == 'none': + dd[key] = None + else: + dd[key] = mod(dd[key]) + except: + pass + return dd + + +def load_large(file): + """ Load a large hickle file (returns the h5py object not the data) + + Parameters + ---------- + file: file object, h5py.File, or filename string + """ + + h5f = file_opener(file) + return h5f diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/hickle_legacy2.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/hickle_legacy2.py new file mode 100644 index 0000000000000000000000000000000000000000..4d018fde9a161713213b00190267439257cb876d --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/hickle_legacy2.py @@ -0,0 +1,672 @@ +# encoding: utf-8 +""" +# hickle_legacy2.py + +Created by Danny Price 2016-02-03. + +This is a legacy handler, for hickle v2 files. +If V3 reading fails, this will be called as a fail-over. + +""" + +import os +import numpy as np +import h5py as h5 +import re + +try: + from exceptions import Exception + from types import NoneType +except ImportError: + pass # above imports will fail in python3 + +import warnings +__version__ = "2.0.4" +__author__ = "Danny Price" + + +################## +# Error handling # +################## + +class FileError(Exception): + """ An exception raised if the file is fishy """ + def __init__(self): + return + + def __str__(self): + return ("Cannot open file. Please pass either a filename " + "string, a file object, or a h5py.File") + + +class ClosedFileError(Exception): + """ An exception raised if the file is fishy """ + def __init__(self): + return + + def __str__(self): + return ("HDF5 file has been closed. Please pass either " + "a filename string, a file object, or an open h5py.File") + + +class NoMatchError(Exception): + """ An exception raised if the object type is not understood (or + supported)""" + def __init__(self): + return + + def __str__(self): + return ("Error: this type of python object cannot be converted into a " + "hickle.") + + +class ToDoError(Exception): + """ An exception raised for non-implemented functionality""" + def __init__(self): + return + + def __str__(self): + return "Error: this functionality hasn't been implemented yet." + + +###################### +# H5PY file wrappers # +###################### + +class H5GroupWrapper(h5.Group): + """ Group wrapper that provides a track_times kwarg. 
+ + track_times is a boolean flag that can be set to False, so that two + files created at different times will have identical MD5 hashes. + """ + def create_dataset(self, *args, **kwargs): + kwargs['track_times'] = getattr(self, 'track_times', True) + return super(H5GroupWrapper, self).create_dataset(*args, **kwargs) + + def create_group(self, *args, **kwargs): + group = super(H5GroupWrapper, self).create_group(*args, **kwargs) + group.__class__ = H5GroupWrapper + group.track_times = getattr(self, 'track_times', True) + return group + + +class H5FileWrapper(h5.File): + """ Wrapper for h5py File that provides a track_times kwarg. + + track_times is a boolean flag that can be set to False, so that two + files created at different times will have identical MD5 hashes. + """ + def create_dataset(self, *args, **kwargs): + kwargs['track_times'] = getattr(self, 'track_times', True) + return super(H5FileWrapper, self).create_dataset(*args, **kwargs) + + def create_group(self, *args, **kwargs): + group = super(H5FileWrapper, self).create_group(*args, **kwargs) + group.__class__ = H5GroupWrapper + group.track_times = getattr(self, 'track_times', True) + return group + + +def file_opener(f, mode='r', track_times=True): + """ A file opener helper function with some error handling. This can open + files through a file object, a h5py file, or just the filename. + + Args: + f (file, h5py.File, or string): File-identifier, e.g. filename or file object. + mode (str): File open mode. Only required if opening by filename string. + track_times (bool): Track time in HDF5; turn off if you want hickling at + different times to produce identical files (e.g. for MD5 hash check). + + """ + # Were we handed a file object or just a file name string? + if isinstance(f, file): + filename, mode = f.name, f.mode + f.close() + h5f = h5.File(filename, mode) + elif isinstance(f, str) or isinstance(f, unicode): + filename = f + h5f = h5.File(filename, mode) + elif isinstance(f, H5FileWrapper) or isinstance(f, h5._hl.files.File): + try: + filename = f.filename + except ValueError: + raise ClosedFileError() + h5f = f + else: + print(type(f)) + raise FileError + + h5f.__class__ = H5FileWrapper + h5f.track_times = track_times + return h5f + + +########### +# DUMPERS # +########### + +def check_is_iterable(py_obj): + """ Check whether a python object is iterable. + + Note: this treats unicode and string as NON ITERABLE + + Args: + py_obj: python object to test + + Returns: + iter_ok (bool): True if item is iterable, False is item is not + """ + if type(py_obj) in (str, unicode): + return False + try: + iter(py_obj) + return True + except TypeError: + return False + + +def check_iterable_item_type(iter_obj): + """ Check if all items within an iterable are the same type. + + Args: + iter_obj: iterable object + + Returns: + iter_type: type of item contained within the iterable. If + the iterable has many types, a boolean False is returned instead. 
+ + References: + http://stackoverflow.com/questions/13252333/python-check-if-all-elements-of-a-list-are-the-same-type + """ + iseq = iter(iter_obj) + first_type = type(next(iseq)) + return first_type if all((type(x) is first_type) for x in iseq) else False + + +def check_is_numpy_array(py_obj): + """ Check if a python object is a numpy array (masked or regular) + + Args: + py_obj: python object to check whether it is a numpy array + + Returns + is_numpy (bool): Returns True if it is a numpy array, else False if it isn't + """ + + is_numpy = type(py_obj) in (type(np.array([1])), type(np.ma.array([1]))) + + return is_numpy + + +def _dump(py_obj, h_group, call_id=0, **kwargs): + """ Dump a python object to a group within a HDF5 file. + + This function is called recursively by the main dump() function. + + Args: + py_obj: python object to dump. + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + + dumpable_dtypes = set([bool, int, float, long, complex, str, unicode]) + + # Firstly, check if item is a numpy array. If so, just dump it. + if check_is_numpy_array(py_obj): + create_hkl_dataset(py_obj, h_group, call_id, **kwargs) + + # next, check if item is iterable + elif check_is_iterable(py_obj): + item_type = check_iterable_item_type(py_obj) + + # item_type == False implies multiple types. Create a dataset + if item_type is False: + h_subgroup = create_hkl_group(py_obj, h_group, call_id) + for ii, py_subobj in enumerate(py_obj): + _dump(py_subobj, h_subgroup, call_id=ii, **kwargs) + + # otherwise, subitems have same type. Check if subtype is an iterable + # (e.g. list of lists), or not (e.g. list of ints, which should be treated + # as a single dataset). + else: + if item_type in dumpable_dtypes: + create_hkl_dataset(py_obj, h_group, call_id, **kwargs) + else: + h_subgroup = create_hkl_group(py_obj, h_group, call_id) + for ii, py_subobj in enumerate(py_obj): + #print py_subobj, h_subgroup, ii + _dump(py_subobj, h_subgroup, call_id=ii, **kwargs) + + # item is not iterable, so create a dataset for it + else: + create_hkl_dataset(py_obj, h_group, call_id, **kwargs) + + +def dump(py_obj, file_obj, mode='w', track_times=True, path='/', **kwargs): + """ Write a pickled representation of obj to the open file object file. + + Args: + obj (object): python object o store in a Hickle + file: file object, filename string, or h5py.File object + file in which to store the object. A h5py.File or a filename is also + acceptable. + mode (str): optional argument, 'r' (read only), 'w' (write) or 'a' (append). + Ignored if file is a file object. + compression (str): optional argument. Applies compression to dataset. Options: None, gzip, + lzf (+ szip, if installed) + track_times (bool): optional argument. If set to False, repeated hickling will produce + identical files. + path (str): path within hdf5 file to save data to. Defaults to root / + """ + + try: + # Open the file + h5f = file_opener(file_obj, mode, track_times) + h5f.attrs["CLASS"] = 'hickle' + h5f.attrs["VERSION"] = 2 + h5f.attrs["type"] = ['hickle'] + + h_root_group = h5f.get(path) + + if h_root_group is None: + h_root_group = h5f.create_group(path) + h_root_group.attrs["type"] = ['hickle'] + + _dump(py_obj, h_root_group, **kwargs) + h5f.close() + except NoMatchError: + fname = h5f.filename + h5f.close() + try: + os.remove(fname) + except OSError: + warnings.warn("Dump failed. 
Could not remove %s" % fname) + finally: + raise NoMatchError + + +def create_dataset_lookup(py_obj): + """ What type of object are we trying to pickle? This is a python + dictionary based equivalent of a case statement. It returns the correct + helper function for a given data type. + + Args: + py_obj: python object to look-up what function to use to dump to disk + + Returns: + match: function that should be used to dump data to a new dataset + """ + t = type(py_obj) + + types = { + dict: create_dict_dataset, + list: create_listlike_dataset, + tuple: create_listlike_dataset, + set: create_listlike_dataset, + str: create_stringlike_dataset, + unicode: create_stringlike_dataset, + int: create_python_dtype_dataset, + float: create_python_dtype_dataset, + long: create_python_dtype_dataset, + bool: create_python_dtype_dataset, + complex: create_python_dtype_dataset, + NoneType: create_none_dataset, + np.ndarray: create_np_array_dataset, + np.ma.core.MaskedArray: create_np_array_dataset, + np.float16: create_np_dtype_dataset, + np.float32: create_np_dtype_dataset, + np.float64: create_np_dtype_dataset, + np.int8: create_np_dtype_dataset, + np.int16: create_np_dtype_dataset, + np.int32: create_np_dtype_dataset, + np.int64: create_np_dtype_dataset, + np.uint8: create_np_dtype_dataset, + np.uint16: create_np_dtype_dataset, + np.uint32: create_np_dtype_dataset, + np.uint64: create_np_dtype_dataset, + np.complex64: create_np_dtype_dataset, + np.complex128: create_np_dtype_dataset + } + + match = types.get(t, no_match) + return match + + +def create_hkl_dataset(py_obj, h_group, call_id=0, **kwargs): + """ Create a dataset within the hickle HDF5 file + + Args: + py_obj: python object to dump. + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + + """ + #lookup dataset creator type based on python object type + create_dataset = create_dataset_lookup(py_obj) + + # do the creation + create_dataset(py_obj, h_group, call_id, **kwargs) + + +def create_hkl_group(py_obj, h_group, call_id=0): + """ Create a new group within the hickle file + + Args: + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + + """ + h_subgroup = h_group.create_group('data_%i' % call_id) + h_subgroup.attrs["type"] = [str(type(py_obj))] + return h_subgroup + + +def create_listlike_dataset(py_obj, h_group, call_id=0, **kwargs): + """ Dumper for list, set, tuple + + Args: + py_obj: python object to dump; should be list-like + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + dtype = str(type(py_obj)) + obj = list(py_obj) + d = h_group.create_dataset('data_%i' % call_id, data=obj, **kwargs) + d.attrs["type"] = [dtype] + + +def create_np_dtype_dataset(py_obj, h_group, call_id=0, **kwargs): + """ dumps an np dtype object to h5py file + + Args: + py_obj: python object to dump; should be a numpy scalar, e.g. np.float16(1) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. 
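To see the layout that `create_hkl_dataset` / `create_hkl_group` produce, it can help to open a hickled file directly with `h5py`. A quick sketch (file name arbitrary; the printed values are indicative only):

```python
import h5py
import hickle as hkl

hkl.dump([1, 2, 3], 'demo.hkl', mode='w')

with h5py.File('demo.hkl', 'r') as f:
    # Root attributes written by dump(): CLASS, VERSION and a 'type' marker.
    print(dict(f.attrs))
    # The list itself lands in a dataset named 'data_0' ...
    print(list(f.keys()))
    # ... whose 'type' attribute records the original python type.
    print(f['data_0'].attrs['type'])
```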
+ """ + d = h_group.create_dataset('data_%i' % call_id, data=py_obj, **kwargs) + d.attrs["type"] = ['np_dtype'] + d.attrs["np_dtype"] = str(d.dtype) + + +def create_python_dtype_dataset(py_obj, h_group, call_id=0, **kwargs): + """ dumps a python dtype object to h5py file + + Args: + py_obj: python object to dump; should be a python type (int, float, bool etc) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + d = h_group.create_dataset('data_%i' % call_id, data=py_obj, + dtype=type(py_obj), **kwargs) + d.attrs["type"] = ['python_dtype'] + d.attrs['python_subdtype'] = str(type(py_obj)) + + +def create_dict_dataset(py_obj, h_group, call_id=0, **kwargs): + """ Creates a data group for each key in dictionary + + Args: + py_obj: python object to dump; should be dictionary + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + h_dictgroup = h_group.create_group('data_%i' % call_id) + h_dictgroup.attrs["type"] = ['dict'] + for key, py_subobj in py_obj.items(): + h_subgroup = h_dictgroup.create_group(key) + h_subgroup.attrs["type"] = ['dict_item'] + _dump(py_subobj, h_subgroup, call_id=0, **kwargs) + + +def create_np_array_dataset(py_obj, h_group, call_id=0, **kwargs): + """ dumps an ndarray object to h5py file + + Args: + py_obj: python object to dump; should be a numpy array or np.ma.array (masked) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + if isinstance(py_obj, type(np.ma.array([1]))): + d = h_group.create_dataset('data_%i' % call_id, data=py_obj, **kwargs) + #m = h_group.create_dataset('mask_%i' % call_id, data=py_obj.mask, **kwargs) + m = h_group.create_dataset('data_%i_mask' % call_id, data=py_obj.mask, **kwargs) + d.attrs["type"] = ['ndarray_masked_data'] + m.attrs["type"] = ['ndarray_masked_mask'] + else: + d = h_group.create_dataset('data_%i' % call_id, data=py_obj, **kwargs) + d.attrs["type"] = ['ndarray'] + + +def create_stringlike_dataset(py_obj, h_group, call_id=0, **kwargs): + """ dumps a list object to h5py file + + Args: + py_obj: python object to dump; should be string-like (unicode or string) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + if isinstance(py_obj, str): + d = h_group.create_dataset('data_%i' % call_id, data=[py_obj], **kwargs) + d.attrs["type"] = ['string'] + else: + dt = h5.special_dtype(vlen=unicode) + dset = h_group.create_dataset('data_%i' % call_id, shape=(1, ), dtype=dt, **kwargs) + dset[0] = py_obj + dset.attrs['type'] = ['unicode'] + + +def create_none_dataset(py_obj, h_group, call_id=0, **kwargs): + """ Dump None type to file + + Args: + py_obj: python object to dump; must be None object + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + d = h_group.create_dataset('data_%i' % call_id, data=[0], **kwargs) + d.attrs["type"] = ['none'] + + +def no_match(py_obj, h_group, call_id=0, **kwargs): + """ If no match is made, raise an exception + + Args: + py_obj: python object to dump; default if item is not matched. + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. 
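Masked arrays are stored as a data/mask dataset pair (`data_N` plus `data_N_mask`), so both values and mask should survive a round trip. A minimal check, mirroring the masked-array test further down (file name arbitrary):

```python
import numpy as np
import hickle as hkl

a = np.ma.array([1, 2, 3, 4], dtype='float32', mask=[0, 1, 0, 0])
hkl.dump(a, 'masked.hkl', mode='w')
b = hkl.load('masked.hkl')

assert b.dtype == a.dtype
assert np.allclose(a.compressed(), b.compressed())   # unmasked values match
assert np.all(a.mask == b.mask)                       # mask is preserved too
```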
+ """ + try: + import dill as cPickle + except ImportError: + import cPickle + + pickled_obj = cPickle.dumps(py_obj) + d = h_group.create_dataset('data_%i' % call_id, data=[pickled_obj]) + d.attrs["type"] = ['pickle'] + + warnings.warn("%s type not understood, data have been " + "serialized" % type(py_obj)) + + +############# +## LOADERS ## +############# + +class PyContainer(list): + """ A group-like object into which to load datasets. + + In order to build up a tree-like structure, we need to be able + to load datasets into a container with an append() method. + Python tuples and sets do not allow this. This class provides + a list-like object that be converted into a list, tuple, set or dict. + """ + def __init__(self): + super(PyContainer, self).__init__() + self.container_type = None + self.name = None + + def convert(self): + """ Convert from PyContainer to python core data type. + + Returns: self, either as a list, tuple, set or dict + """ + if self.container_type == "<type 'list'>": + return list(self) + if self.container_type == "<type 'tuple'>": + return tuple(self) + if self.container_type == "<type 'set'>": + return set(self) + if self.container_type == "dict": + keys = [str(item.name.split('/')[-1]) for item in self] + items = [item[0] for item in self] + return dict(zip(keys, items)) + else: + return self + + +def load(fileobj, path='/', safe=True): + """ Load a hickle file and reconstruct a python object + + Args: + fileobj: file object, h5py.File, or filename string + safe (bool): Disable automatic depickling of arbitrary python objects. + DO NOT set this to False unless the file is from a trusted source. + (see http://www.cs.jhu.edu/~s/musings/pickle.html for an explanation) + + path (str): path within hdf5 file to save data to. Defaults to root / + """ + + try: + h5f = file_opener(fileobj) + h_root_group = h5f.get(path) + + try: + assert 'CLASS' in h5f.attrs.keys() + assert 'VERSION' in h5f.attrs.keys() + py_container = PyContainer() + py_container.container_type = 'hickle' + py_container = _load(py_container, h_root_group) + return py_container[0][0] + except AssertionError: + import hickle_legacy + return hickle_legacy.load(fileobj, safe) + finally: + if 'h5f' in locals(): + h5f.close() + + +def load_dataset(h_node): + """ Load a dataset, converting into its correct python type + + Args: + h_node (h5py dataset): h5py dataset object to read + + Returns: + data: reconstructed python object from loaded data + """ + py_type = h_node.attrs["type"][0] + + if h_node.shape == (): + data = h_node.value + else: + data = h_node[:] + + if py_type == "<type 'list'>": + #print self.name + return list(data) + elif py_type == "<type 'tuple'>": + return tuple(data) + elif py_type == "<type 'set'>": + return set(data) + elif py_type == "np_dtype": + subtype = h_node.attrs["np_dtype"] + data = np.array(data, dtype=subtype) + return data + elif py_type == 'ndarray': + return np.array(data) + elif py_type == 'ndarray_masked_data': + try: + mask_path = h_node.name + "_mask" + h_root = h_node.parent + mask = h_root.get(mask_path)[:] + except IndexError: + mask = h_root.get(mask_path) + except ValueError: + mask = h_root.get(mask_path) + data = np.ma.array(data, mask=mask) + return data + elif py_type == 'python_dtype': + subtype = h_node.attrs["python_subdtype"] + type_dict = { + "<type 'int'>": int, + "<type 'float'>": float, + "<type 'long'>": long, + "<type 'bool'>": bool, + "<type 'complex'>": complex + } + tcast = type_dict.get(subtype) + return tcast(data) + elif py_type == 'string': + 
return str(data[0]) + elif py_type == 'unicode': + return unicode(data[0]) + elif py_type == 'none': + return None + else: + print(h_node.name, py_type, h_node.attrs.keys()) + return data + + +def sort_keys(key_list): + """ Take a list of strings and sort it by integer value within string + + Args: + key_list (list): List of keys + + Returns: + key_list_sorted (list): List of keys, sorted by integer + """ + to_int = lambda x: int(re.search('\d+', x).group(0)) + keys_by_int = sorted([(to_int(key), key) for key in key_list]) + return [ii[1] for ii in keys_by_int] + + +def _load(py_container, h_group): + """ Load a hickle file + + Recursive funnction to load hdf5 data into a PyContainer() + + Args: + py_container (PyContainer): Python container to load data into + h_group (h5 group or dataset): h5py object, group or dataset, to spider + and load all datasets. + """ + + group_dtype = h5._hl.group.Group + dataset_dtype = h5._hl.dataset.Dataset + + #either a file, group, or dataset + if isinstance(h_group, H5FileWrapper) or isinstance(h_group, group_dtype): + py_subcontainer = PyContainer() + py_subcontainer.container_type = h_group.attrs['type'][0] + py_subcontainer.name = h_group.name + + if py_subcontainer.container_type != 'dict': + h_keys = sort_keys(h_group.keys()) + else: + h_keys = h_group.keys() + + for h_name in h_keys: + h_node = h_group[h_name] + py_subcontainer = _load(py_subcontainer, h_node) + + sub_data = py_subcontainer.convert() + py_container.append(sub_data) + + else: + # must be a dataset + subdata = load_dataset(h_group) + py_container.append(subdata) + + #print h_group.name, py_container + return py_container diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/__init__.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3be6bd298581fb3086bb5a261de72a56970faddf --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/__init__.py @@ -0,0 +1 @@ +from __future__ import absolute_import \ No newline at end of file diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_astropy.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_astropy.py new file mode 100644 index 0000000000000000000000000000000000000000..dd8efce655c2223262b42868cbb1d9ba5c580acb --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_astropy.py @@ -0,0 +1,237 @@ +import numpy as np +from astropy.units import Quantity +from astropy.coordinates import Angle, SkyCoord +from astropy.constants import Constant, EMConstant +from astropy.table import Table +from astropy.time import Time + +from hickle.helpers import get_type_and_data +import six + +def create_astropy_quantity(py_obj, h_group, call_id=0, **kwargs): + """ dumps an astropy quantity + + Args: + py_obj: python object to dump; should be a python type (int, float, bool etc) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. 
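The numeric sort in `sort_keys` above matters because group members are named `data_0`, `data_1`, ...; plain lexicographic sorting would put `'data_10'` before `'data_2'`. A standalone illustration of the same ordering:

```python
import re

def to_int(key):
    return int(re.search(r'\d+', key).group(0))

keys = ['data_10', 'data_2', 'data_1']
assert sorted(keys) == ['data_1', 'data_10', 'data_2']               # lexicographic: wrong order
assert sorted(keys, key=to_int) == ['data_1', 'data_2', 'data_10']   # numeric order, as sort_keys does
```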
+ """ + # kwarg compression etc does not work on scalars + d = h_group.create_dataset('data_%i' % call_id, data=py_obj.value, + dtype='float64') #, **kwargs) + d.attrs["type"] = [b'astropy_quantity'] + if six.PY3: + unit = bytes(str(py_obj.unit), 'ascii') + else: + unit = str(py_obj.unit) + d.attrs['unit'] = [unit] + +def create_astropy_angle(py_obj, h_group, call_id=0, **kwargs): + """ dumps an astropy quantity + + Args: + py_obj: python object to dump; should be a python type (int, float, bool etc) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + # kwarg compression etc does not work on scalars + d = h_group.create_dataset('data_%i' % call_id, data=py_obj.value, + dtype='float64') #, **kwargs) + d.attrs["type"] = [b'astropy_angle'] + if six.PY3: + unit = str(py_obj.unit).encode('ascii') + else: + unit = str(py_obj.unit) + d.attrs['unit'] = [unit] + +def create_astropy_skycoord(py_obj, h_group, call_id=0, **kwargs): + """ dumps an astropy quantity + + Args: + py_obj: python object to dump; should be a python type (int, float, bool etc) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + # kwarg compression etc does not work on scalars + lat = py_obj.data.lat.value + lon = py_obj.data.lon.value + dd = np.column_stack((lon, lat)) + + d = h_group.create_dataset('data_%i' % call_id, data=dd, + dtype='float64') #, **kwargs) + d.attrs["type"] = [b'astropy_skycoord'] + if six.PY3: + lon_unit = str(py_obj.data.lon.unit).encode('ascii') + lat_unit = str(py_obj.data.lat.unit).encode('ascii') + else: + lon_unit = str(py_obj.data.lon.unit) + lat_unit = str(py_obj.data.lat.unit) + d.attrs['lon_unit'] = [lon_unit] + d.attrs['lat_unit'] = [lat_unit] + +def create_astropy_time(py_obj, h_group, call_id=0, **kwargs): + """ dumps an astropy Time object + + Args: + py_obj: python object to dump; should be a python type (int, float, bool etc) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + + # kwarg compression etc does not work on scalars + data = py_obj.value + dtype = str(py_obj.value.dtype) + + # Need to catch string times + if '<U' in dtype: + dtype = dtype.replace('<U', '|S') + print(dtype) + data = [] + for item in py_obj.value: + data.append(str(item).encode('ascii')) + + d = h_group.create_dataset('data_%i' % call_id, data=data, dtype=dtype) #, **kwargs) + d.attrs["type"] = [b'astropy_time'] + if six.PY2: + fmt = str(py_obj.format) + scale = str(py_obj.scale) + else: + fmt = str(py_obj.format).encode('ascii') + scale = str(py_obj.scale).encode('ascii') + d.attrs['format'] = [fmt] + d.attrs['scale'] = [scale] + +def create_astropy_constant(py_obj, h_group, call_id=0, **kwargs): + """ dumps an astropy constant + + Args: + py_obj: python object to dump; should be a python type (int, float, bool etc) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. 
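These astropy handlers store the numeric value plus the unit (or format/scale) as attributes, so quantities should round-trip with their units intact. A short sketch along the lines of the astropy tests below (file name arbitrary):

```python
from astropy.units import Quantity
import hickle as hkl

q = Quantity(7, unit='m^3 / s')
hkl.dump(q, 'quantity.hkl', mode='w')
q2 = hkl.load('quantity.hkl')

assert q == q2
assert q.unit == q2.unit   # unit is restored from the 'unit' attribute
```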
+ """ + # kwarg compression etc does not work on scalars + d = h_group.create_dataset('data_%i' % call_id, data=py_obj.value, + dtype='float64') #, **kwargs) + d.attrs["type"] = [b'astropy_constant'] + d.attrs["unit"] = [str(py_obj.unit)] + d.attrs["abbrev"] = [str(py_obj.abbrev)] + d.attrs["name"] = [str(py_obj.name)] + d.attrs["reference"] = [str(py_obj.reference)] + d.attrs["uncertainty"] = [py_obj.uncertainty] + + if py_obj.system: + d.attrs["system"] = [py_obj.system] + + +def create_astropy_table(py_obj, h_group, call_id=0, **kwargs): + """ Dump an astropy Table + + Args: + py_obj: python object to dump; should be a python type (int, float, bool etc) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + data = py_obj.as_array() + d = h_group.create_dataset('data_%i' % call_id, data=data, dtype=data.dtype, **kwargs) + d.attrs['type'] = [b'astropy_table'] + + if six.PY3: + colnames = [bytes(cn, 'ascii') for cn in py_obj.colnames] + else: + colnames = py_obj.colnames + d.attrs['colnames'] = colnames + for key, value in py_obj.meta.items(): + d.attrs[key] = value + + +def load_astropy_quantity_dataset(h_node): + py_type, data = get_type_and_data(h_node) + unit = h_node.attrs["unit"][0] + q = Quantity(data, unit) + return q + +def load_astropy_time_dataset(h_node): + py_type, data = get_type_and_data(h_node) + if six.PY3: + fmt = h_node.attrs["format"][0].decode('ascii') + scale = h_node.attrs["scale"][0].decode('ascii') + else: + fmt = h_node.attrs["format"][0] + scale = h_node.attrs["scale"][0] + q = Time(data, format=fmt, scale=scale) + return q + +def load_astropy_angle_dataset(h_node): + py_type, data = get_type_and_data(h_node) + unit = h_node.attrs["unit"][0] + q = Angle(data, unit) + return q + +def load_astropy_skycoord_dataset(h_node): + py_type, data = get_type_and_data(h_node) + lon_unit = h_node.attrs["lon_unit"][0] + lat_unit = h_node.attrs["lat_unit"][0] + q = SkyCoord(data[:,0], data[:, 1], unit=(lon_unit, lat_unit)) + return q + +def load_astropy_constant_dataset(h_node): + py_type, data = get_type_and_data(h_node) + unit = h_node.attrs["unit"][0] + abbrev = h_node.attrs["abbrev"][0] + name = h_node.attrs["name"][0] + ref = h_node.attrs["reference"][0] + unc = h_node.attrs["uncertainty"][0] + + system = None + if "system" in h_node.attrs.keys(): + system = h_node.attrs["system"][0] + + c = Constant(abbrev, name, data, unit, unc, ref, system) + return c + +def load_astropy_table(h_node): + py_type, data = get_type_and_data(h_node) + metadata = dict(h_node.attrs.items()) + metadata.pop('type') + metadata.pop('colnames') + + if six.PY3: + colnames = [cn.decode('ascii') for cn in h_node.attrs["colnames"]] + else: + colnames = h_node.attrs["colnames"] + + t = Table(data, names=colnames, meta=metadata) + return t + +def check_is_astropy_table(py_obj): + return isinstance(py_obj, Table) + +def check_is_astropy_quantity_array(py_obj): + if isinstance(py_obj, Quantity) or isinstance(py_obj, Time) or \ + isinstance(py_obj, Angle) or isinstance(py_obj, SkyCoord): + if py_obj.isscalar: + return False + else: + return True + else: + return False + + +##################### +# Lookup dictionary # +##################### + +class_register = [ + [Quantity, b'astropy_quantity', create_astropy_quantity, load_astropy_quantity_dataset, + True, check_is_astropy_quantity_array], + [Time, b'astropy_time', create_astropy_time, load_astropy_time_dataset, + True, check_is_astropy_quantity_array], + [Angle, 
b'astropy_angle', create_astropy_angle, load_astropy_angle_dataset, + True, check_is_astropy_quantity_array], + [SkyCoord, b'astropy_skycoord', create_astropy_skycoord, load_astropy_skycoord_dataset, + True, check_is_astropy_quantity_array], + [Constant, b'astropy_constant', create_astropy_constant, load_astropy_constant_dataset, + True, None], + [Table, b'astropy_table', create_astropy_table, load_astropy_table, + True, check_is_astropy_table] +] diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_numpy.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_numpy.py new file mode 100644 index 0000000000000000000000000000000000000000..7a31b12e235b07cccb6b1f0045ca9ccbfb874454 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_numpy.py @@ -0,0 +1,145 @@ +# encoding: utf-8 +""" +# load_numpy.py + +Utilities and dump / load handlers for handling numpy and scipy arrays + +""" +import six +import numpy as np + + +from hickle.helpers import get_type_and_data + + +def check_is_numpy_array(py_obj): + """ Check if a python object is a numpy array (masked or regular) + + Args: + py_obj: python object to check whether it is a numpy array + + Returns + is_numpy (bool): Returns True if it is a numpy array, else False if it isn't + """ + + is_numpy = type(py_obj) in (type(np.array([1])), type(np.ma.array([1]))) + + return is_numpy + + +def create_np_scalar_dataset(py_obj, h_group, call_id=0, **kwargs): + """ dumps an np dtype object to h5py file + + Args: + py_obj: python object to dump; should be a numpy scalar, e.g. np.float16(1) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + + # DO NOT PASS KWARGS TO SCALAR DATASETS! + d = h_group.create_dataset('data_%i' % call_id, data=py_obj) # **kwargs) + d.attrs["type"] = [b'np_scalar'] + + if six.PY2: + d.attrs["np_dtype"] = str(d.dtype) + else: + d.attrs["np_dtype"] = bytes(str(d.dtype), 'ascii') + + +def create_np_dtype(py_obj, h_group, call_id=0, **kwargs): + """ dumps an np dtype object to h5py file + + Args: + py_obj: python object to dump; should be a numpy scalar, e.g. np.float16(1) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + d = h_group.create_dataset('data_%i' % call_id, data=[str(py_obj)]) + d.attrs["type"] = [b'np_dtype'] + + +def create_np_array_dataset(py_obj, h_group, call_id=0, **kwargs): + """ dumps an ndarray object to h5py file + + Args: + py_obj: python object to dump; should be a numpy array or np.ma.array (masked) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. 
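Because `np_scalar` datasets record the original numpy dtype in an attribute, scalar precision is preserved on reload. A minimal sketch mirroring the numpy-dtype test further down (file name arbitrary):

```python
import numpy as np
import hickle as hkl

x = np.float16(1)
hkl.dump(x, 'np_scalar.hkl', mode='w')
y = hkl.load('np_scalar.hkl')

assert y == x
assert y.dtype == x.dtype   # float16 comes back as float16, not float64
```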
+ """ + if isinstance(py_obj, type(np.ma.array([1]))): + d = h_group.create_dataset('data_%i' % call_id, data=py_obj, **kwargs) + #m = h_group.create_dataset('mask_%i' % call_id, data=py_obj.mask, **kwargs) + m = h_group.create_dataset('data_%i_mask' % call_id, data=py_obj.mask, **kwargs) + d.attrs["type"] = [b'ndarray_masked_data'] + m.attrs["type"] = [b'ndarray_masked_mask'] + else: + d = h_group.create_dataset('data_%i' % call_id, data=py_obj, **kwargs) + d.attrs["type"] = [b'ndarray'] + + + + +####################### +## Lookup dictionary ## +####################### + +types_dict = { + np.ndarray: create_np_array_dataset, + np.ma.core.MaskedArray: create_np_array_dataset, + np.float16: create_np_scalar_dataset, + np.float32: create_np_scalar_dataset, + np.float64: create_np_scalar_dataset, + np.int8: create_np_scalar_dataset, + np.int16: create_np_scalar_dataset, + np.int32: create_np_scalar_dataset, + np.int64: create_np_scalar_dataset, + np.uint8: create_np_scalar_dataset, + np.uint16: create_np_scalar_dataset, + np.uint32: create_np_scalar_dataset, + np.uint64: create_np_scalar_dataset, + np.complex64: create_np_scalar_dataset, + np.complex128: create_np_scalar_dataset, + np.dtype: create_np_dtype +} + +def load_np_dtype_dataset(h_node): + py_type, data = get_type_and_data(h_node) + data = np.dtype(data[0]) + return data + +def load_np_scalar_dataset(h_node): + py_type, data = get_type_and_data(h_node) + subtype = h_node.attrs["np_dtype"] + data = np.array([data], dtype=subtype)[0] + return data + +def load_ndarray_dataset(h_node): + py_type, data = get_type_and_data(h_node) + return np.array(data, copy=False) + +def load_ndarray_masked_dataset(h_node): + py_type, data = get_type_and_data(h_node) + try: + mask_path = h_node.name + "_mask" + h_root = h_node.parent + mask = h_root.get(mask_path)[:] + except IndexError: + mask = h_root.get(mask_path) + except ValueError: + mask = h_root.get(mask_path) + data = np.ma.array(data, mask=mask) + return data + +def load_nothing(h_hode): + pass + +hkl_types_dict = { + b"np_dtype" : load_np_dtype_dataset, + b"np_scalar" : load_np_scalar_dataset, + b"ndarray" : load_ndarray_dataset, + b"numpy.ndarray" : load_ndarray_dataset, + b"ndarray_masked_data" : load_ndarray_masked_dataset, + b"ndarray_masked_mask" : load_nothing # Loaded autormatically +} + + diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_pandas.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_pandas.py new file mode 100644 index 0000000000000000000000000000000000000000..0b5185533dafe9d2f8b2c45405967d7489ce7caf --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_pandas.py @@ -0,0 +1,4 @@ +import pandas as pd + +# TODO: populate with classes to load +class_register = [] \ No newline at end of file diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_python.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_python.py new file mode 100644 index 0000000000000000000000000000000000000000..58de921ed13e2e9b0c57ad724e94fa2ac9a3268f --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_python.py @@ -0,0 +1,141 @@ +# encoding: utf-8 
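The `hkl_types_dict` pattern above is a plain dictionary dispatch: the stored `'type'` attribute selects the loader function. A toy, self-contained sketch of the same idea (the names here are stand-ins, not the real hickle functions):

```python
def load_list(data):
    return list(data)

def load_tuple(data):
    return tuple(data)

# Keys mimic the byte-string 'type' attributes written at dump time.
HKL_LOADERS = {
    b"<class 'list'>": load_list,
    b"<class 'tuple'>": load_tuple,
    b'ndarray': lambda data: data,
}

def load_by_type(type_attr, data):
    return HKL_LOADERS[type_attr](data)

assert load_by_type(b"<class 'tuple'>", [1, 2]) == (1, 2)
```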
+""" +# load_python.py + +Handlers for dumping and loading built-in python types. +NB: As these are for built-in types, they are critical to the functioning of hickle. + +""" + +from hickle.helpers import get_type_and_data + +import sys +if sys.version_info.major == 3: + unicode = type(str) + str = type(bytes) + long = type(int) + NoneType = type(None) +else: + from types import NoneType + +import h5py as h5 + +def create_listlike_dataset(py_obj, h_group, call_id=0, **kwargs): + """ Dumper for list, set, tuple + + Args: + py_obj: python object to dump; should be list-like + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + dtype = str(type(py_obj)) + obj = list(py_obj) + d = h_group.create_dataset('data_%i' % call_id, data=obj, **kwargs) + d.attrs["type"] = [dtype] + + +def create_python_dtype_dataset(py_obj, h_group, call_id=0, **kwargs): + """ dumps a python dtype object to h5py file + + Args: + py_obj: python object to dump; should be a python type (int, float, bool etc) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + # kwarg compression etc does not work on scalars + d = h_group.create_dataset('data_%i' % call_id, data=py_obj, + dtype=type(py_obj)) #, **kwargs) + d.attrs["type"] = ['python_dtype'] + d.attrs['python_subdtype'] = str(type(py_obj)) + + +def create_stringlike_dataset(py_obj, h_group, call_id=0, **kwargs): + """ dumps a list object to h5py file + + Args: + py_obj: python object to dump; should be string-like (unicode or string) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + if isinstance(py_obj, str): + d = h_group.create_dataset('data_%i' % call_id, data=[py_obj], **kwargs) + d.attrs["type"] = ['string'] + else: + dt = h5.special_dtype(vlen=unicode) + dset = h_group.create_dataset('data_%i' % call_id, shape=(1, ), dtype=dt, **kwargs) + dset[0] = py_obj + dset.attrs['type'] = ['unicode'] + + +def create_none_dataset(py_obj, h_group, call_id=0, **kwargs): + """ Dump None type to file + + Args: + py_obj: python object to dump; must be None object + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. 
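The `python_subdtype` attribute written by `create_python_dtype_dataset` is what lets plain scalars come back as the same built-in type rather than a numpy value. A quick round-trip check, assuming the handlers behave as intended (file name arbitrary):

```python
import hickle as hkl

for value in (42, 3.14, True, 1 + 2j):
    hkl.dump(value, 'scalar.hkl', mode='w')
    loaded = hkl.load('scalar.hkl')
    assert loaded == value
    assert type(loaded) is type(value)   # int stays int, bool stays bool, etc.
```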
+ """ + d = h_group.create_dataset('data_%i' % call_id, data=[0], **kwargs) + d.attrs["type"] = ['none'] + + +def load_list_dataset(h_node): + py_type, data = get_type_and_data(h_node) + return list(data) + +def load_tuple_dataset(h_node): + py_type, data = get_type_and_data(h_node) + return tuple(data) + +def load_set_dataset(h_node): + py_type, data = get_type_and_data(h_node) + return set(data) + +def load_string_dataset(h_node): + py_type, data = get_type_and_data(h_node) + return str(data[0]) + +def load_unicode_dataset(h_node): + py_type, data = get_type_and_data(h_node) + return unicode(data[0]) + +def load_none_dataset(h_node): + return None + +def load_python_dtype_dataset(h_node): + py_type, data = get_type_and_data(h_node) + subtype = h_node.attrs["python_subdtype"] + type_dict = { + "<type 'int'>": int, + "<type 'float'>": float, + "<type 'long'>": long, + "<type 'bool'>": bool, + "<type 'complex'>": complex + } + tcast = type_dict.get(subtype) + return tcast(data) + +types_dict = { + list: create_listlike_dataset, + tuple: create_listlike_dataset, + set: create_listlike_dataset, + str: create_stringlike_dataset, + unicode: create_stringlike_dataset, + int: create_python_dtype_dataset, + float: create_python_dtype_dataset, + long: create_python_dtype_dataset, + bool: create_python_dtype_dataset, + complex: create_python_dtype_dataset, + NoneType: create_none_dataset, +} + +hkl_types_dict = { + "<type 'list'>" : load_list_dataset, + "<type 'tuple'>" : load_tuple_dataset, + "<type 'set'>" : load_set_dataset, + "python_dtype" : load_python_dtype_dataset, + "string" : load_string_dataset, + "unicode" : load_unicode_dataset, + "none" : load_none_dataset +} + diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_python3.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_python3.py new file mode 100644 index 0000000000000000000000000000000000000000..c6b173fd07af42735dd05dd7acb9c42e1c651e38 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_python3.py @@ -0,0 +1,201 @@ +# encoding: utf-8 +""" +# load_python.py + +Handlers for dumping and loading built-in python types. +NB: As these are for built-in types, they are critical to the functioning of hickle. + +""" + +import six +from hickle.helpers import get_type_and_data + +try: + from exceptions import Exception +except ImportError: + pass # above imports will fail in python3 + +try: + ModuleNotFoundError # This fails on Py3.5 and below +except NameError: + ModuleNotFoundError = ImportError + +import h5py as h5 + + +def get_py3_string_type(h_node): + """ Helper function to return the python string type for items in a list. + + Notes: + Py3 string handling is a bit funky and doesn't play too nicely with HDF5. + We needed to add metadata to say if the strings in a list started off as + bytes, string, etc. This helper loads + + """ + try: + py_type = h_node.attrs["py3_string_type"][0] + return py_type + except: + return None + +def create_listlike_dataset(py_obj, h_group, call_id=0, **kwargs): + """ Dumper for list, set, tuple + + Args: + py_obj: python object to dump; should be list-like + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. 
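As the note in `get_py3_string_type` explains, Python 3 strings need extra metadata to survive HDF5; in practice a list of `str` should still round-trip cleanly. A minimal check, assuming the `py3_string_type` bookkeeping works as intended (file name arbitrary):

```python
import hickle as hkl

words = ['alpha', 'beta', 'gamma']
hkl.dump(words, 'strings.hkl', mode='w')
loaded = hkl.load('strings.hkl')

assert loaded == words                             # values survive
assert all(isinstance(w, str) for w in loaded)     # and come back as str, not bytes
```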
+ """ + dtype = str(type(py_obj)) + obj = list(py_obj) + + # h5py does not handle Py3 'str' objects well. Need to catch this + # Only need to check first element as this method + # is only called if all elements have same dtype + py3_str_type = None + if type(obj[0]) in (str, bytes): + py3_str_type = bytes(str(type(obj[0])), 'ascii') + + if type(obj[0]) is str: + #print(py3_str_type) + #print(obj, "HERE") + obj = [bytes(oo, 'utf8') for oo in obj] + #print(obj, "HERE") + + + d = h_group.create_dataset('data_%i' % call_id, data=obj, **kwargs) + d.attrs["type"] = [bytes(dtype, 'ascii')] + + # Need to add some metadata to aid in unpickling if it's a string type + if py3_str_type is not None: + d.attrs["py3_string_type"] = [py3_str_type] + + + +def create_python_dtype_dataset(py_obj, h_group, call_id=0, **kwargs): + """ dumps a python dtype object to h5py file + + Args: + py_obj: python object to dump; should be a python type (int, float, bool etc) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + # kwarg compression etc does not work on scalars + d = h_group.create_dataset('data_%i' % call_id, data=py_obj, + dtype=type(py_obj)) #, **kwargs) + d.attrs["type"] = [b'python_dtype'] + d.attrs['python_subdtype'] = bytes(str(type(py_obj)), 'ascii') + + +def create_stringlike_dataset(py_obj, h_group, call_id=0, **kwargs): + """ dumps a list object to h5py file + + Args: + py_obj: python object to dump; should be string-like (unicode or string) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + if isinstance(py_obj, bytes): + d = h_group.create_dataset('data_%i' % call_id, data=[py_obj], **kwargs) + d.attrs["type"] = [b'bytes'] + elif isinstance(py_obj, str): + dt = h5.special_dtype(vlen=str) + dset = h_group.create_dataset('data_%i' % call_id, shape=(1, ), dtype=dt, **kwargs) + dset[0] = py_obj + dset.attrs['type'] = [b'string'] + +def create_none_dataset(py_obj, h_group, call_id=0, **kwargs): + """ Dump None type to file + + Args: + py_obj: python object to dump; must be None object + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. + """ + d = h_group.create_dataset('data_%i' % call_id, data=[0], **kwargs) + d.attrs["type"] = [b'none'] + + +def load_list_dataset(h_node): + py_type, data = get_type_and_data(h_node) + py3_str_type = get_py3_string_type(h_node) + + if py3_str_type == b"<class 'bytes'>": + # Yuck. 
Convert numpy._bytes -> str -> bytes + return [bytes(str(item, 'utf8'), 'utf8') for item in data] + if py3_str_type == b"<class 'str'>": + return [str(item, 'utf8') for item in data] + else: + return list(data) + +def load_tuple_dataset(h_node): + data = load_list_dataset(h_node) + return tuple(data) + +def load_set_dataset(h_node): + data = load_list_dataset(h_node) + return set(data) + +def load_bytes_dataset(h_node): + py_type, data = get_type_and_data(h_node) + return bytes(data[0]) + +def load_string_dataset(h_node): + py_type, data = get_type_and_data(h_node) + return str(data[0]) + +def load_unicode_dataset(h_node): + py_type, data = get_type_and_data(h_node) + return unicode(data[0]) + +def load_none_dataset(h_node): + return None + +def load_pickled_data(h_node): + py_type, data = get_type_and_data(h_node) + try: + import cPickle as pickle + except ModuleNotFoundError: + import pickle + return pickle.loads(data[0]) + + +def load_python_dtype_dataset(h_node): + py_type, data = get_type_and_data(h_node) + subtype = h_node.attrs["python_subdtype"] + type_dict = { + b"<class 'int'>": int, + b"<class 'float'>": float, + b"<class 'bool'>": bool, + b"<class 'complex'>": complex + } + + tcast = type_dict.get(subtype) + return tcast(data) + + + +types_dict = { + list: create_listlike_dataset, + tuple: create_listlike_dataset, + set: create_listlike_dataset, + bytes: create_stringlike_dataset, + str: create_stringlike_dataset, + #bytearray: create_stringlike_dataset, + int: create_python_dtype_dataset, + float: create_python_dtype_dataset, + bool: create_python_dtype_dataset, + complex: create_python_dtype_dataset, + type(None): create_none_dataset, +} + +hkl_types_dict = { + b"<class 'list'>" : load_list_dataset, + b"<class 'tuple'>" : load_tuple_dataset, + b"<class 'set'>" : load_set_dataset, + b"bytes" : load_bytes_dataset, + b"python_dtype" : load_python_dtype_dataset, + b"string" : load_string_dataset, + b"pickle" : load_pickled_data, + b"none" : load_none_dataset, +} diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_scipy.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_scipy.py new file mode 100644 index 0000000000000000000000000000000000000000..ab09fe23c69ea791371e4b6a808b553c84195289 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/loaders/load_scipy.py @@ -0,0 +1,92 @@ +import six +import scipy +from scipy import sparse + +from hickle.helpers import get_type_and_data + +def check_is_scipy_sparse_array(py_obj): + """ Check if a python object is a scipy sparse array + + Args: + py_obj: python object to check whether it is a sparse array + + Returns + is_numpy (bool): Returns True if it is a sparse array, else False if it isn't + """ + t_csr = type(scipy.sparse.csr_matrix([0])) + t_csc = type(scipy.sparse.csc_matrix([0])) + t_bsr = type(scipy.sparse.bsr_matrix([0])) + is_sparse = type(py_obj) in (t_csr, t_csc, t_bsr) + + return is_sparse + + +def create_sparse_dataset(py_obj, h_group, call_id=0, **kwargs): + """ dumps an sparse array to h5py file + + Args: + py_obj: python object to dump; should be a numpy array or np.ma.array (masked) + h_group (h5.File.group): group to dump data into. + call_id (int): index to identify object's relative location in the iterable. 
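Sparse matrices are stored as a small group of data/indices/indptr/shape datasets, from which the matrix is rebuilt on load. A short round-trip sketch for a CSR matrix (file name arbitrary):

```python
import numpy as np
from scipy import sparse
import hickle as hkl

m = sparse.csr_matrix(np.array([[0, 1, 0], [2, 0, 3]]))
hkl.dump(m, 'sparse.hkl', mode='w')
m2 = hkl.load('sparse.hkl')

assert sparse.issparse(m2)
assert (m != m2).nnz == 0   # no element differs
```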
+ """ + h_sparsegroup = h_group.create_group('data_%i' % call_id) + data = h_sparsegroup.create_dataset('data', data=py_obj.data, **kwargs) + indices = h_sparsegroup.create_dataset('indices', data=py_obj.indices, **kwargs) + indptr = h_sparsegroup.create_dataset('indptr', data=py_obj.indptr, **kwargs) + shape = h_sparsegroup.create_dataset('shape', data=py_obj.shape, **kwargs) + + if isinstance(py_obj, type(sparse.csr_matrix([0]))): + type_str = 'csr' + elif isinstance(py_obj, type(sparse.csc_matrix([0]))): + type_str = 'csc' + elif isinstance(py_obj, type(sparse.bsr_matrix([0]))): + type_str = 'bsr' + + if six.PY2: + h_sparsegroup.attrs["type"] = [b'%s_matrix' % type_str] + data.attrs["type"] = [b"%s_matrix_data" % type_str] + indices.attrs["type"] = [b"%s_matrix_indices" % type_str] + indptr.attrs["type"] = [b"%s_matrix_indptr" % type_str] + shape.attrs["type"] = [b"%s_matrix_shape" % type_str] + else: + h_sparsegroup.attrs["type"] = [bytes(str('%s_matrix' % type_str), 'ascii')] + data.attrs["type"] = [bytes(str("%s_matrix_data" % type_str), 'ascii')] + indices.attrs["type"] = [bytes(str("%s_matrix_indices" % type_str), 'ascii')] + indptr.attrs["type"] = [bytes(str("%s_matrix_indptr" % type_str), 'ascii')] + shape.attrs["type"] = [bytes(str("%s_matrix_shape" % type_str), 'ascii')] + +def load_sparse_matrix_data(h_node): + + py_type, data = get_type_and_data(h_node) + h_root = h_node.parent + indices = h_root.get('indices')[:] + indptr = h_root.get('indptr')[:] + shape = h_root.get('shape')[:] + + if py_type == b'csc_matrix_data': + smat = sparse.csc_matrix((data, indices, indptr), dtype=data.dtype, shape=shape) + elif py_type == b'csr_matrix_data': + smat = sparse.csr_matrix((data, indices, indptr), dtype=data.dtype, shape=shape) + elif py_type == b'bsr_matrix_data': + smat = sparse.bsr_matrix((data, indices, indptr), dtype=data.dtype, shape=shape) + return smat + + + + + +class_register = [ + [scipy.sparse.csr_matrix, b'csr_matrix_data', create_sparse_dataset, load_sparse_matrix_data, False, check_is_scipy_sparse_array], + [scipy.sparse.csc_matrix, b'csc_matrix_data', create_sparse_dataset, load_sparse_matrix_data, False, check_is_scipy_sparse_array], + [scipy.sparse.bsr_matrix, b'bsr_matrix_data', create_sparse_dataset, load_sparse_matrix_data, False, check_is_scipy_sparse_array], +] + +exclude_register = [] + +# Need to ignore things like csc_matrix_indices which are loaded automatically +for mat_type in ('csr', 'csc', 'bsr'): + for attrib in ('indices', 'indptr', 'shape'): + hkl_key = "%s_matrix_%s" % (mat_type, attrib) + if not six.PY2: + hkl_key = hkl_key.encode('ascii') + exclude_register.append(hkl_key) diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/lookup.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/lookup.py new file mode 100644 index 0000000000000000000000000000000000000000..99d13df9315be642540e46efc44d8e3d293de708 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/hickle/lookup.py @@ -0,0 +1,238 @@ +""" +#lookup.py + +This file contains all the mappings between hickle/HDF5 metadata and python types. +There are four dictionaries and one set that are populated here: + +1) types_dict +types_dict: mapping between python types and dataset creation functions, e.g. 
+ types_dict = { + list: create_listlike_dataset, + int: create_python_dtype_dataset, + np.ndarray: create_np_array_dataset + } + +2) hkl_types_dict +hkl_types_dict: mapping between hickle metadata and dataset loading functions, e.g. + hkl_types_dict = { + "<type 'list'>" : load_list_dataset, + "<type 'tuple'>" : load_tuple_dataset + } + +3) container_types_dict +container_types_dict: mapping required to convert the PyContainer object in hickle.py + back into the required native type. PyContainer is required as + some iterable types are immutable (do not have an append() function). + Here is an example: + container_types_dict = { + "<type 'list'>": list, + "<type 'tuple'>": tuple + } + +4) container_key_types_dict +container_key_types_dict: mapping specifically for converting hickled dict data back into + a dictionary with the same key type. While python dictionary keys + can be any hashable object, in HDF5 a unicode/string is required + for a dataset name. Example: + container_key_types_dict = { + "<type 'str'>": str, + "<type 'unicode'>": unicode + } + +5) types_not_to_sort +type_not_to_sort is a list of hickle type attributes that may be hierarchical, +but don't require sorting by integer index. + +## Extending hickle to add support for other classes and types + +The process to add new load/dump capabilities is as follows: + +1) Create a file called load_[newstuff].py in loaders/ +2) In the load_[newstuff].py file, define your create_dataset and load_dataset functions, + along with all required mapping dictionaries. +3) Add an import call here, and populate the lookup dictionaries with update() calls: + # Add loaders for [newstuff] + try: + from .loaders.load_[newstuff[ import types_dict as ns_types_dict + from .loaders.load_[newstuff[ import hkl_types_dict as ns_hkl_types_dict + types_dict.update(ns_types_dict) + hkl_types_dict.update(ns_hkl_types_dict) + ... 
(Add container_types_dict etc if required) + except ImportError: + raise +""" + +import six +from ast import literal_eval + +def return_first(x): + """ Return first element of a list """ + return x[0] + +def load_nothing(h_hode): + pass + +types_dict = {} + +hkl_types_dict = {} + +types_not_to_sort = [b'dict', b'csr_matrix', b'csc_matrix', b'bsr_matrix'] + +container_types_dict = { + b"<type 'list'>": list, + b"<type 'tuple'>": tuple, + b"<type 'set'>": set, + b"<class 'list'>": list, + b"<class 'tuple'>": tuple, + b"<class 'set'>": set, + b"csr_matrix": return_first, + b"csc_matrix": return_first, + b"bsr_matrix": return_first + } + +# Technically, any hashable object can be used, for now sticking with built-in types +container_key_types_dict = { + b"<type 'str'>": literal_eval, + b"<type 'float'>": float, + b"<type 'bool'>": bool, + b"<type 'int'>": int, + b"<type 'complex'>": complex, + b"<type 'tuple'>": literal_eval, + b"<class 'str'>": literal_eval, + b"<class 'float'>": float, + b"<class 'bool'>": bool, + b"<class 'int'>": int, + b"<class 'complex'>": complex, + b"<class 'tuple'>": literal_eval + } + +if six.PY2: + container_key_types_dict[b"<type 'unicode'>"] = literal_eval + container_key_types_dict[b"<type 'long'>"] = long + +# Add loaders for built-in python types +if six.PY2: + from .loaders.load_python import types_dict as py_types_dict + from .loaders.load_python import hkl_types_dict as py_hkl_types_dict +else: + from .loaders.load_python3 import types_dict as py_types_dict + from .loaders.load_python3 import hkl_types_dict as py_hkl_types_dict + +types_dict.update(py_types_dict) +hkl_types_dict.update(py_hkl_types_dict) + +# Add loaders for numpy types +from .loaders.load_numpy import types_dict as np_types_dict +from .loaders.load_numpy import hkl_types_dict as np_hkl_types_dict +from .loaders.load_numpy import check_is_numpy_array +types_dict.update(np_types_dict) +hkl_types_dict.update(np_hkl_types_dict) + +####################### +## ND-ARRAY checking ## +####################### + +ndarray_like_check_fns = [ + check_is_numpy_array +] + +def check_is_ndarray_like(py_obj): + is_ndarray_like = False + for ii, check_fn in enumerate(ndarray_like_check_fns): + is_ndarray_like = check_fn(py_obj) + if is_ndarray_like: + break + return is_ndarray_like + + + + +####################### +## loading optional ## +####################### + +def register_class(myclass_type, hkl_str, dump_function, load_function, + to_sort=True, ndarray_check_fn=None): + """ Register a new hickle class. + + Args: + myclass_type type(class): type of class + dump_function (function def): function to write data to HDF5 + load_function (function def): function to load data from HDF5 + is_iterable (bool): Is the item iterable? + hkl_str (str): String to write to HDF5 file to describe class + to_sort (bool): If the item is iterable, does it require sorting? + ndarray_check_fn (function def): function to use to check if + + """ + types_dict.update({myclass_type: dump_function}) + hkl_types_dict.update({hkl_str: load_function}) + if to_sort == False: + types_not_to_sort.append(hkl_str) + if ndarray_check_fn is not None: + ndarray_like_check_fns.append(ndarray_check_fn) + +def register_class_list(class_list): + """ Register multiple classes in a list + + Args: + class_list (list): A list, where each item is an argument to + the register_class() function. 
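As a concrete, hypothetical illustration of the `register_class()` hook described above: a user-defined `Point` class with its own dump and load handlers. This is only a sketch; whether the surrounding dump path actually picks the handler up depends on which dispatch table the installed hickle version consults.

```python
import numpy as np
from hickle.lookup import register_class

class Point(object):
    """ Toy user-defined class with no built-in hickle handler. """
    def __init__(self, x, y):
        self.x, self.y = x, y

def create_point_dataset(py_obj, h_group, call_id=0, **kwargs):
    """ Store a Point as a two-element dataset tagged type='point'. """
    d = h_group.create_dataset('data_%i' % call_id,
                               data=np.array([py_obj.x, py_obj.y]), **kwargs)
    d.attrs['type'] = [b'point']

def load_point_dataset(h_node):
    """ Rebuild the Point from the stored coordinates. """
    x, y = h_node[:]
    return Point(x, y)

register_class(Point, b'point', create_point_dataset, load_point_dataset)
```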
+ + Notes: This just runs the code: + for item in mylist: + register_class(*item) + """ + for class_item in class_list: + register_class(*class_item) + +def register_class_exclude(hkl_str_to_ignore): + """ Tell loading funciton to ignore any HDF5 dataset with attribute 'type=XYZ' + + Args: + hkl_str_to_ignore (str): attribute type=string to ignore and exclude from loading. + """ + hkl_types_dict[hkl_str_to_ignore] = load_nothing + +def register_exclude_list(exclude_list): + """ Ignore HDF5 datasets with attribute type='XYZ' from loading + + ArgsL + exclude_list (list): List of strings, which correspond to hdf5/hickle + type= attributes not to load. + """ + for hkl_str in exclude_list: + register_class_exclude(hkl_str) + +######################## +## Scipy sparse array ## +######################## + +try: + from .loaders.load_scipy import class_register, exclude_register + register_class_list(class_register) + register_exclude_list(exclude_register) +except ImportError: + pass +except NameError: + pass + +#################### +## Astropy stuff ## +#################### + +try: + from .loaders.load_astropy import class_register + register_class_list(class_register) +except ImportError: + pass + +################## +## Pandas stuff ## +################## + +try: + from .loaders.load_pandas import class_register + register_class_list(class_register) +except ImportError: + pass diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/__init__.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_astropy.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_astropy.py new file mode 100644 index 0000000000000000000000000000000000000000..2086ec37456b2bbcde77fbed2d5370b67ee89381 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_astropy.py @@ -0,0 +1,133 @@ +import hickle as hkl +from astropy.units import Quantity +from astropy.time import Time +from astropy.coordinates import Angle, SkyCoord +from astropy.constants import Constant, EMConstant, G +from astropy.table import Table +import numpy as np +from py.path import local + +# Set the current working directory to the temporary directory +local.get_temproot().chdir() + +def test_astropy_quantity(): + + for uu in ['m^3', 'm^3 / s', 'kg/pc']: + a = Quantity(7, unit=uu) + + hkl.dump(a, "test_ap.h5") + b = hkl.load("test_ap.h5") + + assert a == b + assert a.unit == b.unit + + a *= a + hkl.dump(a, "test_ap.h5") + b = hkl.load("test_ap.h5") + assert a == b + assert a.unit == b.unit + +def TODO_test_astropy_constant(): + hkl.dump(G, "test_ap.h5") + gg = hkl.load("test_ap.h5") + + print(G) + print(gg) + +def test_astropy_table(): + t = Table([[1, 2], [3, 4]], names=('a', 'b'), meta={'name': 'test_thing'}) + + hkl.dump({'a': t}, "test_ap.h5") + t2 = hkl.load("test_ap.h5")['a'] + + print(t) + print(t.meta) + print(t2) + print(t2.meta) + + print(t.dtype, t2.dtype) + assert t.meta == t2.meta + assert t.dtype == t2.dtype + + assert np.allclose(t['a'].astype('float32'), t2['a'].astype('float32')) + assert np.allclose(t['b'].astype('float32'), t2['b'].astype('float32')) + +def 
test_astropy_quantity_array(): + a = Quantity([1,2,3], unit='m') + + hkl.dump(a, "test_ap.h5") + b = hkl.load("test_ap.h5") + + assert np.allclose(a.value, b.value) + assert a.unit == b.unit + +def test_astropy_time_array(): + times = ['1999-01-01T00:00:00.123456789', '2010-01-01T00:00:00'] + t1 = Time(times, format='isot', scale='utc') + hkl.dump(t1, "test_ap2.h5") + t2 = hkl.load("test_ap2.h5") + + print(t1) + print(t2) + assert t1.value.shape == t2.value.shape + for ii in range(len(t1)): + assert t1.value[ii] == t2.value[ii] + assert t1.format == t2.format + assert t1.scale == t2.scale + + times = [58264, 58265, 58266] + t1 = Time(times, format='mjd', scale='utc') + hkl.dump(t1, "test_ap2.h5") + t2 = hkl.load("test_ap2.h5") + + print(t1) + print(t2) + assert t1.value.shape == t2.value.shape + assert np.allclose(t1.value, t2.value) + assert t1.format == t2.format + assert t1.scale == t2.scale + +def test_astropy_angle(): + for uu in ['radian', 'degree']: + a = Angle(1.02, unit=uu) + + hkl.dump(a, "test_ap.h5") + b = hkl.load("test_ap.h5") + assert a == b + assert a.unit == b.unit + +def test_astropy_angle_array(): + a = Angle([1,2,3], unit='degree') + + hkl.dump(a, "test_ap.h5") + b = hkl.load("test_ap.h5") + + assert np.allclose(a.value, b.value) + assert a.unit == b.unit + +def test_astropy_skycoord(): + ra = Angle(['1d20m', '1d21m'], unit='degree') + dec = Angle(['33d0m0s', '33d01m'], unit='degree') + radec = SkyCoord(ra, dec) + hkl.dump(radec, "test_ap.h5") + radec2 = hkl.load("test_ap.h5") + assert np.allclose(radec.ra.value, radec2.ra.value) + assert np.allclose(radec.dec.value, radec2.dec.value) + + ra = Angle(['1d20m', '1d21m'], unit='hourangle') + dec = Angle(['33d0m0s', '33d01m'], unit='degree') + radec = SkyCoord(ra, dec) + hkl.dump(radec, "test_ap.h5") + radec2 = hkl.load("test_ap.h5") + assert np.allclose(radec.ra.value, radec2.ra.value) + assert np.allclose(radec.dec.value, radec2.dec.value) + +if __name__ == "__main__": + test_astropy_quantity() + #test_astropy_constant() + test_astropy_table() + test_astropy_quantity_array() + test_astropy_time_array() + test_astropy_angle() + test_astropy_angle_array() + test_astropy_skycoord() diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_hickle.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_hickle.py new file mode 100644 index 0000000000000000000000000000000000000000..5491054239372a3b5d42c9e6f07b6fc5701ed933 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_hickle.py @@ -0,0 +1,826 @@ +#! /usr/bin/env python +# encoding: utf-8 +""" +# test_hickle.py + +Unit tests for hickle module. 
+ +""" + +import h5py +import hashlib +import numpy as np +import os +import six +import time +from pprint import pprint + +from py.path import local + +import hickle +from hickle.hickle import * + + +# Set current working directory to the temporary directory +local.get_temproot().chdir() + +NESTED_DICT = { + "level1_1": { + "level2_1": [1, 2, 3], + "level2_2": [4, 5, 6] + }, + "level1_2": { + "level2_1": [1, 2, 3], + "level2_2": [4, 5, 6] + }, + "level1_3": { + "level2_1": { + "level3_1": [1, 2, 3], + "level3_2": [4, 5, 6] + }, + "level2_2": [4, 5, 6] + } +} + +DUMP_CACHE = [] # Used in test_track_times() + + +def test_string(): + """ Dumping and loading a string """ + if six.PY2: + filename, mode = 'test.h5', 'w' + string_obj = "The quick brown fox jumps over the lazy dog" + dump(string_obj, filename, mode) + string_hkl = load(filename) + #print "Initial list: %s"%list_obj + #print "Unhickled data: %s"%list_hkl + assert type(string_obj) == type(string_hkl) == str + assert string_obj == string_hkl + else: + pass + + +def test_unicode(): + """ Dumping and loading a unicode string """ + if six.PY2: + filename, mode = 'test.h5', 'w' + u = unichr(233) + unichr(0x0bf2) + unichr(3972) + unichr(6000) + dump(u, filename, mode) + u_hkl = load(filename) + + assert type(u) == type(u_hkl) == unicode + assert u == u_hkl + # For those interested, uncomment below to see what those codes are: + # for i, c in enumerate(u_hkl): + # print i, '%04x' % ord(c), unicodedata.category(c), + # print unicodedata.name(c) + else: + pass + + +def test_unicode2(): + if six.PY2: + a = u"unicode test" + dump(a, 'test.hkl', mode='w') + + z = load('test.hkl') + assert a == z + assert type(a) == type(z) == unicode + pprint(z) + else: + pass + +def test_list(): + """ Dumping and loading a list """ + filename, mode = 'test_list.h5', 'w' + list_obj = [1, 2, 3, 4, 5] + dump(list_obj, filename, mode=mode) + list_hkl = load(filename) + #print(f'Initial list: {list_obj}') + #print(f'Unhickled data: {list_hkl}') + try: + assert type(list_obj) == type(list_hkl) == list + assert list_obj == list_hkl + import h5py + a = h5py.File(filename) + a.close() + + except AssertionError: + print("ERR:", list_obj, list_hkl) + import h5py + + raise() + + +def test_set(): + """ Dumping and loading a list """ + filename, mode = 'test_set.h5', 'w' + list_obj = set([1, 0, 3, 4.5, 11.2]) + dump(list_obj, filename, mode) + list_hkl = load(filename) + #print "Initial list: %s"%list_obj + #print "Unhickled data: %s"%list_hkl + try: + assert type(list_obj) == type(list_hkl) == set + assert list_obj == list_hkl + except AssertionError: + print(type(list_obj)) + print(type(list_hkl)) + #os.remove(filename) + raise + + +def test_numpy(): + """ Dumping and loading numpy array """ + filename, mode = 'test.h5', 'w' + dtypes = ['float32', 'float64', 'complex64', 'complex128'] + + for dt in dtypes: + array_obj = np.ones(8, dtype=dt) + dump(array_obj, filename, mode) + array_hkl = load(filename) + try: + assert array_hkl.dtype == array_obj.dtype + assert np.all((array_hkl, array_obj)) + except AssertionError: + print(array_hkl) + print(array_obj) + raise + + +def test_masked(): + """ Test masked numpy array """ + filename, mode = 'test.h5', 'w' + a = np.ma.array([1,2,3,4], dtype='float32', mask=[0,1,0,0]) + + dump(a, filename, mode) + a_hkl = load(filename) + + try: + assert a_hkl.dtype == a.dtype + assert np.all((a_hkl, a)) + except AssertionError: + print(a_hkl) + print(a) + raise + + +def test_dict(): + """ Test dictionary dumping and loading """ + filename, 
+
+    dd = {
+        'name' : b'Danny',
+        'age' : 28,
+        'height' : 6.1,
+        'dork' : True,
+        'nums' : [1, 2, 3],
+        'narr' : np.array([1,2,3]),
+        #'unic' : u'dan[at]thetelegraphic.com'
+    }
+
+
+    dump(dd, filename, mode)
+    dd_hkl = load(filename)
+
+    for k in dd.keys():
+        try:
+            assert k in dd_hkl.keys()
+
+            if type(dd[k]) is type(np.array([1])):
+                assert np.all(dd[k] == dd_hkl[k])
+            else:
+                #assert dd_hkl[k] == dd[k]
+                pass
+            assert type(dd_hkl[k]) == type(dd[k])
+        except AssertionError:
+            print(k)
+            print(dd_hkl[k])
+            print(dd[k])
+            print(type(dd_hkl[k]), type(dd[k]))
+            raise
+
+
+def test_empty_dict():
+    """ Test empty dictionary dumping and loading """
+    filename, mode = 'test.h5', 'w'
+
+    dump({}, filename, mode)
+    assert load(filename) == {}
+
+
+def test_compression():
+    """ Test compression on datasets"""
+
+    filename, mode = 'test.h5', 'w'
+    dtypes = ['int32', 'float32', 'float64', 'complex64', 'complex128']
+
+    comps = [None, 'gzip', 'lzf']
+
+    for dt in dtypes:
+        for cc in comps:
+            array_obj = np.ones(32768, dtype=dt)
+            dump(array_obj, filename, mode, compression=cc)
+            print(cc, os.path.getsize(filename))
+            array_hkl = load(filename)
+            try:
+                assert array_hkl.dtype == array_obj.dtype
+                assert np.all(array_hkl == array_obj)
+            except AssertionError:
+                print(array_hkl)
+                print(array_obj)
+                raise
+
+
+def test_dict_int_key():
+    """ Test for dictionaries with integer keys """
+    filename, mode = 'test.h5', 'w'
+
+    dd = {
+        0: "test",
+        1: "test2"
+    }
+
+    dump(dd, filename, mode)
+    dd_hkl = load(filename)
+
+
+def test_dict_nested():
+    """ Test for nested dictionaries """
+    filename, mode = 'test.h5', 'w'
+
+    dd = NESTED_DICT
+
+    dump(dd, filename, mode)
+    dd_hkl = load(filename)
+
+    ll_hkl = dd_hkl["level1_3"]["level2_1"]["level3_1"]
+    ll = dd["level1_3"]["level2_1"]["level3_1"]
+    assert ll == ll_hkl
+
+
+def test_masked_dict():
+    """ Test dictionaries with masked arrays """
+
+    filename, mode = 'test.h5', 'w'
+
+    dd = {
+        "data" : np.ma.array([1,2,3], mask=[True, False, False]),
+        "data2" : np.array([1,2,3,4,5])
+    }
+
+    dump(dd, filename, mode)
+    dd_hkl = load(filename)
+
+    for k in dd.keys():
+        try:
+            assert k in dd_hkl.keys()
+            if type(dd[k]) is type(np.array([1])):
+                assert np.all(dd[k] == dd_hkl[k])
+            elif type(dd[k]) is type(np.ma.array([1])):
+                print(dd[k].data)
+                print(dd_hkl[k].data)
+                assert np.allclose(dd[k].data, dd_hkl[k].data)
+                assert np.allclose(dd[k].mask, dd_hkl[k].mask)
+
+            assert type(dd_hkl[k]) == type(dd[k])
+
+        except AssertionError:
+            print(k)
+            print(dd_hkl[k])
+            print(dd[k])
+            print(type(dd_hkl[k]), type(dd[k]))
+            raise
+
+
+def test_np_float():
+    """ Test for singular np dtypes """
+    filename, mode = 'np_float.h5', 'w'
+
+    dtype_list = (np.float16, np.float32, np.float64,
+                  np.complex64, np.complex128,
+                  np.int8, np.int16, np.int32, np.int64,
+                  np.uint8, np.uint16, np.uint32, np.uint64)
+
+    for dt in dtype_list:
+
+        dd = dt(1)
+        dump(dd, filename, mode)
+        dd_hkl = load(filename)
+        assert dd == dd_hkl
+        assert dd.dtype == dd_hkl.dtype
+
+    dd = {}
+    for dt in dtype_list:
+        dd[str(dt)] = dt(1.0)
+    dump(dd, filename, mode)
+    dd_hkl = load(filename)
+
+    print(dd)
+    for dt in dtype_list:
+        assert dd[str(dt)] == dd_hkl[str(dt)]
+
+
+def md5sum(filename, blocksize=65536):
+    """ Compute MD5 sum for a given file """
+    hash = hashlib.md5()
+
+    with open(filename, "r+b") as f:
+        for block in iter(lambda: f.read(blocksize), b""):
+            hash.update(block)
+    return hash.hexdigest()
+
+
+def caching_dump(obj, filename, *args, **kwargs):
+    """ Save arguments
of all dump calls """ + DUMP_CACHE.append((obj, filename, args, kwargs)) + return hickle_dump(obj, filename, *args, **kwargs) + + +def test_track_times(): + """ Verify that track_times = False produces identical files """ + hashes = [] + for obj, filename, mode, kwargs in DUMP_CACHE: + if isinstance(filename, hickle.H5FileWrapper): + filename = str(filename.file_name) + kwargs['track_times'] = False + caching_dump(obj, filename, mode, **kwargs) + hashes.append(md5sum(filename)) + + time.sleep(1) + + for hash1, (obj, filename, mode, kwargs) in zip(hashes, DUMP_CACHE): + if isinstance(filename, hickle.H5FileWrapper): + filename = str(filename.file_name) + caching_dump(obj, filename, mode, **kwargs) + hash2 = md5sum(filename) + print(hash1, hash2) + assert hash1 == hash2 + + +def test_comp_kwargs(): + """ Test compression with some kwargs for shuffle and chunking """ + + filename, mode = 'test.h5', 'w' + dtypes = ['int32', 'float32', 'float64', 'complex64', 'complex128'] + + comps = [None, 'gzip', 'lzf'] + chunks = [(100, 100), (250, 250)] + shuffles = [True, False] + scaleoffsets = [0, 1, 2] + + for dt in dtypes: + for cc in comps: + for ch in chunks: + for sh in shuffles: + for so in scaleoffsets: + kwargs = { + 'compression' : cc, + 'dtype': dt, + 'chunks': ch, + 'shuffle': sh, + 'scaleoffset': so + } + #array_obj = np.random.random_integers(low=-8192, high=8192, size=(1000, 1000)).astype(dt) + array_obj = NESTED_DICT + dump(array_obj, filename, mode, compression=cc) + print(kwargs, os.path.getsize(filename)) + array_hkl = load(filename) + + +def test_list_numpy(): + """ Test converting a list of numpy arrays """ + + filename, mode = 'test.h5', 'w' + + a = np.ones(1024) + b = np.zeros(1000) + c = [a, b] + + dump(c, filename, mode) + dd_hkl = load(filename) + + print(dd_hkl) + + assert isinstance(dd_hkl, list) + assert isinstance(dd_hkl[0], np.ndarray) + + +def test_tuple_numpy(): + """ Test converting a list of numpy arrays """ + + filename, mode = 'test.h5', 'w' + + a = np.ones(1024) + b = np.zeros(1000) + c = (a, b, a) + + dump(c, filename, mode) + dd_hkl = load(filename) + + print(dd_hkl) + + assert isinstance(dd_hkl, tuple) + assert isinstance(dd_hkl[0], np.ndarray) + + +def test_none(): + """ Test None type hickling """ + + filename, mode = 'test.h5', 'w' + + a = None + + dump(a, filename, mode) + dd_hkl = load(filename) + print(a) + print(dd_hkl) + + assert isinstance(dd_hkl, type(None)) + + +def test_dict_none(): + """ Test None type hickling """ + + filename, mode = 'test.h5', 'w' + + a = {'a': 1, 'b' : None} + + dump(a, filename, mode) + dd_hkl = load(filename) + print(a) + print(dd_hkl) + + assert isinstance(a['b'], type(None)) + + +def test_file_open_close(): + """ https://github.com/telegraphic/hickle/issues/20 """ + import h5py + f = h5py.File('test.hdf', 'w') + a = np.arange(5) + + dump(a, 'test.hkl') + dump(a, 'test.hkl') + + dump(a, f, mode='w') + f.close() + try: + dump(a, f, mode='w') + except hickle.hickle.ClosedFileError: + print("Tests: Closed file exception caught") + + +def test_list_order(): + """ https://github.com/telegraphic/hickle/issues/26 """ + d = [np.arange(n + 1) for n in range(20)] + hickle.dump(d, 'test.h5') + d_hkl = hickle.load('test.h5') + + try: + for ii, xx in enumerate(d): + assert d[ii].shape == d_hkl[ii].shape + for ii, xx in enumerate(d): + assert np.allclose(d[ii], d_hkl[ii]) + except AssertionError: + print(d[ii], d_hkl[ii]) + raise + + +def test_embedded_array(): + """ See https://github.com/telegraphic/hickle/issues/24 """ + + d_orig = 
[[np.array([10., 20.]), np.array([10, 20, 30])], [np.array([10, 2]), np.array([1.])]] + hickle.dump(d_orig, 'test.h5') + d_hkl = hickle.load('test.h5') + + for ii, xx in enumerate(d_orig): + for jj, yy in enumerate(xx): + assert np.allclose(d_orig[ii][jj], d_hkl[ii][jj]) + + print(d_hkl) + print(d_orig) + + +################ +## NEW TESTS ## +################ + + +def generate_nested(): + a = [1, 2, 3] + b = [a, a, a] + c = [a, b, 's'] + d = [a, b, c, c, a] + e = [d, d, d, d, 1] + f = {'a' : a, 'b' : b, 'e' : e} + g = {'f' : f, 'a' : e, 'd': d} + h = {'h': g, 'g' : f} + z = [f, a, b, c, d, e, f, g, h, g, h] + a = np.array([1, 2, 3, 4]) + b = set([1, 2, 3, 4, 5]) + c = (1, 2, 3, 4, 5) + d = np.ma.array([1, 2, 3, 4, 5, 6, 7, 8]) + z = {'a': a, 'b': b, 'c': c, 'd': d, 'z': z} + return z + + +def test_is_iterable(): + a = [1, 2, 3] + b = 1 + + assert check_is_iterable(a) == True + assert check_is_iterable(b) == False + + +def test_check_iterable_item_type(): + + a = [1, 2, 3] + b = [a, a, a] + c = [a, b, 's'] + + type_a = check_iterable_item_type(a) + type_b = check_iterable_item_type(b) + type_c = check_iterable_item_type(c) + + assert type_a is int + assert type_b is list + assert type_c == False + + +def test_dump_nested(): + """ Dump a complicated nested object to HDF5 + """ + z = generate_nested() + dump(z, 'test.hkl', mode='w') + + +def test_with_dump(): + lst = [1] + tpl = (1) + dct = {1: 1} + arr = np.array([1]) + + with h5py.File('test.hkl') as file: + dump(lst, file, path='/lst') + dump(tpl, file, path='/tpl') + dump(dct, file, path='/dct') + dump(arr, file, path='/arr') + + +def test_with_load(): + lst = [1] + tpl = (1) + dct = {1: 1} + arr = np.array([1]) + + with h5py.File('test.hkl') as file: + assert load(file, '/lst') == lst + assert load(file, '/tpl') == tpl + assert load(file, '/dct') == dct + assert load(file, '/arr') == arr + + +def test_load(): + + a = set([1, 2, 3, 4]) + b = set([5, 6, 7, 8]) + c = set([9, 10, 11, 12]) + z = (a, b, c) + z = [z, z] + z = (z, z, z, z, z) + + print("Original:") + pprint(z) + dump(z, 'test.hkl', mode='w') + + print("\nReconstructed:") + z = load('test.hkl') + pprint(z) + + +def test_sort_keys(): + keys = [b'data_0', b'data_1', b'data_2', b'data_3', b'data_10'] + keys_sorted = [b'data_0', b'data_1', b'data_2', b'data_3', b'data_10'] + + print(keys) + print(keys_sorted) + assert sort_keys(keys) == keys_sorted + + +def test_ndarray(): + + a = np.array([1,2,3]) + b = np.array([2,3,4]) + z = (a, b) + + print("Original:") + pprint(z) + dump(z, 'test.hkl', mode='w') + + print("\nReconstructed:") + z = load('test.hkl') + pprint(z) + + +def test_ndarray_masked(): + + a = np.ma.array([1,2,3]) + b = np.ma.array([2,3,4], mask=[True, False, True]) + z = (a, b) + + print("Original:") + pprint(z) + dump(z, 'test.hkl', mode='w') + + print("\nReconstructed:") + z = load('test.hkl') + pprint(z) + + +def test_simple_dict(): + a = {'key1': 1, 'key2': 2} + + dump(a, 'test.hkl') + z = load('test.hkl') + + pprint(a) + pprint(z) + + +def test_complex_dict(): + a = {'akey': 1, 'akey2': 2} + if six.PY2: + # NO LONG TYPE IN PY3! 
+ b = {'bkey': 2.0, 'bkey3': long(3.0)} + else: + b = a + c = {'ckey': "hello", "ckey2": "hi there"} + z = {'zkey1': a, 'zkey2': b, 'zkey3': c} + + print("Original:") + pprint(z) + dump(z, 'test.hkl', mode='w') + + print("\nReconstructed:") + z = load('test.hkl') + pprint(z) + +def test_multi_hickle(): + a = {'a': 123, 'b': [1, 2, 4]} + + if os.path.exists("test.hkl"): + os.remove("test.hkl") + dump(a, "test.hkl", path="/test", mode="w") + dump(a, "test.hkl", path="/test2", mode="r+") + dump(a, "test.hkl", path="/test3", mode="r+") + dump(a, "test.hkl", path="/test4", mode="r+") + + a = load("test.hkl", path="/test") + b = load("test.hkl", path="/test2") + c = load("test.hkl", path="/test3") + d = load("test.hkl", path="/test4") + +def test_complex(): + """ Test complex value dtype is handled correctly + + https://github.com/telegraphic/hickle/issues/29 """ + + data = {"A":1.5, "B":1.5 + 1j, "C":np.linspace(0,1,4) + 2j} + dump(data, "test.hkl") + data2 = load("test.hkl") + for key in data.keys(): + assert type(data[key]) == type(data2[key]) + +def test_nonstring_keys(): + """ Test that keys are reconstructed back to their original datatypes + https://github.com/telegraphic/hickle/issues/36 + """ + if six.PY2: + u = unichr(233) + unichr(0x0bf2) + unichr(3972) + unichr(6000) + + data = {u'test': 123, + 'def': 456, + 'hik' : np.array([1,2,3]), + u: u, + 0: 0, + True: 'hi', + 1.1 : 'hey', + #2L : 'omg', + 1j: 'complex_hashable', + (1, 2): 'boo', + ('A', 17.4, 42): [1, 7, 'A'], + (): '1313e was here', + '0': 0 + } + #data = {'0': 123, 'def': 456} + print(data) + dump(data, "test.hkl") + data2 = load("test.hkl") + print(data2) + + for key in data.keys(): + assert key in data2.keys() + + print(data2) + else: + pass + +def test_scalar_compression(): + """ Test bug where compression causes a crash on scalar datasets + + (Scalars are incompressible!) + https://github.com/telegraphic/hickle/issues/37 + """ + data = {'a' : 0, 'b' : np.float(2), 'c' : True} + + dump(data, "test.hkl", compression='gzip') + data2 = load("test.hkl") + + print(data2) + for key in data.keys(): + assert type(data[key]) == type(data2[key]) + +def test_bytes(): + """ Dumping and loading a string. 
PYTHON3 ONLY """ + if six.PY3: + filename, mode = 'test.h5', 'w' + string_obj = b"The quick brown fox jumps over the lazy dog" + dump(string_obj, filename, mode) + string_hkl = load(filename) + #print "Initial list: %s"%list_obj + #print "Unhickled data: %s"%list_hkl + print(type(string_obj)) + print(type(string_hkl)) + assert type(string_obj) == type(string_hkl) == bytes + assert string_obj == string_hkl + else: + pass + +def test_np_scalar(): + """ Numpy scalar datatype + + https://github.com/telegraphic/hickle/issues/50 + """ + + fid='test.h5py' + r0={'test': np.float64(10.)} + s = dump(r0, fid) + r = load(fid) + print(r) + assert type(r0['test']) == type(r['test']) + +if __name__ == '__main__': + """ Some tests and examples """ + test_sort_keys() + + test_np_scalar() + test_scalar_compression() + test_complex() + test_file_open_close() + test_dict_none() + test_none() + test_masked_dict() + test_list() + test_set() + test_numpy() + test_dict() + test_empty_dict() + test_compression() + test_masked() + test_dict_nested() + test_comp_kwargs() + test_list_numpy() + test_tuple_numpy() + test_track_times() + test_list_order() + test_embedded_array() + test_np_float() + + if six.PY2: + test_unicode() + test_unicode2() + test_string() + test_nonstring_keys() + + if six.PY3: + test_bytes() + + + # NEW TESTS + test_is_iterable() + test_check_iterable_item_type() + test_dump_nested() + test_with_dump() + test_with_load() + test_load() + test_sort_keys() + test_ndarray() + test_ndarray_masked() + test_simple_dict() + test_complex_dict() + test_multi_hickle() + test_dict_int_key() + + # Cleanup + print("ALL TESTS PASSED!") \ No newline at end of file diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_hickle_helpers.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_hickle_helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..253839e97c96e484b7a66ad9d174648d281d1c66 --- /dev/null +++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_hickle_helpers.py @@ -0,0 +1,63 @@ +#! /usr/bin/env python +# encoding: utf-8 +""" +# test_hickle_helpers.py + +Unit tests for hickle module -- helper functions. 
+
+"""
+
+import numpy as np
+try:
+    import scipy
+    from scipy import sparse
+    _has_scipy = True
+except ImportError:
+    _has_scipy = False
+
+from hickle.helpers import check_is_hashable, check_is_iterable, check_iterable_item_type
+
+from hickle.loaders.load_numpy import check_is_numpy_array
+if _has_scipy:
+    from hickle.loaders.load_scipy import check_is_scipy_sparse_array
+
+
+
+def test_check_is_iterable():
+    assert check_is_iterable([1,2,3]) is True
+    assert check_is_iterable(1) is False
+
+
+def test_check_is_hashable():
+    assert check_is_hashable(1) is True
+    assert check_is_hashable([1,2,3]) is False
+
+
+def test_check_iterable_item_type():
+
+    assert check_iterable_item_type([1,2,3]) is int
+    assert check_iterable_item_type([int(1), float(1)]) is False
+    assert check_iterable_item_type([]) is False
+
+
+def test_check_is_numpy_array():
+    assert check_is_numpy_array(np.array([1,2,3])) is True
+    assert check_is_numpy_array(np.ma.array([1,2,3])) is True
+    assert check_is_numpy_array([1,2]) is False
+
+
+def test_check_is_scipy_sparse_array():
+    t_csr = scipy.sparse.csr_matrix([0])
+    t_csc = scipy.sparse.csc_matrix([0])
+    t_bsr = scipy.sparse.bsr_matrix([0])
+    assert check_is_scipy_sparse_array(t_csr) is True
+    assert check_is_scipy_sparse_array(t_csc) is True
+    assert check_is_scipy_sparse_array(t_bsr) is True
+    assert check_is_scipy_sparse_array(np.array([1])) is False
+
+if __name__ == "__main__":
+    test_check_is_hashable()
+    test_check_is_iterable()
+    test_check_is_numpy_array()
+    test_check_iterable_item_type()
+    if _has_scipy:
+        test_check_is_scipy_sparse_array()
\ No newline at end of file
diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_legacy_load.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_legacy_load.py
new file mode 100644
index 0000000000000000000000000000000000000000..e849bcf6594c7139357659f8cf0721ef777da3b0
--- /dev/null
+++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_legacy_load.py
@@ -0,0 +1,30 @@
+import glob
+import warnings
+import hickle as hkl
+import h5py
+import six
+
+def test_legacy_load():
+    if six.PY2:
+        filelist = sorted(glob.glob('legacy_hkls/*.hkl'))
+
+        # Make all warnings show
+        warnings.simplefilter("always")
+
+        for filename in filelist:
+            try:
+                print(filename)
+                a = hkl.load(filename)
+            except:
+                with h5py.File(filename) as a:
+                    print(a.attrs.items())
+                    print(a.items())
+                    for key, item in a.items():
+                        print(item.attrs.items())
+                raise
+    else:
+        print("Legacy loading only works in Py2. Sorry.")
+        pass
+
+if __name__ == "__main__":
+    test_legacy_load()
\ No newline at end of file
diff --git a/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_scipy.py b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_scipy.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab78311d3eb543f4d3515b6aef2eba4e5ea2a175
--- /dev/null
+++ b/video_prediction_savp/external_package/hickle/lib/python3.6/site-packages/hickle-3.4.3-py3.6.egg/tests/test_scipy.py
@@ -0,0 +1,57 @@
+import numpy as np
+from scipy.sparse import csr_matrix, csc_matrix, bsr_matrix
+
+import hickle
+from hickle.loaders.load_scipy import check_is_scipy_sparse_array
+
+from py.path import local
+
+# Set the current working directory to the temporary directory
+local.get_temproot().chdir()
+
+
+def test_is_sparse():
+    sm0 = csr_matrix((3, 4), dtype=np.int8)
+    sm1 = csc_matrix((1, 2))
+
+    assert check_is_scipy_sparse_array(sm0)
+    assert check_is_scipy_sparse_array(sm1)
+
+
+def test_sparse_matrix():
+    sm0 = csr_matrix((3, 4), dtype=np.int8).toarray()
+
+    row = np.array([0, 0, 1, 2, 2, 2])
+    col = np.array([0, 2, 2, 0, 1, 2])
+    data = np.array([1, 2, 3, 4, 5, 6])
+    sm1 = csr_matrix((data, (row, col)), shape=(3, 3))
+    sm2 = csc_matrix((data, (row, col)), shape=(3, 3))
+
+    indptr = np.array([0, 2, 3, 6])
+    indices = np.array([0, 2, 2, 0, 1, 2])
+    data = np.array([1, 2, 3, 4, 5, 6]).repeat(4).reshape(6, 2, 2)
+    sm3 = bsr_matrix((data, indices, indptr), shape=(6, 6))
+
+    hickle.dump(sm1, 'test_sp.h5')
+    sm1_h = hickle.load('test_sp.h5')
+    hickle.dump(sm2, 'test_sp2.h5')
+    sm2_h = hickle.load('test_sp2.h5')
+    hickle.dump(sm3, 'test_sp3.h5')
+    sm3_h = hickle.load('test_sp3.h5')
+
+    assert isinstance(sm1_h, csr_matrix)
+    assert isinstance(sm2_h, csc_matrix)
+    assert isinstance(sm3_h, bsr_matrix)
+
+    assert np.allclose(sm1_h.data, sm1.data)
+    assert np.allclose(sm2_h.data, sm2.data)
+    assert np.allclose(sm3_h.data, sm3.data)
+
+    assert sm1_h.shape == sm1.shape
+    assert sm2_h.shape == sm2.shape
+    assert sm3_h.shape == sm3.shape
+
+
+if __name__ == "__main__":
+    test_sparse_matrix()
+    test_is_sparse()
\ No newline at end of file