  1. """Utilities for fast persistence of big data, with optional compression."""
  2. # Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
  3. # Copyright (c) 2009 Gael Varoquaux
  4. # License: BSD Style, 3 clauses.
  5. import pickle
  6. import io
  7. import warnings
  8. import contextlib
  9. from .compressor import _ZFILE_PREFIX
  10. from .compressor import _COMPRESSORS
  11. try:
  12. import numpy as np
  13. except ImportError:
  14. np = None
  15. Unpickler = pickle._Unpickler
  16. Pickler = pickle._Pickler
  17. xrange = range
  18. try:
  19. # The python standard library can be built without bz2 so we make bz2
  20. # usage optional.
  21. # see https://github.com/scikit-learn/scikit-learn/issues/7526 for more
  22. # details.
  23. import bz2
  24. except ImportError:
  25. bz2 = None
  26. # Buffer size used in io.BufferedReader and io.BufferedWriter
  27. _IO_BUFFER_SIZE = 1024 ** 2
  28. def _is_raw_file(fileobj):
  29. """Check if fileobj is a raw file object, e.g created with open."""
  30. fileobj = getattr(fileobj, 'raw', fileobj)
  31. return isinstance(fileobj, io.FileIO)
  32. def _get_prefixes_max_len():
  33. # Compute the max prefix len of registered compressors.
  34. prefixes = [len(compressor.prefix) for compressor in _COMPRESSORS.values()]
  35. prefixes += [len(_ZFILE_PREFIX)]
  36. return max(prefixes)
  37. ###############################################################################
  38. # Cache file utilities
  39. def _detect_compressor(fileobj):
  40. """Return the compressor matching fileobj.
  41. Parameters
  42. ----------
  43. fileobj: file object
  44. Returns
  45. -------
  46. str in {'zlib', 'gzip', 'bz2', 'lzma', 'xz', 'compat', 'not-compressed'}
  47. """
  48. # Read the magic number in the first bytes of the file.
  49. max_prefix_len = _get_prefixes_max_len()
  50. if hasattr(fileobj, 'peek'):
  51. # Peek allows to read those bytes without moving the cursor in the
  52. # file whic.
  53. first_bytes = fileobj.peek(max_prefix_len)
  54. else:
  55. # Fallback to seek if the fileobject is not peekable.
  56. first_bytes = fileobj.read(max_prefix_len)
  57. fileobj.seek(0)
  58. if first_bytes.startswith(_ZFILE_PREFIX):
  59. return "compat"
  60. else:
  61. for name, compressor in _COMPRESSORS.items():
  62. if first_bytes.startswith(compressor.prefix):
  63. return name
  64. return "not-compressed"
  65. def _buffered_read_file(fobj):
  66. """Return a buffered version of a read file object."""
  67. return io.BufferedReader(fobj, buffer_size=_IO_BUFFER_SIZE)
  68. def _buffered_write_file(fobj):
  69. """Return a buffered version of a write file object."""
  70. return io.BufferedWriter(fobj, buffer_size=_IO_BUFFER_SIZE)
@contextlib.contextmanager
def _read_fileobject(fileobj, filename, mmap_mode=None):
    """Utility function opening the right fileobject from a filename.

    The magic number is used to choose between the type of file object to open:

    * regular file object (default)
    * zlib file object
    * gzip file object
    * bz2 file object
    * lzma file object (for xz and lzma compressor)

    Parameters
    ----------
    fileobj: file object
    filename: str
        filename path corresponding to the fileobj parameter.
    mmap_mode: str
        memory map mode that should be used to open the pickle file. This
        parameter is only used to warn the user when it is incompatible with
        the detected file (compressed or in-memory). Default: None.

    Yields
    ------
    a file like object, except for the legacy 'compat' format where the
    filename string itself is yielded so the caller can dispatch to the
    old-format loading code.
    """
    # Detect if the fileobj contains compressed data.
    compressor = _detect_compressor(fileobj)
    if compressor == 'compat':
        # Compatibility with old pickle mode: simply return the input
        # filename "as-is" and let the compatibility function be called by the
        # caller.
        warnings.warn("The file '%s' has been generated with a joblib "
                      "version less than 0.10. "
                      "Please regenerate this pickle file." % filename,
                      DeprecationWarning, stacklevel=2)
        yield filename
    else:
        if compressor in _COMPRESSORS:
            # based on the compressor detected in the file, we open the
            # correct decompressor file object, wrapped in a buffer.
            compressor_wrapper = _COMPRESSORS[compressor]
            inst = compressor_wrapper.decompressor_file(fileobj)
            fileobj = _buffered_read_file(inst)
        # Checking if incompatible load parameters with the type of file:
        # mmap_mode cannot be used with compressed file or in memory buffers
        # such as io.BytesIO. In every incompatible case we only warn and
        # fall back to a plain (non-mmap) load instead of failing.
        if mmap_mode is not None:
            if isinstance(fileobj, io.BytesIO):
                warnings.warn('In memory persistence is not compatible with '
                              'mmap_mode "%(mmap_mode)s" flag passed. '
                              'mmap_mode option will be ignored.'
                              % locals(), stacklevel=2)
            elif compressor != 'not-compressed':
                warnings.warn('mmap_mode "%(mmap_mode)s" is not compatible '
                              'with compressed file %(filename)s. '
                              '"%(mmap_mode)s" flag will be ignored.'
                              % locals(), stacklevel=2)
            elif not _is_raw_file(fileobj):
                warnings.warn('"%(fileobj)r" is not a raw file, mmap_mode '
                              '"%(mmap_mode)s" flag will be ignored.'
                              % locals(), stacklevel=2)
        yield fileobj
  132. def _write_fileobject(filename, compress=("zlib", 3)):
  133. """Return the right compressor file object in write mode."""
  134. compressmethod = compress[0]
  135. compresslevel = compress[1]
  136. if compressmethod in _COMPRESSORS.keys():
  137. file_instance = _COMPRESSORS[compressmethod].compressor_file(
  138. filename, compresslevel=compresslevel)
  139. return _buffered_write_file(file_instance)
  140. else:
  141. file_instance = _COMPRESSORS['zlib'].compressor_file(
  142. filename, compresslevel=compresslevel)
  143. return _buffered_write_file(file_instance)
# Utility functions/variables from numpy required for writing arrays.
# We need at least the functions introduced in version 1.9 of numpy. Here,
# we use the ones from numpy 1.10.2.
BUFFER_SIZE = 2 ** 18  # size of buffer for reading npz files in bytes
  148. def _read_bytes(fp, size, error_template="ran out of data"):
  149. """Read from file-like object until size bytes are read.
  150. TODO python2_drop: is it still needed? The docstring mentions python 2.6
  151. and it looks like this can be at least simplified ...
  152. Raises ValueError if not EOF is encountered before size bytes are read.
  153. Non-blocking objects only supported if they derive from io objects.
  154. Required as e.g. ZipExtFile in python 2.6 can return less data than
  155. requested.
  156. This function was taken from numpy/lib/format.py in version 1.10.2.
  157. Parameters
  158. ----------
  159. fp: file-like object
  160. size: int
  161. error_template: str
  162. Returns
  163. -------
  164. a bytes object
  165. The data read in bytes.
  166. """
  167. data = bytes()
  168. while True:
  169. # io files (default in python3) return None or raise on
  170. # would-block, python2 file will truncate, probably nothing can be
  171. # done about that. note that regular files can't be non-blocking
  172. try:
  173. r = fp.read(size - len(data))
  174. data += r
  175. if len(r) == 0 or len(data) == size:
  176. break
  177. except io.BlockingIOError:
  178. pass
  179. if len(data) != size:
  180. msg = "EOF: reading %s, expected %d bytes got %d"
  181. raise ValueError(msg % (error_template, size, len(data)))
  182. else:
  183. return data