Hello community,

here is the log from the commit of package python-bloscpack for 
openSUSE:Factory checked in at 2019-07-24 20:34:46
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-bloscpack (Old)
 and      /work/SRC/openSUSE:Factory/.python-bloscpack.new.4126 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-bloscpack"

Wed Jul 24 20:34:46 2019 rev:2 rq:718014 version:0.16.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-bloscpack/python-bloscpack.changes        
2018-11-27 10:42:22.768198323 +0100
+++ 
/work/SRC/openSUSE:Factory/.python-bloscpack.new.4126/python-bloscpack.changes  
    2019-07-24 20:34:47.418578989 +0200
@@ -1,0 +2,8 @@
+Tue Jul 23 20:35:51 UTC 2019 - Todd R <[email protected]>
+
+- Update to 0.16.0
+  * The Python API naming has been overhauled and a few functions have been
+    deprecated. Also the documentation for the Python API has been extended
+    to include more use cases and potential applications.
+
+-------------------------------------------------------------------

Old:
----
  bloscpack-0.15.0.tar.gz

New:
----
  bloscpack-0.16.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-bloscpack.spec ++++++
--- /var/tmp/diff_new_pack.rbZ3sk/_old  2019-07-24 20:34:47.918578934 +0200
+++ /var/tmp/diff_new_pack.rbZ3sk/_new  2019-07-24 20:34:47.922578934 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package python-bloscpack
 #
-# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -18,7 +18,7 @@
 
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 Name:           python-bloscpack
-Version:        0.15.0
+Version:        0.16.0
 Release:        0
 Summary:        Command line interface and serialization format for Blosc
 License:        MIT

++++++ bloscpack-0.15.0.tar.gz -> bloscpack-0.16.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/ANNOUNCE.rst 
new/bloscpack-0.16.0/ANNOUNCE.rst
--- old/bloscpack-0.15.0/ANNOUNCE.rst   2018-10-31 10:26:50.000000000 +0100
+++ new/bloscpack-0.16.0/ANNOUNCE.rst   2018-12-27 16:16:51.000000000 +0100
@@ -1,20 +1,18 @@
-===========================
-Announcing Bloscpack 0.15.0
-===========================
+============================
+Announcing Bloscpack v0.16.0
+============================
 
 What is new?
 ============
 
-Two new high-level API functions have been added:
-
-* 'pack_bytes_to_bytes'
-* 'unpack_bytes_from_bytes'
-
-As you might expect from the naming, these allow you to perform fully
-in-memory based compression and decompression via the bytes datatype.
-
-Additionally there are a  few bugfixes, support for python-blosc
-1.6.1 and support for Python 3.7.
+The Python API naming has been overhauled and a few functions have been
+deprecated. Also the documentation for the Python API has been extended
+to inlcude more uses cases and potential applications.
+
+A big thank you goes out to Daniel Stender from the Debian project for his
+continued efforts to package the Blosc stack -- including python-blosc -- for
+Debian. This also means it is likely that a recent version of
+bloscpack will be included in Buster.
 
 For more info, have a look at the changelog:
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/PKG-INFO 
new/bloscpack-0.16.0/PKG-INFO
--- old/bloscpack-0.15.0/PKG-INFO       2018-10-31 10:29:43.000000000 +0100
+++ new/bloscpack-0.16.0/PKG-INFO       2018-12-27 16:23:46.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: bloscpack
-Version: 0.15.0
+Version: 0.16.0
 Summary: Command line interface to and serialization format for Blosc
 Home-page: https://github.com/blosc/bloscpack
 Author: Valentin Haenel
@@ -65,9 +65,9 @@
         Dependencies
         ------------
         
-        * Pythons 2.7, 3.4, 3.5, 3.6 and 3.7
+        * Python 2.7, 3.4, 3.5, 3.6 or 3.7
         * `python-blosc  <https://github.com/Blosc/python-blosc>`_ (provides 
Blosc) and
-          `Numpy <http://www.numpy.org/>`_ (as listed in ``requirements.txt`` 
for
+          `Numpy <http://www.numpy.org/>`_ (as listed in ``requirements.txt``) 
for
           running the code
         * The Python packages listed in ``test_requirements.txt`` for testing 
and
           releasing
@@ -448,7 +448,160 @@
         Python API
         ----------
         
-        The Python API is still in flux, so this section is deliberately 
sparse.
+        Bloscpack has a versatile yet simple API consisting of a series of 
'arguments'
+        objects and high-level functions that can be invoked dependding on 
your input
+        and output needs.
+        
+        Nomenclature wise, Python 3 has done a lot for Bloscpack, because we 
always
+        need to represent compressed data as bytes deliberatey. This makes it 
easier
+        and more natural to distinguish between text, such a filenames and 
binary and
+        bytes objects such as compressed data.
+        
+        Arguments
+        ~~~~~~~~~
+        
+        The three argument types are:
+        
+        * ``BloscArgs``
+        * ``BloscpackArgs``
+        * ``MetadataArgs``
+        
+        as defined in ``bloscpack/args.py``.  Instantiating any of them will 
create an
+        object with the defaults setup. The defaults are defined in
+        ``bloscpack/defaults.py``. You can use these in the high-level 
functions listed
+        below.
+        
+        You can override any and all defaults by passing in the respective
+        keyword-arguments, for example:
+        
+        
+        .. code-block:: pycon
+        
+           >>> b = BloscArgs()               # will create a default args 
object
+           >>> b = BloscArgs(clevel=4)       # change compression level to 4
+           >>> b = BloscArgs(typesize=4,     # change the typesize to 4
+           >>> ...           clevel=9,       # change the compression level to 
9
+           >>> ...           shuffle=False,  # deactivate the shuffle filter
+           >>> ...           cname='lz4')    # let lz4 be the internal codec
+        
+        
+        .. code-block:: python
+        
+            class BloscArgs(MutableMappingObject):
+                """ Object to hold Blosc arguments.
+        
+                Parameters
+                ----------
+                typesize : int
+                    The typesize used
+                clevel : int
+                    Compression level
+                shuffle : boolean
+                    Whether or not to activate the shuffle filter
+                cname: str
+                    Name of the internal code to use
+        
+                """
+        
+        .. code-block:: python
+        
+            class BloscpackArgs(MutableMappingObject):
+                """ Object to hold BloscPack arguments.
+        
+                Parameters
+                ----------
+                offsets : boolean
+                    Whether to include space for offsets
+                checksum : str
+                    Name of the checksum to use or None/'None'
+                max_app_chunks : int or callable on number of chunks
+                    How much space to reserve in the offsets for chunks to be 
appended.
+        
+                """
+        
+        .. code-block:: python
+        
+            class MetadataArgs(MutableMappingObject):
+                """ Object to hold the metadata arguments.
+        
+                Parameters
+                ----------
+                magic_format : 8 bytes
+                    Format identifier for the metadata
+                meta_checksum : str
+                    Checksum to be used for the metadata
+                meta_codec : str
+                    Codec to be used to compress the metadata
+                meta_level : int
+                    Compression level for metadata
+                max_meta_size : int or callable on metadata size
+                    How much space to reserve for additional metadata
+        
+                """
+        
+        File / Bytes
+        ~~~~~~~~~~~~
+        
+        The following high-level functions exist for compressing and 
decompressing to
+        and from files and byte objects:
+        
+        
+        * ``pack_file_to_file``
+        * ``unpack_file_from_file``
+        * ``pack_bytes_to_file``
+        * ``unpack_bytes_from_file``
+        * ``pack_bytes_to_bytes``
+        * ``unpack_bytes_from_bytes``
+        
+        Beyond the target arguments such as the files and the bytes, each 
``pack_*``
+        function takes the following arguments:
+        
+        .. code-block::
+        
+            chunk_size : int
+                the desired chunk size in bytes
+            metadata : dict
+                the metadata dict
+            blosc_args : BloscArgs
+                blosc args
+            bloscpack_args : BloscpackArgs
+                bloscpack args
+            metadata_args : MetadataArgs
+                metadata args
+        
+        Below are their sigantures:
+        
+        .. code-block:: python
+        
+            def pack_file_to_file(in_file, out_file,
+                                  chunk_size=DEFAULT_CHUNK_SIZE,
+                                  metadata=None,
+                                  blosc_args=None,
+                                  bloscpack_args=None,
+                                  metadata_args=None):
+        
+            def unpack_file_from_file(in_file, out_file):
+        
+        
+            def pack_bytes_to_file(bytes_, out_file,
+                                   chunk_size=DEFAULT_CHUNK_SIZE,
+                                   metadata=None,
+                                   blosc_args=None,
+                                   bloscpack_args=None,
+                                   metadata_args=None):
+        
+            def unpack_bytes_from_file(compressed_file):
+        
+            def pack_bytes_to_bytes(bytes_,
+                                    chunk_size=DEFAULT_CHUNK_SIZE,
+                                    metadata=None,
+                                    blosc_args=None,
+                                    bloscpack_args=None,
+                                    metadata_args=None,
+                                    ):
+        
+        
+            def unpack_bytes_from_bytes(bytes_):
         
         Numpy
         ~~~~~
@@ -460,8 +613,8 @@
             >>> a = np.linspace(0, 1, 3e8)
             >>> print a.size, a.dtype
             300000000 float64
-            >>> bp.pack_ndarray_file(a, 'a.blp')
-            >>> b = bp.unpack_ndarray_file('a.blp')
+            >>> bp.pack_ndarray_to_file(a, 'a.blp')
+            >>> b = bp.unpack_ndarray_from_file('a.blp')
             >>> (a == b).all()
             True
         
@@ -506,8 +659,8 @@
         .. code-block:: pycon
         
             >>> a = np.linspace(0, 1, 3e8)
-            >>> c = pack_ndarray_str(a)
-            >>> b = unpack_ndarray_str(c)
+            >>> c = pack_ndarray_to_bytes(a)
+            >>> b = unpack_ndarray_from_bytes(c)
             >>> (a == b).all()
             True
         
@@ -516,11 +669,30 @@
         .. code-block:: pycon
         
             >>> a = np.linspace(0, 1, 3e8)
-            >>> c = pack_ndarray_str(a, blosc_args=BloscArgs(cname='lz4'))
-            >>> b = unpack_ndarray_str(c)
+            >>> c = pack_ndarray_to_bytes(a, blosc_args=BloscArgs(cname='lz4'))
+            >>> b = unpack_ndarray_from_bytes(c)
             >>> (a == b).all()
             True
         
+        
+        .. code-block:: python
+        
+            def pack_ndarray_to_file(ndarray, filename,
+                                     chunk_size=DEFAULT_CHUNK_SIZE,
+                                     blosc_args=None,
+                                     bloscpack_args=None,
+                                     metadata_args=None):
+        
+            def pack_ndarray_to_bytes(ndarray,
+                                      chunk_size=DEFAULT_CHUNK_SIZE,
+                                      blosc_args=None,
+                                      bloscpack_args=None,
+                                      metadata_args=None):
+        
+            def unpack_ndarray_from_file(filename):
+        
+            def unpack_ndarray_from_bytes(str_):
+        
         If you are interested in the performance of Bloscpack compared to other
         serialization formats for Numpy arrays, please look at the benchmarks 
presented
         in `the Bloscpack paper from the EuroScipy 2013 conference proceedings
@@ -1128,7 +1300,7 @@
         --------------------------------------
         
         #. Set the version as environment variable ``VERSION=vX.X.X``
-        #. Update the changelog and ANNOUNCE.rst
+        #. Update the changelog and ``ANNOUNCE.rst``
         #. Commit using ``git commit -m "$VERSION changelog and ANNOUNCE.rst"``
         #. Set the version number in ``bloscpack/version.py``
         #. Commit with ``git commit -m "$VERSION"``
@@ -1139,7 +1311,7 @@
         #. Push the tag to own github ``git push esc $VERSION``
         #. Make a source distribution using ``python setup.py sdist 
bdist_wheel``
         #. Upload to PyPi using ``twine upload dist/bloscpack-$VERSION*``
-        #. Bump version number to next dev version
+        #. Bump version number to next dev version and reset ``ANNOUNCE.rst``
         #. Announce release on the Blosc list
         #. Announce release via Twitter
         
@@ -1151,8 +1323,6 @@
         
         * Refactor monolithic readme into Sphinx and publish
         * Cleanup and double check the docstrings for the public API classes
-        * document library usage
-        * Announcement RST
         
         Command Line
         ~~~~~~~~~~~~
@@ -1218,6 +1388,11 @@
         Changelog
         ---------
         
+        * v0.16.0     - Thu 27 Dec 2018
+        
+          * Update of Python API and docs
+          * various minor fixes
+        
         * v0.15.0     - Wed 31 Oct 2018
         
           * Halloween Release!
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/README.rst 
new/bloscpack-0.16.0/README.rst
--- old/bloscpack-0.15.0/README.rst     2018-10-31 10:20:59.000000000 +0100
+++ new/bloscpack-0.16.0/README.rst     2018-12-27 16:18:48.000000000 +0100
@@ -57,9 +57,9 @@
 Dependencies
 ------------
 
-* Pythons 2.7, 3.4, 3.5, 3.6 and 3.7
+* Python 2.7, 3.4, 3.5, 3.6 or 3.7
 * `python-blosc  <https://github.com/Blosc/python-blosc>`_ (provides Blosc) and
-  `Numpy <http://www.numpy.org/>`_ (as listed in ``requirements.txt`` for
+  `Numpy <http://www.numpy.org/>`_ (as listed in ``requirements.txt``) for
   running the code
 * The Python packages listed in ``test_requirements.txt`` for testing and
   releasing
@@ -440,7 +440,160 @@
 Python API
 ----------
 
-The Python API is still in flux, so this section is deliberately sparse.
+Bloscpack has a versatile yet simple API consisting of a series of 'arguments'
+objects and high-level functions that can be invoked dependding on your input
+and output needs.
+
+Nomenclature wise, Python 3 has done a lot for Bloscpack, because we always
+need to represent compressed data as bytes deliberatey. This makes it easier
+and more natural to distinguish between text, such a filenames and binary and
+bytes objects such as compressed data.
+
+Arguments
+~~~~~~~~~
+
+The three argument types are:
+
+* ``BloscArgs``
+* ``BloscpackArgs``
+* ``MetadataArgs``
+
+as defined in ``bloscpack/args.py``.  Instantiating any of them will create an
+object with the defaults setup. The defaults are defined in
+``bloscpack/defaults.py``. You can use these in the high-level functions listed
+below.
+
+You can override any and all defaults by passing in the respective
+keyword-arguments, for example:
+
+
+.. code-block:: pycon
+
+   >>> b = BloscArgs()               # will create a default args object
+   >>> b = BloscArgs(clevel=4)       # change compression level to 4
+   >>> b = BloscArgs(typesize=4,     # change the typesize to 4
+   >>> ...           clevel=9,       # change the compression level to 9
+   >>> ...           shuffle=False,  # deactivate the shuffle filter
+   >>> ...           cname='lz4')    # let lz4 be the internal codec
+
+
+.. code-block:: python
+
+    class BloscArgs(MutableMappingObject):
+        """ Object to hold Blosc arguments.
+
+        Parameters
+        ----------
+        typesize : int
+            The typesize used
+        clevel : int
+            Compression level
+        shuffle : boolean
+            Whether or not to activate the shuffle filter
+        cname: str
+            Name of the internal code to use
+
+        """
+
+.. code-block:: python
+
+    class BloscpackArgs(MutableMappingObject):
+        """ Object to hold BloscPack arguments.
+
+        Parameters
+        ----------
+        offsets : boolean
+            Whether to include space for offsets
+        checksum : str
+            Name of the checksum to use or None/'None'
+        max_app_chunks : int or callable on number of chunks
+            How much space to reserve in the offsets for chunks to be appended.
+
+        """
+
+.. code-block:: python
+
+    class MetadataArgs(MutableMappingObject):
+        """ Object to hold the metadata arguments.
+
+        Parameters
+        ----------
+        magic_format : 8 bytes
+            Format identifier for the metadata
+        meta_checksum : str
+            Checksum to be used for the metadata
+        meta_codec : str
+            Codec to be used to compress the metadata
+        meta_level : int
+            Compression level for metadata
+        max_meta_size : int or callable on metadata size
+            How much space to reserve for additional metadata
+
+        """
+
+File / Bytes
+~~~~~~~~~~~~
+
+The following high-level functions exist for compressing and decompressing to
+and from files and byte objects:
+
+
+* ``pack_file_to_file``
+* ``unpack_file_from_file``
+* ``pack_bytes_to_file``
+* ``unpack_bytes_from_file``
+* ``pack_bytes_to_bytes``
+* ``unpack_bytes_from_bytes``
+
+Beyond the target arguments such as the files and the bytes, each ``pack_*``
+function takes the following arguments:
+
+.. code-block::
+
+    chunk_size : int
+        the desired chunk size in bytes
+    metadata : dict
+        the metadata dict
+    blosc_args : BloscArgs
+        blosc args
+    bloscpack_args : BloscpackArgs
+        bloscpack args
+    metadata_args : MetadataArgs
+        metadata args
+
+Below are their sigantures:
+
+.. code-block:: python
+
+    def pack_file_to_file(in_file, out_file,
+                          chunk_size=DEFAULT_CHUNK_SIZE,
+                          metadata=None,
+                          blosc_args=None,
+                          bloscpack_args=None,
+                          metadata_args=None):
+
+    def unpack_file_from_file(in_file, out_file):
+
+
+    def pack_bytes_to_file(bytes_, out_file,
+                           chunk_size=DEFAULT_CHUNK_SIZE,
+                           metadata=None,
+                           blosc_args=None,
+                           bloscpack_args=None,
+                           metadata_args=None):
+
+    def unpack_bytes_from_file(compressed_file):
+
+    def pack_bytes_to_bytes(bytes_,
+                            chunk_size=DEFAULT_CHUNK_SIZE,
+                            metadata=None,
+                            blosc_args=None,
+                            bloscpack_args=None,
+                            metadata_args=None,
+                            ):
+
+
+    def unpack_bytes_from_bytes(bytes_):
 
 Numpy
 ~~~~~
@@ -452,8 +605,8 @@
     >>> a = np.linspace(0, 1, 3e8)
     >>> print a.size, a.dtype
     300000000 float64
-    >>> bp.pack_ndarray_file(a, 'a.blp')
-    >>> b = bp.unpack_ndarray_file('a.blp')
+    >>> bp.pack_ndarray_to_file(a, 'a.blp')
+    >>> b = bp.unpack_ndarray_from_file('a.blp')
     >>> (a == b).all()
     True
 
@@ -498,8 +651,8 @@
 .. code-block:: pycon
 
     >>> a = np.linspace(0, 1, 3e8)
-    >>> c = pack_ndarray_str(a)
-    >>> b = unpack_ndarray_str(c)
+    >>> c = pack_ndarray_to_bytes(a)
+    >>> b = unpack_ndarray_from_bytes(c)
     >>> (a == b).all()
     True
 
@@ -508,11 +661,30 @@
 .. code-block:: pycon
 
     >>> a = np.linspace(0, 1, 3e8)
-    >>> c = pack_ndarray_str(a, blosc_args=BloscArgs(cname='lz4'))
-    >>> b = unpack_ndarray_str(c)
+    >>> c = pack_ndarray_to_bytes(a, blosc_args=BloscArgs(cname='lz4'))
+    >>> b = unpack_ndarray_from_bytes(c)
     >>> (a == b).all()
     True
 
+
+.. code-block:: python
+
+    def pack_ndarray_to_file(ndarray, filename,
+                             chunk_size=DEFAULT_CHUNK_SIZE,
+                             blosc_args=None,
+                             bloscpack_args=None,
+                             metadata_args=None):
+
+    def pack_ndarray_to_bytes(ndarray,
+                              chunk_size=DEFAULT_CHUNK_SIZE,
+                              blosc_args=None,
+                              bloscpack_args=None,
+                              metadata_args=None):
+
+    def unpack_ndarray_from_file(filename):
+
+    def unpack_ndarray_from_bytes(str_):
+
 If you are interested in the performance of Bloscpack compared to other
 serialization formats for Numpy arrays, please look at the benchmarks presented
 in `the Bloscpack paper from the EuroScipy 2013 conference proceedings
@@ -1120,7 +1292,7 @@
 --------------------------------------
 
 #. Set the version as environment variable ``VERSION=vX.X.X``
-#. Update the changelog and ANNOUNCE.rst
+#. Update the changelog and ``ANNOUNCE.rst``
 #. Commit using ``git commit -m "$VERSION changelog and ANNOUNCE.rst"``
 #. Set the version number in ``bloscpack/version.py``
 #. Commit with ``git commit -m "$VERSION"``
@@ -1131,7 +1303,7 @@
 #. Push the tag to own github ``git push esc $VERSION``
 #. Make a source distribution using ``python setup.py sdist bdist_wheel``
 #. Upload to PyPi using ``twine upload dist/bloscpack-$VERSION*``
-#. Bump version number to next dev version
+#. Bump version number to next dev version and reset ``ANNOUNCE.rst``
 #. Announce release on the Blosc list
 #. Announce release via Twitter
 
@@ -1143,8 +1315,6 @@
 
 * Refactor monolithic readme into Sphinx and publish
 * Cleanup and double check the docstrings for the public API classes
-* document library usage
-* Announcement RST
 
 Command Line
 ~~~~~~~~~~~~
@@ -1210,6 +1380,11 @@
 Changelog
 ---------
 
+* v0.16.0     - Thu 27 Dec 2018
+
+  * Update of Python API and docs
+  * various minor fixes
+
 * v0.15.0     - Wed 31 Oct 2018
 
   * Halloween Release!
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/bloscpack/__init__.py 
new/bloscpack-0.16.0/bloscpack/__init__.py
--- old/bloscpack-0.15.0/bloscpack/__init__.py  2018-10-30 22:24:40.000000000 
+0100
+++ new/bloscpack-0.16.0/bloscpack/__init__.py  2018-11-06 22:47:19.000000000 
+0100
@@ -10,13 +10,25 @@
                    BloscpackArgs,
                    MetadataArgs,
                    )
+from .file_io import (pack_file_to_file,
+                      unpack_file_from_file,
+                      pack_bytes_to_file,
+                      unpack_bytes_from_file,
+                      pack_bytes_to_bytes,
+                      unpack_bytes_from_bytes,
+                      )
+# deprecated
 from .file_io import (pack_file,
                       unpack_file,
                       pack_bytes_file,
                       unpack_bytes_file,
-                      pack_bytes_to_bytes,
-                      unpack_bytes_from_bytes,
                       )
+from .numpy_io import (pack_ndarray_to_file,
+                       unpack_ndarray_from_file,
+                       pack_ndarray_to_bytes,
+                       unpack_ndarray_from_bytes,
+                       )
+# deprecated
 from .numpy_io import (pack_ndarray_file,
                        unpack_ndarray_file,
                        pack_ndarray_str,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/bloscpack/cli.py 
new/bloscpack-0.16.0/bloscpack/cli.py
--- old/bloscpack-0.15.0/bloscpack/cli.py       2018-05-24 16:17:26.000000000 
+0200
+++ new/bloscpack-0.16.0/bloscpack/cli.py       2018-11-06 22:47:19.000000000 
+0100
@@ -41,8 +41,8 @@
                          FormatVersionMismatch,
                          ChecksumMismatch,
                          )
-from .file_io import (pack_file,
-                      unpack_file,
+from .file_io import (pack_file_to_file,
+                      unpack_file_from_file,
                       _read_beginning,
                       _read_compressed_chunk_fp,
                       )
@@ -453,11 +453,12 @@
         bloscpack_args = BloscpackArgs(offsets=args.offsets,
                                        checksum=args.checksum)
         try:
-            pack_file(in_file, out_file, chunk_size=args.chunk_size,
-                      metadata=metadata,
-                      blosc_args=blosc_args,
-                      bloscpack_args=bloscpack_args,
-                      metadata_args=MetadataArgs())
+            pack_file_to_file(in_file, out_file,
+                              chunk_size=args.chunk_size,
+                              metadata=metadata,
+                              blosc_args=blosc_args,
+                              bloscpack_args=bloscpack_args,
+                              metadata_args=MetadataArgs())
         except ChunkingException as ce:
             log.error(str(ce))
     elif args.subcommand in ['decompress', 'd']:
@@ -468,7 +469,7 @@
         except FileNotFound as fnf:
             log.error(str(fnf))
         try:
-            metadata = unpack_file(in_file, out_file)
+            metadata = unpack_file_from_file(in_file, out_file)
             if metadata:
                 log_metadata(metadata)
         except FormatVersionMismatch as fvm:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/bloscpack/file_io.py 
new/bloscpack-0.16.0/bloscpack/file_io.py
--- old/bloscpack-0.15.0/bloscpack/file_io.py   2018-10-30 22:24:40.000000000 
+0100
+++ new/bloscpack-0.16.0/bloscpack/file_io.py   2018-11-06 22:47:19.000000000 
+0100
@@ -11,6 +11,7 @@
 import blosc
 import six
 from six.moves import xrange
+from deprecated import deprecated
 
 
 from .args import (calculate_nchunks,
@@ -416,11 +417,13 @@
         return offset, compressed, digest
 
 
-def pack_file(in_file, out_file, chunk_size=DEFAULT_CHUNK_SIZE, metadata=None,
-              blosc_args=None,
-              bloscpack_args=None,
-              metadata_args=None):
-    """ Main function for compressing a file.
+def pack_file_to_file(in_file, out_file,
+                      chunk_size=DEFAULT_CHUNK_SIZE,
+                      metadata=None,
+                      blosc_args=None,
+                      bloscpack_args=None,
+                      metadata_args=None):
+    """ Compress a file to a file.
 
     Parameters
     ----------
@@ -467,8 +470,13 @@
     log.verbose('compression ratio: %f' % (in_file_size/out_file_size))
 
 
-def unpack_file(in_file, out_file):
-    """ Main function for decompressing a file.
+pack_file = deprecated(pack_file_to_file,
+                       version="0.16.0",
+                       reason="Use 'pack_file_to_file' instead")
+
+
+def unpack_file_from_file(in_file, out_file):
+    """ Uncompress a file from a file.
 
     Parameters
     ----------
@@ -479,7 +487,7 @@
 
     Returns
     -------
-    metadata : str
+    metadata : bytes
         the metadata contained in the file if present
 
     Raises
@@ -502,10 +510,42 @@
     return source.metadata
 
 
-def pack_bytes_file(bytes_, out_file, chunk_size=DEFAULT_CHUNK_SIZE, 
metadata=None,
-                    blosc_args=None,
-                    bloscpack_args=None,
-                    metadata_args=None):
+unpack_file = deprecated(unpack_file_from_file,
+                         version="0.16.0",
+                         reason="Use 'unpack_file_from_file' instead")
+
+
+def pack_bytes_to_file(bytes_, out_file,
+                       chunk_size=DEFAULT_CHUNK_SIZE,
+                       metadata=None,
+                       blosc_args=None,
+                       bloscpack_args=None,
+                       metadata_args=None):
+    """ Compress bytes to file.
+
+    Parameters
+    ----------
+    bytes_ : bytes
+        the bytes to compress
+    out_file : str
+        the name of the output file
+    chunk_size : int
+        the desired chunk size in bytes
+    metadata : dict
+        the metadata dict
+    blosc_args : BloscArgs
+        blosc args
+    bloscpack_args : BloscpackArgs
+        bloscpack args
+    metadata_args : MetadataArgs
+        metadata args
+
+    Raises
+    ------
+
+    ChunkingException
+        if there was a problem caculating the chunks
+    """
     bytes_size = len(bytes_)
     log.verbose('input bytes size: %s' % double_pretty_size(bytes_size))
     # calculate chunk sizes
@@ -525,13 +565,45 @@
     log.verbose('compression ratio: %f' % (bytes_size/out_file_size))
 
 
-def unpack_bytes_file(compressed_file):
+pack_bytes_file = deprecated(pack_bytes_to_file,
+                             version="0.16.0",
+                             reason="Use 'pack_bytes_to_file' instead")
+
+
+def unpack_bytes_from_file(compressed_file):
+    """ Uncompress bytes from a file.
+
+    Parameters
+    ----------
+    compressed_file : str
+        the name of the input file
+
+    Returns
+    -------
+    bytes_ : bytes_
+        the decompressed bytes
+    metadata : bytes
+        the metadata contained in the file if present
+
+    Raises
+    ------
+
+    FormatVersionMismatch
+        if the file has an unmatching format version number
+    ChecksumMismatch
+        if any of the chunks fail to produce the correct checksum
+    """
     sio = StringIO()
     sink = PlainFPSink(sio)
     with open(compressed_file, 'rb') as fp:
         source = CompressedFPSource(fp)
         unpack(source, sink)
-    return sio.getvalue()
+        return sio.getvalue(), source.metadata
+
+
+unpack_bytes_file = deprecated(unpack_bytes_from_file,
+                               version="0.16.0",
+                               reason="Use 'unpack_bytes_from_file' instead")
 
 
 def pack_bytes_to_bytes(bytes_,
@@ -541,6 +613,37 @@
                         bloscpack_args=None,
                         metadata_args=None,
                         ):
+
+    """ Compress bytes to bytes_
+
+    Parameters
+    ----------
+    bytes_ : bytes
+        the bytes to compress
+    out_file : str
+        the name of the output file
+    chunk_size : int
+        the desired chunk size in bytes
+    metadata : dict
+        the metadata dict
+    blosc_args : BloscArgs
+        blosc args
+    bloscpack_args : BloscpackArgs
+        bloscpack args
+    metadata_args : MetadataArgs
+        metadata args
+
+    Returns
+    -------
+    bytes_ : bytes
+        the compressed bytes
+
+    Raises
+    ------
+
+    ChunkingException
+        if there was a problem caculating the chunks
+    """
     bytes_size = len(bytes_)
     log.verbose('input bytes size: %s' % double_pretty_size(bytes_size))
     nchunks, chunk_size, last_chunk_size = \
@@ -561,8 +664,30 @@
 
 
 def unpack_bytes_from_bytes(bytes_):
+    """ Uncompress bytes from bytes
+
+    Parameters
+    ----------
+    bytes_: bytes
+        input bytes
+
+    Returns
+    -------
+    bytes_ : bytes_
+        the decompressed bytes
+    metadata : bytes
+        the metadata contained in the file if present
+
+    Raises
+    ------
+
+    FormatVersionMismatch
+        if the file has an unmatching format version number
+    ChecksumMismatch
+        if any of the chunks fail to produce the correct checksum
+    """
     source = CompressedFPSource(StringIO(bytes_))
     sio = StringIO()
     sink = PlainFPSink(sio)
     unpack(source, sink)
-    return sio.getvalue()
+    return sio.getvalue(), source.metadata
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/bloscpack/numpy_io.py 
new/bloscpack-0.16.0/bloscpack/numpy_io.py
--- old/bloscpack-0.15.0/bloscpack/numpy_io.py  2018-10-30 22:24:51.000000000 
+0100
+++ new/bloscpack-0.16.0/bloscpack/numpy_io.py  2018-11-06 22:47:19.000000000 
+0100
@@ -9,6 +9,7 @@
 import numpy
 import six
 from six.moves import xrange
+from deprecated import deprecated
 
 
 from .abstract_io import (pack,
@@ -171,6 +172,7 @@
 
     The 'typesize' value of 'blosc_args' will be silently ignored and replaced
     with the itemsize of the Numpy array's dtype.
+
     """
     if ndarray.dtype.hasobject:
         raise ObjectNumpyArrayRejection
@@ -193,11 +195,32 @@
     #log.verbose('compression ratio: %f' % (out_file_size/source.size))
 
 
-def pack_ndarray_file(ndarray, filename,
-                      chunk_size=DEFAULT_CHUNK_SIZE,
-                      blosc_args=None,
-                      bloscpack_args=None,
-                      metadata_args=None):
+def pack_ndarray_to_file(ndarray, filename,
+                         chunk_size=DEFAULT_CHUNK_SIZE,
+                         blosc_args=None,
+                         bloscpack_args=None,
+                         metadata_args=None):
+    """ Serialize a Numpy array to a file.
+
+    Parameters
+    ----------
+    ndarray : ndarray
+        the numpy array to serialize
+    filename : str
+        the file to compress to
+    blosc_args : BloscArgs
+        blosc args
+    bloscpack_args : BloscpackArgs
+        bloscpack args
+    metadata_args : MetadataArgs
+        the args for the metadata
+
+    Notes
+    -----
+    The 'typesize' value of 'blosc_args' will be silently ignored and replaced
+    with the itemsize of the Numpy array's dtype.
+
+    """
     with open(filename, 'wb') as fp:
         sink = CompressedFPSink(fp)
         pack_ndarray(ndarray, sink,
@@ -207,11 +230,43 @@
                      metadata_args=metadata_args)
 
 
-def pack_ndarray_str(ndarray,
-                     chunk_size=DEFAULT_CHUNK_SIZE,
-                     blosc_args=None,
-                     bloscpack_args=None,
-                     metadata_args=None):
+pack_ndarray_file = deprecated(pack_ndarray_to_file,
+                               version='0.16.0',
+                               reason="Use 'pack_ndarray_to_file' instead."
+                               )
+
+
+def pack_ndarray_to_bytes(ndarray,
+                          chunk_size=DEFAULT_CHUNK_SIZE,
+                          blosc_args=None,
+                          bloscpack_args=None,
+                          metadata_args=None):
+    """ Serialize a Numpy array to bytes_
+
+    Parameters
+    ----------
+    ndarray : ndarray
+        the numpy array to serialize
+    chunk_size : int
+        the desired chunk size in bytes
+    blosc_args : BloscArgs
+        blosc args
+    bloscpack_args : BloscpackArgs
+        bloscpack args
+    metadata_args : MetadataArgs
+        the args for the metadata
+
+    Returns
+    -------
+    bytes_ : bytes
+        compressed bytes
+
+    Notes
+    -----
+    The 'typesize' value of 'blosc_args' will be silently ignored and replaced
+    with the itemsize of the Numpy array's dtype.
+
+    """
     sio = StringIO()
     sink = CompressedFPSink(sio)
     pack_ndarray(ndarray, sink,
@@ -222,6 +277,12 @@
     return sio.getvalue()
 
 
+pack_ndarray_str = deprecated(pack_ndarray_to_bytes,
+                              version='0.16.0',
+                              reason="Use 'pack_ndarray_to_bytes' instead."
+                              )
+
+
 def unpack_ndarray(source):
     """ Deserialize a Numpy array.
 
@@ -246,12 +307,58 @@
     return sink.ndarray
 
 
-def unpack_ndarray_file(filename):
+def unpack_ndarray_from_file(filename):
+    """ Deserialize a Numpy array from a file.
+
+    Parameters
+    ----------
+    filename : str
+        the file to decompress from
+
+    Returns
+    -------
+    ndarray : ndarray
+        the Numpy array
+
+    Raises
+    ------
+    NotANumpyArray
+        if the source doesn't seem to contain a Numpy array
+    """
     source = CompressedFPSource(open(filename, 'rb'))
     return unpack_ndarray(source)
 
 
-def unpack_ndarray_str(str_):
-    sio = StringIO(str_)
+unpack_ndarray_file = deprecated(unpack_ndarray_from_file,
+                                 version='0.16.0',
+                                 reason="Use 'unpack_ndarray_from_file' instead."
+                                 )
+
+
+def unpack_ndarray_from_bytes(bytes_):
+    """ Deserialize a Numpy array from bytes.
+
+    Parameters
+    ----------
+    bytes_ : bytes
+        the bytes to decompress from
+
+    Returns
+    -------
+    ndarray : ndarray
+        the Numpy array
+
+    Raises
+    ------
+    NotANumpyArray
+        if the source doesn't seem to contain a Numpy array
+    """
+    sio = StringIO(bytes_)
     source = CompressedFPSource(sio)
     return unpack_ndarray(source)
+
+
+unpack_ndarray_str = deprecated(unpack_ndarray_from_bytes,
+                                version='0.16.0',
+                                reason="Use 'unpack_ndarray_from_bytes' instead."
+                                )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/bloscpack/version.py 
new/bloscpack-0.16.0/bloscpack/version.py
--- old/bloscpack-0.15.0/bloscpack/version.py   2018-10-31 10:28:36.000000000 
+0100
+++ new/bloscpack-0.16.0/bloscpack/version.py   2018-12-27 16:19:20.000000000 
+0100
@@ -2,5 +2,5 @@
 # -*- coding: utf-8 -*-
 # vim :set ft=py:
 
-__version__ = '0.15.0'
+__version__ = '0.16.0'
 __author__ = 'Valentin Haenel <[email protected]>'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/bloscpack.egg-info/PKG-INFO 
new/bloscpack-0.16.0/bloscpack.egg-info/PKG-INFO
--- old/bloscpack-0.15.0/bloscpack.egg-info/PKG-INFO    2018-10-31 
10:29:43.000000000 +0100
+++ new/bloscpack-0.16.0/bloscpack.egg-info/PKG-INFO    2018-12-27 
16:23:46.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: bloscpack
-Version: 0.15.0
+Version: 0.16.0
 Summary: Command line interface to and serialization format for Blosc
 Home-page: https://github.com/blosc/bloscpack
 Author: Valentin Haenel
@@ -65,9 +65,9 @@
         Dependencies
         ------------
         
-        * Pythons 2.7, 3.4, 3.5, 3.6 and 3.7
+        * Python 2.7, 3.4, 3.5, 3.6 or 3.7
         * `python-blosc  <https://github.com/Blosc/python-blosc>`_ (provides 
Blosc) and
-          `Numpy <http://www.numpy.org/>`_ (as listed in ``requirements.txt`` 
for
+          `Numpy <http://www.numpy.org/>`_ (as listed in ``requirements.txt``) 
for
           running the code
         * The Python packages listed in ``test_requirements.txt`` for testing 
and
           releasing
@@ -448,7 +448,160 @@
         Python API
         ----------
         
-        The Python API is still in flux, so this section is deliberately 
sparse.
+        Bloscpack has a versatile yet simple API consisting of a series of 
'arguments'
+        objects and high-level functions that can be invoked depending on 
your input
+        and output needs.
+        
+        Nomenclature wise, Python 3 has done a lot for Bloscpack, because we 
always
+        need to represent compressed data as bytes deliberately. This makes it 
easier
+        and more natural to distinguish between text, such as filenames, and 
binary and
+        bytes objects such as compressed data.
+        
+        Arguments
+        ~~~~~~~~~
+        
+        The three argument types are:
+        
+        * ``BloscArgs``
+        * ``BloscpackArgs``
+        * ``MetadataArgs``
+        
+        as defined in ``bloscpack/args.py``.  Instantiating any of them will 
create an
+        object with the defaults setup. The defaults are defined in
+        ``bloscpack/defaults.py``. You can use these in the high-level 
functions listed
+        below.
+        
+        You can override any and all defaults by passing in the respective
+        keyword-arguments, for example:
+        
+        
+        .. code-block:: pycon
+        
+           >>> b = BloscArgs()               # will create a default args 
object
+           >>> b = BloscArgs(clevel=4)       # change compression level to 4
+           >>> b = BloscArgs(typesize=4,     # change the typesize to 4
+           >>> ...           clevel=9,       # change the compression level to 
9
+           >>> ...           shuffle=False,  # deactivate the shuffle filter
+           >>> ...           cname='lz4')    # let lz4 be the internal codec
+        
+        
+        .. code-block:: python
+        
+            class BloscArgs(MutableMappingObject):
+                """ Object to hold Blosc arguments.
+        
+                Parameters
+                ----------
+                typesize : int
+                    The typesize used
+                clevel : int
+                    Compression level
+                shuffle : boolean
+                    Whether or not to activate the shuffle filter
+                cname: str
+                    Name of the internal code to use
+        
+                """
+        
+        .. code-block:: python
+        
+            class BloscpackArgs(MutableMappingObject):
+                """ Object to hold BloscPack arguments.
+        
+                Parameters
+                ----------
+                offsets : boolean
+                    Whether to include space for offsets
+                checksum : str
+                    Name of the checksum to use or None/'None'
+                max_app_chunks : int or callable on number of chunks
+                    How much space to reserve in the offsets for chunks to be 
appended.
+        
+                """
+        
+        .. code-block:: python
+        
+            class MetadataArgs(MutableMappingObject):
+                """ Object to hold the metadata arguments.
+        
+                Parameters
+                ----------
+                magic_format : 8 bytes
+                    Format identifier for the metadata
+                meta_checksum : str
+                    Checksum to be used for the metadata
+                meta_codec : str
+                    Codec to be used to compress the metadata
+                meta_level : int
+                    Compression level for metadata
+                max_meta_size : int or callable on metadata size
+                    How much space to reserve for additional metadata
+        
+                """
+        
+        File / Bytes
+        ~~~~~~~~~~~~
+        
+        The following high-level functions exist for compressing and 
decompressing to
+        and from files and byte objects:
+        
+        
+        * ``pack_file_to_file``
+        * ``unpack_file_from_file``
+        * ``pack_bytes_to_file``
+        * ``unpack_bytes_from_file``
+        * ``pack_bytes_to_bytes``
+        * ``unpack_bytes_from_bytes``
+        
+        Beyond the target arguments such as the files and the bytes, each 
``pack_*``
+        function takes the following arguments:
+        
+        .. code-block::
+        
+            chunk_size : int
+                the desired chunk size in bytes
+            metadata : dict
+                the metadata dict
+            blosc_args : BloscArgs
+                blosc args
+            bloscpack_args : BloscpackArgs
+                bloscpack args
+            metadata_args : MetadataArgs
+                metadata args
+        
+        Below are their signatures:
+        
+        .. code-block:: python
+        
+            def pack_file_to_file(in_file, out_file,
+                                  chunk_size=DEFAULT_CHUNK_SIZE,
+                                  metadata=None,
+                                  blosc_args=None,
+                                  bloscpack_args=None,
+                                  metadata_args=None):
+        
+            def unpack_file_from_file(in_file, out_file):
+        
+        
+            def pack_bytes_to_file(bytes_, out_file,
+                                   chunk_size=DEFAULT_CHUNK_SIZE,
+                                   metadata=None,
+                                   blosc_args=None,
+                                   bloscpack_args=None,
+                                   metadata_args=None):
+        
+            def unpack_bytes_from_file(compressed_file):
+        
+            def pack_bytes_to_bytes(bytes_,
+                                    chunk_size=DEFAULT_CHUNK_SIZE,
+                                    metadata=None,
+                                    blosc_args=None,
+                                    bloscpack_args=None,
+                                    metadata_args=None,
+                                    ):
+        
+        
+            def unpack_bytes_from_bytes(bytes_):
         
         Numpy
         ~~~~~
@@ -460,8 +613,8 @@
             >>> a = np.linspace(0, 1, 3e8)
             >>> print a.size, a.dtype
             300000000 float64
-            >>> bp.pack_ndarray_file(a, 'a.blp')
-            >>> b = bp.unpack_ndarray_file('a.blp')
+            >>> bp.pack_ndarray_to_file(a, 'a.blp')
+            >>> b = bp.unpack_ndarray_from_file('a.blp')
             >>> (a == b).all()
             True
         
@@ -506,8 +659,8 @@
         .. code-block:: pycon
         
             >>> a = np.linspace(0, 1, 3e8)
-            >>> c = pack_ndarray_str(a)
-            >>> b = unpack_ndarray_str(c)
+            >>> c = pack_ndarray_to_bytes(a)
+            >>> b = unpack_ndarray_from_bytes(c)
             >>> (a == b).all()
             True
         
@@ -516,11 +669,30 @@
         .. code-block:: pycon
         
             >>> a = np.linspace(0, 1, 3e8)
-            >>> c = pack_ndarray_str(a, blosc_args=BloscArgs(cname='lz4'))
-            >>> b = unpack_ndarray_str(c)
+            >>> c = pack_ndarray_to_bytes(a, blosc_args=BloscArgs(cname='lz4'))
+            >>> b = unpack_ndarray_from_bytes(c)
             >>> (a == b).all()
             True
         
+        
+        .. code-block:: python
+        
+            def pack_ndarray_to_file(ndarray, filename,
+                                     chunk_size=DEFAULT_CHUNK_SIZE,
+                                     blosc_args=None,
+                                     bloscpack_args=None,
+                                     metadata_args=None):
+        
+            def pack_ndarray_to_bytes(ndarray,
+                                      chunk_size=DEFAULT_CHUNK_SIZE,
+                                      blosc_args=None,
+                                      bloscpack_args=None,
+                                      metadata_args=None):
+        
+            def unpack_ndarray_from_file(filename):
+        
+            def unpack_ndarray_from_bytes(bytes_):
+        
         If you are interested in the performance of Bloscpack compared to other
         serialization formats for Numpy arrays, please look at the benchmarks 
presented
         in `the Bloscpack paper from the EuroScipy 2013 conference proceedings
@@ -1128,7 +1300,7 @@
         --------------------------------------
         
         #. Set the version as environment variable ``VERSION=vX.X.X``
-        #. Update the changelog and ANNOUNCE.rst
+        #. Update the changelog and ``ANNOUNCE.rst``
         #. Commit using ``git commit -m "$VERSION changelog and ANNOUNCE.rst"``
         #. Set the version number in ``bloscpack/version.py``
         #. Commit with ``git commit -m "$VERSION"``
@@ -1139,7 +1311,7 @@
         #. Push the tag to own github ``git push esc $VERSION``
         #. Make a source distribution using ``python setup.py sdist 
bdist_wheel``
         #. Upload to PyPi using ``twine upload dist/bloscpack-$VERSION*``
-        #. Bump version number to next dev version
+        #. Bump version number to next dev version and reset ``ANNOUNCE.rst``
         #. Announce release on the Blosc list
         #. Announce release via Twitter
         
@@ -1151,8 +1323,6 @@
         
         * Refactor monolithic readme into Sphinx and publish
         * Cleanup and double check the docstrings for the public API classes
-        * document library usage
-        * Announcement RST
         
         Command Line
         ~~~~~~~~~~~~
@@ -1218,6 +1388,11 @@
         Changelog
         ---------
         
+        * v0.16.0     - Thu 27 Dec 2018
+        
+          * Update of Python API and docs
+          * various minor fixes
+        
         * v0.15.0     - Wed 31 Oct 2018
         
           * Halloween Release!
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/bloscpack.egg-info/requires.txt 
new/bloscpack-0.16.0/bloscpack.egg-info/requires.txt
--- old/bloscpack-0.15.0/bloscpack.egg-info/requires.txt        2018-10-31 
10:29:43.000000000 +0100
+++ new/bloscpack-0.16.0/bloscpack.egg-info/requires.txt        2018-12-27 
16:23:46.000000000 +0100
@@ -1,12 +1,13 @@
 blosc
 numpy
 six
+deprecated
 
 [tests]
-coverage
-coveralls
+nose
 cram>=0.6
 mock
-nose
+coverage
+coveralls
 twine
 wheel
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/setup.py 
new/bloscpack-0.16.0/setup.py
--- old/bloscpack-0.15.0/setup.py       2018-10-31 10:05:49.000000000 +0100
+++ new/bloscpack-0.16.0/setup.py       2018-12-05 22:53:38.000000000 +0100
@@ -4,8 +4,9 @@
 
 from setuptools import setup
 import sys
+import io
 
-with open('README.rst') as f:
+with io.open('README.rst', encoding='utf-8') as f:
     long_description = f.read()
 
 with open('bloscpack/version.py') as f:
@@ -15,6 +16,7 @@
     'blosc',
     'numpy',
     'six',
+    'deprecated',
 ]
 
 tests_require = [
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/test/test_append.py 
new/bloscpack-0.16.0/test/test_append.py
--- old/bloscpack-0.15.0/test/test_append.py    2018-10-30 22:47:39.000000000 
+0100
+++ new/bloscpack-0.16.0/test/test_append.py    2018-11-06 22:47:19.000000000 
+0100
@@ -37,8 +37,8 @@
                                PlainFPSink,
                                CompressedFPSource,
                                CompressedFPSink,
-                               pack_file,
-                               unpack_file,
+                               pack_file_to_file,
+                               unpack_file_from_file,
                                _read_beginning,
                                _read_compressed_chunk_fp,
                                _write_metadata,
@@ -149,9 +149,9 @@
 def test_append():
     with create_tmp_files() as (tdir, in_file, out_file, dcmp_file):
         create_array(1, in_file)
-        pack_file(in_file, out_file)
+        pack_file_to_file(in_file, out_file)
         append(out_file, in_file)
-        unpack_file(out_file, dcmp_file)
+        unpack_file_from_file(out_file, dcmp_file)
         in_content = open(in_file, 'rb').read()
         dcmp_content = open(dcmp_file, 'rb').read()
         nt.assert_equal(len(dcmp_content), len(in_content) * 2)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/test/test_file_io.py 
new/bloscpack-0.16.0/test/test_file_io.py
--- old/bloscpack-0.15.0/test/test_file_io.py   2018-10-30 22:24:40.000000000 
+0100
+++ new/bloscpack-0.16.0/test/test_file_io.py   2018-11-06 22:47:19.000000000 
+0100
@@ -30,10 +30,10 @@
                                PlainFPSink,
                                CompressedFPSource,
                                CompressedFPSink,
-                               pack_file,
-                               unpack_file,
-                               pack_bytes_file,
-                               unpack_bytes_file,
+                               pack_file_to_file,
+                               unpack_file_from_file,
+                               pack_bytes_to_file,
+                               unpack_bytes_from_file,
                                pack_bytes_to_bytes,
                                unpack_bytes_from_bytes,
                                _read_bloscpack_header,
@@ -58,7 +58,7 @@
 def test_offsets():
     with create_tmp_files() as (tdir, in_file, out_file, dcmp_file):
         create_array(1, in_file)
-        pack_file(in_file, out_file, chunk_size='2M')
+        pack_file_to_file(in_file, out_file, chunk_size='2M')
         with open(out_file, 'r+b') as input_fp:
             bloscpack_header = _read_bloscpack_header(input_fp)
             total_entries = bloscpack_header.total_prospective_chunks
@@ -153,15 +153,15 @@
     blosc_args = BloscArgs()
     with create_tmp_files() as (tdir, in_file, out_file, dcmp_file):
         create_array(1, in_file)
-        pack_file(in_file, out_file, blosc_args=blosc_args)
+        pack_file_to_file(in_file, out_file, blosc_args=blosc_args)
         nt.assert_raises(FormatVersionMismatch,
-                         unpack_file, out_file, dcmp_file)
+                         unpack_file_from_file, out_file, dcmp_file)
 
 
 def test_file_corruption():
     with create_tmp_files() as (tdir, in_file, out_file, dcmp_file):
         create_array(1, in_file)
-        pack_file(in_file, out_file)
+        pack_file_to_file(in_file, out_file)
         # now go in and modify a byte in the file
         with open(out_file, 'r+b') as input_fp:
             # read offsets and header
@@ -180,7 +180,7 @@
             # write the flipped byte
             input_fp.write(replace)
         # now attempt to unpack it
-        nt.assert_raises(ChecksumMismatch, unpack_file, out_file, dcmp_file)
+        nt.assert_raises(ChecksumMismatch, unpack_file_from_file, out_file, 
dcmp_file)
 
 
 def pack_unpack(repeats, chunk_size=None, progress=False):
@@ -190,10 +190,10 @@
         create_array(repeats, in_file, progress=progress)
         if progress:
             print("Compressing")
-        pack_file(in_file, out_file, chunk_size=chunk_size)
+        pack_file_to_file(in_file, out_file, chunk_size=chunk_size)
         if progress:
             print("Decompressing")
-        unpack_file(out_file, dcmp_file)
+        unpack_file_from_file(out_file, dcmp_file)
         if progress:
             print("Verifying")
         cmp_file(in_file, dcmp_file)
@@ -242,19 +242,20 @@
     pack_unpack_fp(1, chunk_size=reverse_pretty('8M'))
 
 
-def test_pack_unpack_bytes_file():
+def test_pack_unpack_bytes_to_from_file():
     array_ = np.linspace(0, 1e5)
     input_bytes = array_.tostring()
     with create_tmp_files() as (tdir, in_file, out_file, dcmp_file):
-        pack_bytes_file(input_bytes, out_file)
-        output_bytes = unpack_bytes_file(out_file)
+        pack_bytes_to_file(input_bytes, out_file)
+        output_bytes, _ = unpack_bytes_from_file(out_file)
     nt.assert_equal(input_bytes, output_bytes)
 
+
 def test_pack_unpack_bytes_bytes():
     a = np.linspace(0, 1e5)
     b = a.tostring()
     c = pack_bytes_to_bytes(b)
-    d = unpack_bytes_from_bytes(c)
+    d, _ = unpack_bytes_from_bytes(c)
     nt.assert_equal(b, d)
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/bloscpack-0.15.0/test/test_numpy_io.py 
new/bloscpack-0.16.0/test/test_numpy_io.py
--- old/bloscpack-0.15.0/test/test_numpy_io.py  2018-10-30 22:47:39.000000000 
+0100
+++ new/bloscpack-0.16.0/test/test_numpy_io.py  2018-12-27 16:12:05.000000000 
+0100
@@ -28,10 +28,10 @@
 from bloscpack.memory_io import CompressedMemorySource, CompressedMemorySink
 from bloscpack.numpy_io import (pack_ndarray,
                                 unpack_ndarray,
-                                pack_ndarray_str,
-                                unpack_ndarray_str,
-                                pack_ndarray_file,
-                                unpack_ndarray_file,
+                                pack_ndarray_to_bytes,
+                                unpack_ndarray_from_bytes,
+                                pack_ndarray_to_file,
+                                unpack_ndarray_from_file,
                                 _conv,
                                 )
 from bloscpack.testutil import (create_tmp_files,
@@ -47,8 +47,8 @@
 
 
 def roundtrip_numpy_str(ndarray):
-    s = pack_ndarray_str(ndarray)
-    b = unpack_ndarray_str(s)
+    s = pack_ndarray_to_bytes(ndarray)
+    b = unpack_ndarray_from_bytes(s)
     return npt.assert_array_equal, ndarray, b
 
 
@@ -64,8 +64,8 @@
 
 def roundtrip_numpy_file(ndarray):
     with create_tmp_files() as (tdir, in_file, out_file, dcmp_file):
-        pack_ndarray_file(ndarray, out_file)
-        b = unpack_ndarray_file(out_file)
+        pack_ndarray_to_file(ndarray, out_file)
+        b = unpack_ndarray_from_file(out_file)
         return npt.assert_array_equal, ndarray, b
 
 
@@ -87,7 +87,7 @@
     source = PlainFPSource(StringIO(a_str))
     sink = CompressedFPSink(sio)
     pack(source, sink, *calculate_nchunks(len(a_str)))
-    nt.assert_raises(NotANumpyArray, unpack_ndarray_str, sio.getvalue())
+    nt.assert_raises(NotANumpyArray, unpack_ndarray_from_bytes, sio.getvalue())
 
 
 def roundtrip_ndarray(ndarray):
@@ -206,19 +206,19 @@
     with mock.patch('bloscpack.numpy_io._ndarray_meta', old_ndarray_meta):
         for a in test_data:
             # uses old version of _ndarray_meta
-            c = pack_ndarray_str(a)
+            c = pack_ndarray_to_bytes(a)
             # should not raise a SyntaxError
-            d = unpack_ndarray_str(c)
+            d = unpack_ndarray_from_bytes(c)
             yield npt.assert_array_equal, a, d
 
 
 def test_itemsize_chunk_size_mismatch():
     a = np.arange(1000)
     # typesize of the array is 8, let's glitch the typesize
-    for i in [1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15]:
+    for i in [1, 2, 3, 5, 6, 7, 9, 10, 11, 13, 14, 15]:
             yield nt.assert_raises, \
                 ChunkSizeTypeSizeMismatch, \
-                pack_ndarray_str, a, i
+                pack_ndarray_to_bytes, a, i
 
 
 def test_larger_arrays():


Reply via email to