Hello community,

here is the log from the commit of package python-jsonpickle for 
openSUSE:Factory checked in at 2020-03-08 22:21:24
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-jsonpickle (Old)
 and      /work/SRC/openSUSE:Factory/.python-jsonpickle.new.26092 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-jsonpickle"

Sun Mar  8 22:21:24 2020 rev:6 rq:780358 version:1.3

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-jsonpickle/python-jsonpickle.changes      
2019-12-11 12:11:16.172585279 +0100
+++ 
/work/SRC/openSUSE:Factory/.python-jsonpickle.new.26092/python-jsonpickle.changes
   2020-03-08 22:21:31.687980861 +0100
@@ -1,0 +2,6 @@
+Fri Feb 28 18:31:13 CET 2020 - Matej Cepl <mc...@suse.com>
+
+- Add PR292-Python38.patch to fix Python 3.8 incompatibilities
+  (gh#jsonpickle/jsonpickle#281).
+
+-------------------------------------------------------------------

Old:
----
  jsonpickle-1.2.tar.gz

New:
----
  PR292-Python38.patch
  jsonpickle-1.3.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-jsonpickle.spec ++++++
--- /var/tmp/diff_new_pack.7agU3T/_old  2020-03-08 22:21:32.595981421 +0100
+++ /var/tmp/diff_new_pack.7agU3T/_new  2020-03-08 22:21:32.611981431 +0100
@@ -1,7 +1,7 @@
 #
 # spec file for package python-jsonpickle
 #
-# Copyright (c) 2019 SUSE LLC
+# Copyright (c) 2020 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -17,14 +17,18 @@
 
 
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
+%define modname jsonpickle
 Name:           python-jsonpickle
-Version:        1.2
+Version:        1.3
 Release:        0
 Summary:        Python library for serializing any arbitrary object graph into 
JSON
 License:        BSD-3-Clause
 Group:          Development/Languages/Python
 URL:            https://github.com/jsonpickle/jsonpickle
 Source:         
https://files.pythonhosted.org/packages/source/j/jsonpickle/jsonpickle-%{version}.tar.gz
+# PATCH-FIX-UPSTREAM PR292-Python38.patch gh#jsonpickle/jsonpickle#281 
mc...@suse.com
+# Fix Python 3.8 incompatibilities
+Patch01:        PR292-Python38.patch
 BuildRequires:  %{python_module setuptools}
 BuildRequires:  fdupes
 BuildRequires:  python-rpm-macros
@@ -57,7 +61,7 @@
 Additionally, it can reconstitute the object back into Python.
 
 %prep
-%setup -q -n jsonpickle-%{version}
+%autosetup -p1 -n %{modname}-%{version}
 
 %build
 %python_build
@@ -67,8 +71,7 @@
 %python_expand %fdupes %{buildroot}%{$python_sitelib}
 
 %check
-# Exclusions because of gh#jsonpickle/jsonpickle#281
-%pytest -k 'not (test_thing_with_fd or test_list_with_fd or test_dict_with_fd)'
+%pytest
 
 %files %{python_files}
 %doc README.rst docs/source/changelog.rst

++++++ PR292-Python38.patch ++++++
--- a/jsonpickle/handlers.py
+++ b/jsonpickle/handlers.py
@@ -10,7 +10,9 @@ A handler can be bound to other types by
 from __future__ import absolute_import, division, unicode_literals
 import copy
 import datetime
+import io
 import re
+import sys
 import threading
 import uuid
 
@@ -254,3 +256,18 @@ class LockHandler(BaseHandler):
 
 _lock = threading.Lock()
 LockHandler.handles(_lock.__class__)
+
+
+class TextIOHandler(BaseHandler):
+    """Serialize file descriptors as None because we cannot roundtrip"""
+
+    def flatten(self, obj, data):
+        return None
+
+    def restore(self, data):
+        """Restore should never get called because flatten() returns None"""
+        raise AssertionError('Restoring IO.TextIOHandler is not supported')
+
+
+if sys.version_info >= (3, 8):
+    TextIOHandler.handles(io.TextIOWrapper)
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -12,5 +12,5 @@ pytest
 pytest-cov
 simplejson
 sqlalchemy
-ujson
-yajl; sys_platform != 'win32'
+ujson; python_version < '3.8'
+yajl; sys_platform != 'win32' and python_version < '3.8'
++++++ jsonpickle-1.2.tar.gz -> jsonpickle-1.3.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/PKG-INFO new/jsonpickle-1.3/PKG-INFO
--- old/jsonpickle-1.2/PKG-INFO 2019-05-24 07:17:33.000000000 +0200
+++ new/jsonpickle-1.3/PKG-INFO 2020-02-14 07:52:32.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: jsonpickle
-Version: 1.2
+Version: 1.3
 Summary: Python library for serializing any arbitrary object graph into JSON
 Home-page: https://jsonpickle.github.io/
 Author: David Aguilar
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/README.rst 
new/jsonpickle-1.3/README.rst
--- old/jsonpickle-1.2/README.rst       2019-05-24 07:17:18.000000000 +0200
+++ new/jsonpickle-1.3/README.rst       2020-02-14 07:52:17.000000000 +0100
@@ -86,3 +86,45 @@
 =======
 Licensed under the BSD License. See COPYING for details.
 See jsonpickleJS/LICENSE for details about the jsonpickleJS license.
+
+Development
+===========
+
+Use `make` to run the unit tests::
+
+        make test
+
+`pytest` is used to run unit tests internally.
+
+A `tox` target is provided to run tests against multiple
+python versions using `tox`::
+
+        make tox
+
+`jsonpickle` itself has no dependencies beyond the Python stdlib.
+`tox` is required for testing when using the `tox` test runner only.
+
+The testing requirements are specified in `requirements-dev.txt`.
+It is recommended to create a virtualenv and install the requirements there.::
+
+        python3 -mvenv env3x
+        vx env3x pip install --requirement requirements-dev.txt
+
+You can then execute tests inside the virtualenv::
+
+        vx env3x make test
+
+`vx <https://github.com/davvid/vx/>`_ is a simple script that allows you to
+eschew the typical virtualenv `source activate` / `deactivate` dance.
+
+The following steps clone `vx` to `~/src/vx` and symlinks to the script from
+`~/bin/vx`.  This assumes that `$HOME/bin` is in your `$PATH`.::
+
+    mkdir -p ~/bin ~/src
+    cd ~/src && git clone git://github.com/davvid/vx.git
+    cd ~/bin && ln -s ../src/vx/vx
+
+You don't need `vx` to run the jsonpickle's tests -- you can always use the
+`activate` and `deactivate` virtualenv workflow instead.  `vx` is convenient
+when testing against multiple virtualenvs because it does not mutate your
+shell environment.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/docs/source/changelog.rst 
new/jsonpickle-1.3/docs/source/changelog.rst
--- old/jsonpickle-1.2/docs/source/changelog.rst        2019-05-24 
07:17:18.000000000 +0200
+++ new/jsonpickle-1.3/docs/source/changelog.rst        2020-02-14 
07:52:17.000000000 +0100
@@ -1,6 +1,16 @@
 Change Log
 ==========
 
+Version 1.3 - February 13, 2020
+-------------------------------
+    * Improved round tripping of default dicts.
+      (`#283 <https://github.com/jsonpickle/jsonpickle/pull/283>`_)
+      (`#282 <https://github.com/jsonpickle/jsonpickle/issues/282>`_)
+
+    * Better support for cyclical references when encoding with
+      ``unpicklable=False``.
+      (`#264 <https://github.com/jsonpickle/jsonpickle/pull/264>`_)
+
 Version 1.2 - May 15, 2019
 --------------------------
     * Simplified JSON representation for `__reduce__` values.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/jsonpickle/backend.py 
new/jsonpickle-1.3/jsonpickle/backend.py
--- old/jsonpickle-1.2/jsonpickle/backend.py    2019-05-24 07:17:18.000000000 
+0200
+++ new/jsonpickle-1.3/jsonpickle/backend.py    2020-02-14 07:52:17.000000000 
+0100
@@ -25,14 +25,9 @@
         self._decoders = {}
 
         # Options to pass to specific encoders
-        json_opts = ((), {'sort_keys': True})
-        self._encoder_options = {
-            'json': json_opts,
-            'simplejson': json_opts,
-            'django.util.simplejson': json_opts,
-        }
+        self._encoder_options = {}
 
-        # Options to pass to specific encoders
+        # Options to pass to specific decoders
         self._decoder_options = {}
 
         # The exception class that is thrown when a decoding error occurs
@@ -48,6 +43,15 @@
         self.load_backend('yajl')
         self.load_backend('ujson')
 
+        # Defaults for various encoders
+        json_opts = ((), {'sort_keys': True})
+        self._encoder_options = {
+            'ujson': ((), {'sort_keys': True, 'escape_forward_slashes': 
False}),
+            'json': json_opts,
+            'simplejson': json_opts,
+            'django.util.simplejson': json_opts,
+        }
+
     def _verify(self):
         """Ensures that we've loaded at least one JSON backend."""
         if self._verified:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/jsonpickle/ext/pandas.py 
new/jsonpickle-1.3/jsonpickle/ext/pandas.py
--- old/jsonpickle-1.2/jsonpickle/ext/pandas.py 2019-05-24 07:17:18.000000000 
+0200
+++ new/jsonpickle-1.3/jsonpickle/ext/pandas.py 2020-02-14 07:52:17.000000000 
+0100
@@ -7,8 +7,8 @@
 from .. import encode, decode
 from ..handlers import BaseHandler, register, unregister
 from ..util import b64decode, b64encode
-from ..backend import json
-from .numpy import register_handlers as register_numpy_handlers, 
unregister_handlers as unregister_numpy_handlers
+from .numpy import register_handlers as register_numpy_handlers
+from .numpy import unregister_handlers as unregister_numpy_handlers
 
 __all__ = ['register_handlers', 'unregister_handlers']
 
@@ -80,15 +80,16 @@
         meta = {'dtypes': {k: str(dtype[k]) for k in dtype},
                 'index': encode(obj.index)}
 
-        data = 
self.pp.flatten_pandas(obj.reset_index(drop=True).to_csv(index=False), data, 
meta)
+        data = self.pp.flatten_pandas(
+            obj.reset_index(drop=True).to_csv(index=False), data, meta)
         return data
 
     def restore(self, data):
         csv, meta = self.pp.restore_pandas(data)
         params = make_read_csv_params(meta)
-        df = pd.read_csv(StringIO(csv),
-                         **params)
-        df.set_index(decode(meta["index"]), inplace=True)
+        df = pd.read_csv(
+          StringIO(csv), **params) if data['values'].strip() else 
pd.DataFrame()
+        df.set_index(decode(meta['index']), inplace=True)
         return df
 
 
@@ -114,14 +115,16 @@
 
 
 class PandasIndexHandler(BaseHandler):
-    pp = PandasProcessor()
 
+    pp = PandasProcessor()
     index_constructor = pd.Index
-    name_bundler = lambda _, obj: {'name': obj.name}
+
+    def name_bundler(self, obj):
+        return {'name': obj.name}
 
     def flatten(self, obj, data):
         name_bundle = self.name_bundler(obj)
-        meta = dict(dtype= str(obj.dtype), **name_bundle)
+        meta = dict(dtype=str(obj.dtype), **name_bundle)
         buf = encode(obj.tolist())
         data = self.pp.flatten_pandas(buf, data, meta)
         return data
@@ -139,15 +142,17 @@
 
 
 class PandasMultiIndexHandler(PandasIndexHandler):
-    name_bundler = lambda _, obj: {'names': obj.names}
+
+    def name_bundler(self, obj):
+        return {'names': obj.names}
 
 
 class PandasTimestampHandler(BaseHandler):
     pp = PandasProcessor()
 
     def flatten(self, obj, data):
-        meta = {"isoformat": obj.isoformat()}
-        buf = ""
+        meta = {'isoformat': obj.isoformat()}
+        buf = ''
         data = self.pp.flatten_pandas(buf, data, meta)
         return data
 
@@ -162,8 +167,11 @@
     pp = PandasProcessor()
 
     def flatten(self, obj, data):
-        meta = {"start_time": encode(obj.start_time), "freqstr": obj.freqstr}
-        buf = ""
+        meta = {
+            'start_time': encode(obj.start_time),
+            'freqstr': obj.freqstr,
+        }
+        buf = ''
         data = self.pp.flatten_pandas(buf, data, meta)
         return data
 
@@ -179,8 +187,12 @@
     pp = PandasProcessor()
 
     def flatten(self, obj, data):
-        meta = {"left": encode(obj.left), "right": encode(obj.right), 
"closed": obj.closed}
-        buf = ""
+        meta = {
+            'left': encode(obj.left),
+            'right': encode(obj.right),
+            'closed': obj.closed
+        }
+        buf = ''
         data = self.pp.flatten_pandas(buf, data, meta)
         return data
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/jsonpickle/handlers.py 
new/jsonpickle-1.3/jsonpickle/handlers.py
--- old/jsonpickle-1.2/jsonpickle/handlers.py   2019-05-24 07:17:18.000000000 
+0200
+++ new/jsonpickle-1.3/jsonpickle/handlers.py   2020-02-14 07:52:17.000000000 
+0100
@@ -11,6 +11,7 @@
 import copy
 import datetime
 import re
+import threading
 import uuid
 
 from . import compat
@@ -235,3 +236,21 @@
 
 
 UUIDHandler.handles(uuid.UUID)
+
+
+class LockHandler(BaseHandler):
+    """Serialize threading.Lock objects"""
+
+    def flatten(self, obj, data):
+        data['locked'] = obj.locked()
+        return data
+
+    def restore(self, data):
+        lock = threading.Lock()
+        if data.get('locked', False):
+            lock.acquire()
+        return lock
+
+
+_lock = threading.Lock()
+LockHandler.handles(_lock.__class__)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/jsonpickle/pickler.py 
new/jsonpickle-1.3/jsonpickle/pickler.py
--- old/jsonpickle-1.2/jsonpickle/pickler.py    2019-05-24 07:17:18.000000000 
+0200
+++ new/jsonpickle-1.3/jsonpickle/pickler.py    2020-02-14 07:52:17.000000000 
+0100
@@ -31,7 +31,8 @@
            max_iter=None,
            use_decimal=False,
            numeric_keys=False,
-           use_base85=False):
+           use_base85=False,
+           fail_safe=None):
     """Return a JSON formatted representation of value, a Python object.
 
     :param unpicklable: If set to False then the output will not contain the
@@ -66,6 +67,13 @@
 
         NOTE: A side-effect of the above settings is that float values will be
         converted to Decimal when converting to json.
+    :param use_base85:
+        If possible, use base85 to encode binary data. Base85 bloats binary 
data
+        by 1/4 as opposed to base64, which expands it by 1/3. This argument is
+        ignored on Python 2 because it doesn't support it.
+    :param fail_safe: If set to a function, exceptions are ignored when pickling
+        and if an exception happens the function is called and the return value
+        is used as the value for the object that caused the error
 
     >>> encode('my string') == '"my string"'
     True
@@ -76,10 +84,6 @@
     >>> encode({'foo': [1, 2, [3, 4]]}, max_depth=1)
     '{"foo": "[1, 2, [3, 4]]"}'
 
-    :param use_base85:
-        If possible, use base85 to encode binary data. Base85 bloats binary 
data
-        by 1/4 as opposed to base64, which expands it by 1/3. This argument is
-        ignored on Python 2 because it doesn't support it.
     """
     backend = backend or json
     context = context or Pickler(
@@ -92,7 +96,8 @@
             max_iter=max_iter,
             numeric_keys=numeric_keys,
             use_decimal=use_decimal,
-            use_base85=use_base85)
+            use_base85=use_base85,
+            fail_safe=fail_safe)
     return backend.encode(context.flatten(value, reset=reset))
 
 
@@ -108,7 +113,8 @@
                  max_iter=None,
                  numeric_keys=False,
                  use_decimal=False,
-                 use_base85=False):
+                 use_base85=False,
+                 fail_safe=None):
         self.unpicklable = unpicklable
         self.make_refs = make_refs
         self.backend = backend or json
@@ -136,6 +142,9 @@
             self._bytes_tag = tags.B64
             self._bytes_encoder = util.b64encode
 
+        # ignore exceptions
+        self.fail_safe = fail_safe
+
     def reset(self):
         self._objs = {}
         self._depth = -1
@@ -214,47 +223,62 @@
         return self._flatten(obj)
 
     def _flatten(self, obj):
+
+        #########################################
+        # if obj is nonrecursive return immediately
+        # for performance reasons we don't want to do recursive checks
+        if PY2 and isinstance(obj, types.FileType):
+            return self._flatten_file(obj)
+
+        if util.is_bytes(obj):
+            return self._flatten_bytestring(obj)
+
+        if util.is_primitive(obj):
+            return obj
+
+        # Decimal is a primitive when use_decimal is True
+        if self._use_decimal and isinstance(obj, decimal.Decimal):
+            return obj
+        #########################################
+
         self._push()
         return self._pop(self._flatten_obj(obj))
 
+    def _max_reached(self):
+        return self._depth == self._max_depth
+
     def _flatten_obj(self, obj):
         self._seen.append(obj)
-        max_reached = self._depth == self._max_depth
-
-        in_cycle = (
-            max_reached or (
-                not self.make_refs
-                and id(obj) in self._objs
-            )) and not util.is_primitive(obj)
-        if in_cycle:
-            # break the cycle
-            flatten_func = repr
-        else:
-            flatten_func = self._get_flattener(obj)
 
-        if flatten_func is None:
-            self._pickle_warning(obj)
-            return None
+        max_reached = self._max_reached()
 
-        return flatten_func(obj)
+        try:
 
-    def _list_recurse(self, obj):
-        return [self._flatten(v) for v in obj]
+            in_cycle = _in_cycle(obj, self._objs, max_reached, self.make_refs)
+            if in_cycle:
+                # break the cycle
+                flatten_func = repr
+            else:
+                flatten_func = self._get_flattener(obj)
 
-    def _get_flattener(self, obj):
+            if flatten_func is None:
+                self._pickle_warning(obj)
+                return None
 
-        if PY2 and isinstance(obj, types.FileType):
-            return self._flatten_file
+            return flatten_func(obj)
 
-        if util.is_bytes(obj):
-            return self._flatten_bytestring
+        except (KeyboardInterrupt, SystemExit) as e:
+            raise e
+        except Exception as e:
+            if self.fail_safe is None:
+                raise e
+            else:
+                return self.fail_safe(e)
 
-        if util.is_primitive(obj):
-            return lambda obj: obj
+    def _list_recurse(self, obj):
+        return [self._flatten(v) for v in obj]
 
-        # Decimal is a primitive when use_decimal is True
-        if self._use_decimal and isinstance(obj, decimal.Decimal):
-            return lambda obj: obj
+    def _get_flattener(self, obj):
 
         list_recurse = self._list_recurse
 
@@ -295,14 +319,24 @@
     def _ref_obj_instance(self, obj):
         """Reference an existing object or flatten if new
         """
-        if self._mkref(obj):
-            # We've never seen this object so return its
-            # json representation.
+        if self.unpicklable:
+            if self._mkref(obj):
+                # We've never seen this object so return its
+                # json representation.
+                return self._flatten_obj_instance(obj)
+            # We've seen this object before so place an object
+            # reference tag in the data. This avoids infinite recursion
+            # when processing cyclical objects.
+            return self._getref(obj)
+        else:
+            max_reached = self._max_reached()
+            in_cycle = _in_cycle(obj, self._objs, max_reached, False)
+            if in_cycle:
+                # A circular becomes None.
+                return None
+
+            self._mkref(obj)
             return self._flatten_obj_instance(obj)
-        # We've seen this object before so place an object
-        # reference tag in the data. This avoids infinite recursion
-        # when processing cyclical objects.
-        return self._getref(obj)
 
     def _flatten_file(self, obj):
         """
@@ -515,7 +549,8 @@
                     # We've never seen this object before so pickle it 
in-place.
                     # Create an instance from the factory and assume that the
                    # resulting instance is a suitable exemplar.
-                    value = self._flatten(handlers.CloneFactory(factory()))
+                    value = self._flatten_obj_instance(
+                        handlers.CloneFactory(factory()))
                 else:
                     # We've seen this object before.
                     # Break the cycle by emitting a reference.
@@ -609,6 +644,14 @@
             warnings.warn(msg)
 
 
+def _in_cycle(obj, objs, max_reached, make_refs):
+    return (
+        max_reached or (
+            not make_refs and id(obj) in objs
+        )
+    ) and not util.is_primitive(obj)
+
+
 def _mktyperef(obj):
     """Return a typeref dictionary
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/jsonpickle/unpickler.py 
new/jsonpickle-1.3/jsonpickle/unpickler.py
--- old/jsonpickle-1.2/jsonpickle/unpickler.py  2019-05-24 07:17:18.000000000 
+0200
+++ new/jsonpickle-1.3/jsonpickle/unpickler.py  2020-02-14 07:52:17.000000000 
+0100
@@ -383,11 +383,13 @@
         if isinstance(instance, tuple):
             return instance
 
+        instance = self._restore_object_instance_variables(obj, instance)
+
         if (_safe_hasattr(instance, 'default_factory') and
                 isinstance(instance.default_factory, _Proxy)):
             instance.default_factory = instance.default_factory.get()
 
-        return self._restore_object_instance_variables(obj, instance)
+        return instance
 
     def _restore_from_dict(self, obj, instance, ignorereserved=True):
         restore_key = self._restore_key_fn()
@@ -627,7 +629,7 @@
             for class_name in names[up_to:]:
                 obj = getattr(obj, class_name)
             return obj
-        except (AttributeError, ImportError, ValueError) as ex:
+        except (AttributeError, ImportError, ValueError):
             continue
     return None
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/jsonpickle/version.py 
new/jsonpickle-1.3/jsonpickle/version.py
--- old/jsonpickle-1.2/jsonpickle/version.py    2019-05-24 07:17:18.000000000 
+0200
+++ new/jsonpickle-1.3/jsonpickle/version.py    2020-02-14 07:52:17.000000000 
+0100
@@ -1 +1 @@
-__version__ = '1.2'
+__version__ = '1.3'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/jsonpickle.egg-info/PKG-INFO 
new/jsonpickle-1.3/jsonpickle.egg-info/PKG-INFO
--- old/jsonpickle-1.2/jsonpickle.egg-info/PKG-INFO     2019-05-24 
07:17:32.000000000 +0200
+++ new/jsonpickle-1.3/jsonpickle.egg-info/PKG-INFO     2020-02-14 
07:52:32.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: jsonpickle
-Version: 1.2
+Version: 1.3
 Summary: Python library for serializing any arbitrary object graph into JSON
 Home-page: https://jsonpickle.github.io/
 Author: David Aguilar
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/tests/jsonpickle_test.py 
new/jsonpickle-1.3/tests/jsonpickle_test.py
--- old/jsonpickle-1.2/tests/jsonpickle_test.py 2019-05-24 07:17:18.000000000 
+0200
+++ new/jsonpickle-1.3/tests/jsonpickle_test.py 2020-02-14 07:52:17.000000000 
+0100
@@ -1461,12 +1461,14 @@
         self.assertEqual(PickleProtocol2Thing, decoded.args[1].__class__)
         self.assertTrue(decoded.args[0] is decoded.args[1])
 
-    def test_handles_cyclical_objects(self):
+    def test_cyclical_objects(self, use_tuple=True):
         child = Capture(None)
         instance = Capture(child, child)
-        child.args = (instance,)  # create a cycle
-        # TODO we do not properly restore references inside of lists.
-        # Change the above tuple into a list to show the breakage.
+        # create a cycle
+        if use_tuple:
+            child.args = (instance,)
+        else:
+            child.args = [instance]
 
         encoded = jsonpickle.encode(instance)
         decoded = jsonpickle.decode(encoded)
@@ -1494,6 +1496,9 @@
         self.assertTrue(decoded.args[0] is decoded.args[0].args[0].args[0])
         self.assertTrue(decoded.args[0] is decoded.args[1].args[0].args[0])
 
+    def test_cyclical_objects_list(self):
+        self.test_cyclical_objects(use_tuple=False)
+
     def test_handles_cyclical_objects_in_lists(self):
         child = PickleProtocol2ChildThing(None)
         instance = PickleProtocol2ChildThing([child, child])
@@ -1505,6 +1510,36 @@
         self.assertTrue(decoded is decoded.child[0].child)
         self.assertTrue(decoded is decoded.child[1].child)
 
+    def test_cyclical_objects_unpickleable_false(self, use_tuple=True):
+        child = Capture(None)
+        instance = Capture(child, child)
+        # create a cycle
+        if use_tuple:
+            child.args = (instance,)
+        else:
+            child.args = [instance]
+        encoded = jsonpickle.encode(instance, unpicklable=False)
+        decoded = jsonpickle.decode(encoded)
+
+        self.assertTrue(isinstance(decoded, dict))
+        self.assertTrue('args' in decoded)
+        self.assertTrue('kwargs' in decoded)
+
+        # Tuple is lost via json
+        args = decoded['args']
+        self.assertTrue(isinstance(args, list))
+
+        # Get the children
+        self.assertEqual(len(args), 2)
+        decoded_child0 = args[0]
+        decoded_child1 = args[1]
+        self.assertTrue(isinstance(decoded_child0, dict))
+        # Circular references become None
+        self.assertEqual(decoded_child1, None)
+
+    def test_cyclical_objects_unpickleable_false_list(self):
+        self.test_cyclical_objects_unpickleable_false(use_tuple=False)
+
 
 def suite():
     suite = unittest.TestSuite()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/tests/object_test.py 
new/jsonpickle-1.3/tests/object_test.py
--- old/jsonpickle-1.2/tests/object_test.py     2019-05-24 07:17:18.000000000 
+0200
+++ new/jsonpickle-1.3/tests/object_test.py     2020-02-14 07:52:17.000000000 
+0100
@@ -1,10 +1,11 @@
 from __future__ import absolute_import, division, unicode_literals
 import enum
 import collections
+import datetime
 import decimal
 import re
+import threading
 import unittest
-import datetime
 
 import jsonpickle
 from jsonpickle import compat
@@ -76,6 +77,7 @@
     def __setstate__(self, state):
         self.x, self.y = state[0], state[1]
 
+
 class GetstateRecursesInfintely(object):
     def __getstate__(self):
         return GetstateRecursesInfintely()
@@ -237,6 +239,41 @@
         return self.offset,
 
 
+class FailSafeTestCase(SkippableTest):
+
+    class BadClass(object):
+
+        def __getstate__(self):
+            raise ValueError('Intentional error')
+
+    good = 'good'
+
+    to_pickle = [BadClass(), good]
+
+    def test_no_error(self):
+        encoded = jsonpickle.encode(self.to_pickle, fail_safe=lambda e: None)
+        decoded = jsonpickle.decode(encoded)
+        self.assertEqual(decoded[0], None)
+        self.assertEqual(decoded[1], 'good')
+
+    def test_error_recorded(self):
+        exceptions = []
+
+        def recorder(exception):
+            exceptions.append(exception)
+
+        jsonpickle.encode(self.to_pickle, fail_safe=recorder)
+        self.assertEqual(len(exceptions), 1)
+        self.assertTrue(isinstance(exceptions[0], Exception))
+
+    def test_custom_err_msg(self):
+        CUSTOM_ERR_MSG = 'custom err msg'
+        encoded = jsonpickle.encode(self.to_pickle,
+                                    fail_safe=lambda e: CUSTOM_ERR_MSG)
+        decoded = jsonpickle.decode(encoded)
+        self.assertEqual(decoded[0], CUSTOM_ERR_MSG)
+
+
 class IntKeysObject(object):
 
     def __init__(self):
@@ -294,6 +331,27 @@
         # outer-most defaultdict
         self.assertEqual(newdefdict[3].default_factory, int)
 
+    def test_defaultdict_roundtrip_simple_lambda2(self):
+        defaultdict = collections.defaultdict
+        payload = {'a': defaultdict(lambda: 0)}
+        defdict = defaultdict(lambda: 0, payload)
+        # roundtrip
+        encoded = jsonpickle.encode(defdict, keys=True)
+        decoded = jsonpickle.decode(encoded, keys=True)
+        self.assertEqual(type(decoded), defaultdict)
+        self.assertEqual(type(decoded['a']), defaultdict)
+
+    def test_defaultdict_and_things_roundtrip_simple_lambda(self):
+        thing = Thing('a')
+        defaultdict = collections.defaultdict
+        defdict = defaultdict(lambda: 0)
+        obj = [defdict, thing, thing]
+        # roundtrip
+        encoded = jsonpickle.encode(obj, keys=True)
+        decoded = jsonpickle.decode(encoded, keys=True)
+        self.assertEqual(decoded[0].default_factory(), 0)
+        self.assertIs(decoded[1], decoded[2])
+
     def test_defaultdict_subclass_with_self_as_default_factory(self):
         cls = ThingWithSelfAsDefaultFactory
         tree = cls()
@@ -859,6 +917,25 @@
         restored = self.unpickler.restore(flattened)
         self.assertEqual(restored.offset, datetime.timedelta(99))
 
+    def test_threading_lock(self):
+        obj = Thing('lock')
+        obj.lock = threading.Lock()
+        lock_class = obj.lock.__class__
+        # Roundtrip and make sure we get a lock object.
+        json = self.pickler.flatten(obj)
+        clone = self.unpickler.restore(json)
+        self.assertTrue(isinstance(clone.lock, lock_class))
+        self.assertFalse(clone.lock.locked())
+
+        # Serializing a locked lock should create a locked clone.
+        self.assertTrue(obj.lock.acquire())
+        json = self.pickler.flatten(obj)
+        obj.lock.release()
+        # Restore the locked lock state.
+        clone = self.unpickler.restore(json)
+        self.assertTrue(clone.lock.locked())
+        clone.lock.release()
+
 
 # Test classes for ExternalHandlerTestCase
 class Mixin(object):
@@ -906,6 +983,7 @@
 
 def suite():
     suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(FailSafeTestCase))
     suite.addTest(unittest.makeSuite(AdvancedObjectsTestCase))
     suite.addTest(unittest.makeSuite(ExternalHandlerTestCase))
     return suite
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/jsonpickle-1.2/tests/pandas_test.py 
new/jsonpickle-1.3/tests/pandas_test.py
--- old/jsonpickle-1.2/tests/pandas_test.py     2019-05-24 07:17:18.000000000 
+0200
+++ new/jsonpickle-1.3/tests/pandas_test.py     2020-02-14 07:52:17.000000000 
+0100
@@ -8,8 +8,9 @@
 try:
     import pandas as pd
     import numpy as np
-    from pandas.testing import assert_series_equal, assert_frame_equal, 
assert_index_equal
-
+    from pandas.testing import assert_series_equal
+    from pandas.testing import assert_frame_equal
+    from pandas.testing import assert_index_equal
 except ImportError:
     np = None
 
@@ -95,8 +96,8 @@
         if self.should_skip:
             return self.skip('pandas is not importable')
 
-        df = pd.DataFrame({"a": [1, 2], "b": [3, 4]},
-                           index=pd.IntervalIndex.from_breaks([1,2,4]))
+        df = pd.DataFrame({'a': [1, 2], 'b': [3, 4]},
+                          index=pd.IntervalIndex.from_breaks([1, 2, 4]))
 
         decoded_df = self.roundtrip(df)
         assert_frame_equal(decoded_df, df)
@@ -121,7 +122,7 @@
         if self.should_skip:
             return self.skip('pandas is not importable')
 
-        idx = pd.DatetimeIndex(['2019-01-01', '2019-01-02', '2019-01-05',])
+        idx = pd.DatetimeIndex(['2019-01-01', '2019-01-02', '2019-01-05'])
         decoded_idx = self.roundtrip(idx)
         assert_index_equal(decoded_idx, idx)
 
@@ -177,7 +178,8 @@
         if self.should_skip:
             return self.skip('pandas is not importable')
 
-        idx = pd.IntervalIndex.from_breaks(pd.date_range('2019-01-01', 
'2019-01-10'))
+        idx = pd.IntervalIndex.from_breaks(
+            pd.date_range('2019-01-01', '2019-01-10'))
         decoded_idx = self.roundtrip(idx)
         assert_index_equal(decoded_idx, idx)
 
@@ -185,7 +187,7 @@
         if self.should_skip:
             return self.skip('pandas is not importable')
 
-        idx = pd.MultiIndex.from_product(((1,2,3), ("a", "b")))
+        idx = pd.MultiIndex.from_product(((1, 2, 3), ('a', 'b')))
         decoded_idx = self.roundtrip(idx)
         assert_index_equal(decoded_idx, idx)
 


Reply via email to