Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-rpyc for openSUSE:Factory 
checked in at 2024-03-13 22:20:54
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-rpyc (Old)
 and      /work/SRC/openSUSE:Factory/.python-rpyc.new.1770 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-rpyc"

Wed Mar 13 22:20:54 2024 rev:14 rq:1157613 version:6.0.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-rpyc/python-rpyc.changes  2024-01-03 
12:24:13.041787825 +0100
+++ /work/SRC/openSUSE:Factory/.python-rpyc.new.1770/python-rpyc.changes        
2024-03-13 22:22:15.200402899 +0100
@@ -1,0 +2,11 @@
+Wed Mar 13 13:13:19 UTC 2024 - Daniel Garcia <daniel.gar...@suse.com>
+
+- Update to 6.0.0 (bsc#1221331, CVE-2024-27758):
+  * #551 Resolves security issue that results in RCE. The fix breaks
+    backwards compatibility for those that rely on the __array__
+    attribute used by numpy. This RCE is only exploitable when the
+    server-side gets the attribute __array__ and calls it (e.g.,
+    np.array(x)). This issue affects all versions since major release
+    4.
+
+-------------------------------------------------------------------
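
For context on the fix referenced above: it lands in rpyc/core/netref.py (see the
netref.py hunk later in this diff) and simply refuses to unpickle peer-supplied
bytes unless the connection was configured with allow_pickle. A minimal, hedged
sketch of that guard follows; the conn object and the fetch_pickled transport
call are illustrative stand-ins, only the check itself mirrors the real change:

    import pickle

    def make_array_method(conn):
        """Stand-in for the netref method factory (sketch, not rpyc's API)."""
        def __array__(self):
            if not conn._config["allow_pickle"]:
                # Refuse to unpickle bytes supplied by the remote peer unless the
                # connection explicitly allows it; an unconditional pickle.loads()
                # of remote data is the root cause of CVE-2024-27758 (issue #551).
                raise ValueError("pickling is disabled")
            return pickle.loads(conn.fetch_pickled(self))  # hypothetical transport call
        return __array__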

Old:
----
  5.3.1.tar.gz

New:
----
  6.0.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-rpyc.spec ++++++
--- /var/tmp/diff_new_pack.9p2S6J/_old  2024-03-13 22:22:15.640419140 +0100
+++ /var/tmp/diff_new_pack.9p2S6J/_new  2024-03-13 22:22:15.640419140 +0100
@@ -1,7 +1,7 @@
 #
-# spec file
+# spec file for package python-rpyc
 #
-# Copyright (c) 2023 SUSE LLC
+# Copyright (c) 2024 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -26,7 +26,7 @@
 %endif
 %{?sle15_python_module_pythons}
 Name:           python-rpyc%{psuffix}
-Version:        5.3.1
+Version:        6.0.0
 Release:        0
 Summary:        Remote Python Call (RPyC), a RPC library
 License:        MIT
@@ -40,7 +40,7 @@
 BuildRequires:  python-rpm-macros
 Requires:       python-plumbum >= 1.2
 Requires(post): update-alternatives
-Requires(postun):update-alternatives
+Requires(postun): update-alternatives
 BuildArch:      noarch
 %if %{with test}
 BuildRequires:  %{python_module gevent}
@@ -81,10 +81,13 @@
 
 %if %{with test}
 %check
-donttest="TestDeploy or Test_Ssh or TestUdpRegistry or win32pipes or 
test_server_stops or test_immutable_object_return or 
test_return_of_modified_parameter or test_return_of_unmodified_parameter or 
test_dataframe_pickling or test_ssl_conenction or test_connection"
+export PYTEST_ADDOPTS="--import-mode=importlib" PYTHONPATH="."
+pushd tests
+donttest="TestDeploy or TestUdpRegistry"
 # Fails with python 3.11
 donttest+=" or test_gdb"
 %pytest -k "not ($donttest)"
+popd
 %endif
 
 %if !%{with test}

++++++ 5.3.1.tar.gz -> 6.0.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/.github/workflows/codespell.yml 
new/rpyc-6.0.0/.github/workflows/codespell.yml
--- old/rpyc-5.3.1/.github/workflows/codespell.yml      1970-01-01 
01:00:00.000000000 +0100
+++ new/rpyc-6.0.0/.github/workflows/codespell.yml      2024-02-24 
00:30:15.000000000 +0100
@@ -0,0 +1,22 @@
+---
+name: Codespell
+
+on:
+  push:
+    branches: [master]
+  pull_request:
+    branches: [master]
+
+permissions:
+  contents: read
+
+jobs:
+  codespell:
+    name: Check for spelling errors
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Codespell
+        uses: codespell-project/actions-codespell@v2
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/.github/workflows/python-app.yml 
new/rpyc-6.0.0/.github/workflows/python-app.yml
--- old/rpyc-5.3.1/.github/workflows/python-app.yml     2023-02-22 
04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/.github/workflows/python-app.yml     2024-02-24 
00:30:15.000000000 +0100
@@ -16,7 +16,7 @@
 
     strategy:
       matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12.0-alpha.1"]
+        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
 
     steps:
       - uses: actions/checkout@v3
@@ -46,6 +46,9 @@
           ssh-keygen -q -f ~/.ssh/id_rsa -N ''
           cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
           uname -a
-      - name: Test with unittest
+      - name: Bind threads tests with unittest 
         run: |
-          python -m unittest discover -v -s ./rpyc ./tests
+          RPYC_BIND_THREADS="true" python -m unittest discover -v
+      - name: Default tests with unittest
+        run: |
+          python -m unittest discover -v
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/.github/workflows/sphinx-test.yml 
new/rpyc-6.0.0/.github/workflows/sphinx-test.yml
--- old/rpyc-5.3.1/.github/workflows/sphinx-test.yml    1970-01-01 
01:00:00.000000000 +0100
+++ new/rpyc-6.0.0/.github/workflows/sphinx-test.yml    2024-02-24 
00:30:15.000000000 +0100
@@ -0,0 +1,23 @@
+name: "Sphinx Test"
+on:
+  push:
+    branches: [ master ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+  docs-linkcheck-nitpicky:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python 3
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.12
+      - name: Install Dependencies
+        run: |
+          pip install -r docs/requirements.txt
+      - name: Build linkcheck with nit-picky mode
+        working-directory: docs
+        run: |
+          sphinx-build -W --keep-going -b linkcheck -n -d _build/doctrees . 
_build/linkcheck
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/.readthedocs.yml 
new/rpyc-6.0.0/.readthedocs.yml
--- old/rpyc-5.3.1/.readthedocs.yml     2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/.readthedocs.yml     2024-02-24 00:30:15.000000000 +0100
@@ -1,23 +1,21 @@
-# .readthedocs.yml
-# Read the Docs configuration file
-# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
-
 # Required
 version: 2
 
+# Set the version of Python and other tools you might need
+build:
+  os: ubuntu-22.04
+  tools:
+    python: "3.11"
+
+# Build all formats
+formats: all
+
 # Build documentation in the docs/ directory with Sphinx
 sphinx:
   configuration: docs/conf.py
 
-# Build documentation with MkDocs
-#mkdocs:
-#  configuration: mkdocs.yml
-
-# Optionally build your docs in additional formats such as PDF and ePub
-formats: all
-
-# Optionally set the version of Python and requirements required to build your 
docs
+# We recommend specifying your dependencies to enable reproducible builds:
+# https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
 python:
-  version: 3.7
   install:
     - requirements: docs/requirements.txt
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/CHANGELOG.rst new/rpyc-6.0.0/CHANGELOG.rst
--- old/rpyc-5.3.1/CHANGELOG.rst        2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/CHANGELOG.rst        2024-02-24 00:30:15.000000000 +0100
@@ -1,3 +1,11 @@
+6.0.0
+=====
+Date: 2024-02-23
+
+- `#551`_ Resolves security issue that results in RCE. The fix breaks 
backwards compatibility for those that rely on the `__array__` attribute used 
by `numpy`. This RCE is only exploitable when the server-side gets the 
attribute `__array__` and calls it (e.g., `np.array(x)`). This issue affects 
all versions since major release 4.
+
+.. _#551: https://github.com/tomerfiliba-org/rpyc/issues/551
+
 5.3.1
 =====
 Date: 2023-02-21
@@ -118,7 +126,7 @@
 - Fixed teleport function behavior for keyword-only arguments with default 
`#422`_
 - Improved documentation on custom exception handling
 - Fixed IPv6 support for server `#407`_
-- Added a simple asynchrounous service example `#400`_
+- Added a simple asynchronous service example `#400`_
 
 .. _#425: https://github.com/tomerfiliba-org/rpyc/issues/425
 .. _#412: https://github.com/tomerfiliba-org/rpyc/pull/412
@@ -240,7 +248,7 @@
 * no longer store connection as ``self._conn``. (allows services that serve
   multiple clients using the same service object, see `#198`_).
 
-* ``SlaveService`` is now split into two asymetric classes: ``SlaveService``
+* ``SlaveService`` is now split into two asymmetric classes: ``SlaveService``
   and ``MasterService``. The slave exposes functionality to the master but can
   not anymore access remote objects on the master (`#232`_, `#248`_).
   If you were previously using ``SlaveService``, you may experience problems
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/demos/boilerplate/ReadMe.md 
new/rpyc-6.0.0/demos/boilerplate/ReadMe.md
--- old/rpyc-5.3.1/demos/boilerplate/ReadMe.md  2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/demos/boilerplate/ReadMe.md  2024-02-24 00:30:15.000000000 
+0100
@@ -2,7 +2,7 @@
 
 This service is ispired by the FileMonitor example.
 
-It will monitor the file `/tmp/test.txt` to geenrate asynchrounous events that 
will be notified to the RPyC client.
+It will monitor the file `/tmp/test.txt` to generate asynchronous events that 
will be notified to the RPyC client.
 
 run in one terminal:
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/demos/chat/readme.txt 
new/rpyc-6.0.0/demos/chat/readme.txt
--- old/rpyc-5.3.1/demos/chat/readme.txt        2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/demos/chat/readme.txt        2024-02-24 00:30:15.000000000 
+0100
@@ -19,11 +19,11 @@
 while receiving messages from the chat server is analogous to the server
 calling an(async) function on the client. RPC at its best.
 
-Also, do keep in mind the inherent securiy of this model: the server exposes
+Also, do keep in mind the inherent security of this model: the server exposes
 a well defined set of methods(so there's no risk of the client abusing
                               the server), while the server can't abuse the 
client because it can invoke
 only a designated callback it is passed. This allows both parties not to
-trust each other while still providing RPyC-grade servive.
+trust each other while still providing RPyC-grade service.
 
 
 == Threading issues ==
@@ -33,7 +33,7 @@
 
 == Client Design ==
 With all the visual noise caused by the GUI code, it's easy to get lost on
-the RPyC part. In short, this is the RPyC releated code:
+the RPyC part. In short, this is the RPyC related code:
 
     def on_message(text):
         # server-side callback
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/demos/filemon/readme.txt 
new/rpyc-6.0.0/demos/filemon/readme.txt
--- old/rpyc-5.3.1/demos/filemon/readme.txt     2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/demos/filemon/readme.txt     2024-02-24 00:30:15.000000000 
+0100
@@ -1,2 +1,2 @@
-a demonstation of events: the file monitor will send events to the client
+a demonstration of events: the file monitor will send events to the client
 (invoke an async callback) whenever a file is changed(as reported by os.stat)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/docs/Makefile new/rpyc-6.0.0/docs/Makefile
--- old/rpyc-5.3.1/docs/Makefile        2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/docs/Makefile        2024-02-24 00:30:15.000000000 +0100
@@ -10,9 +10,9 @@
 # Internal variables.
 PAPEROPT_a4     = -D latex_paper_size=a4
 PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+ALLSPHINXOPTS   = -n -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) 
$(SPHINXOPTS) .
 
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp 
epub latex latexpdf text man changes linkcheck doctest
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp 
epub latex latexpdf text man changes linkcheck doctest nitpick
 
 help:
        @echo "Please use \`make <target>' where <target> is one of"
@@ -124,6 +124,10 @@
        @echo "Link check complete; look for any errors in the above output " \
              "or in $(BUILDDIR)/linkcheck/output.txt."
 
+nitpick:
+       $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) -E -W --keep-going 
$(BUILDDIR)/html
+       @echo "Nit-picky build test and treating warnings as errors."
+
 doctest:
        $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
        @echo "Testing of doctests in the sources finished, look at the " \
@@ -131,7 +135,3 @@
 
 upload: html
        rsync -r -v $(BUILDDIR)/html/ 
gangesmaster,r...@web.sourceforge.net:htdocs/
-
-
-
-
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/docs/conf.py new/rpyc-6.0.0/docs/conf.py
--- old/rpyc-5.3.1/docs/conf.py 2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/docs/conf.py 2024-02-24 00:30:15.000000000 +0100
@@ -28,7 +28,7 @@
 
 # Add any Sphinx extension module names here, as strings. They can be 
extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 
'sphinx.ext.doctest']
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ['_templates']
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/docs/docs/advanced-debugging.rst 
new/rpyc-6.0.0/docs/docs/advanced-debugging.rst
--- old/rpyc-5.3.1/docs/docs/advanced-debugging.rst     2023-02-22 
04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/docs/docs/advanced-debugging.rst     2024-02-24 
00:30:15.000000000 +0100
@@ -11,7 +11,7 @@
 
 .. code-block:: bash
 
-    versions=( 3.7-dev 3.8-dev 3.9-dev 3.10-dev 3.11-dev )
+    versions=( 3.7 3.8 3.9 3.10 3.11 3.12)
     for ver in ${versions[@]}; do
         pyenv install --force ${ver}
         pyenv global ${ver}
@@ -31,8 +31,8 @@
 
 .. code-block:: bash
 
-    PYENV_VERSION=3.9-dev pyenv exec python -m unittest discover -s ./tests -k 
test_affinity
-    PYENV_VERSION=3.8-dev pyenv exec python -m unittest discover -s ./tests
+    PYENV_VERSION=3.9-dev pyenv exec python -m unittest discover -v -k 
test_affinity
+    PYENV_VERSION=3.8-dev pyenv exec python -m unittest discover
 
 Testing Supported Python Versions via Docker
 --------------------------------------------
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/docs/docs/classic.rst 
new/rpyc-6.0.0/docs/docs/classic.rst
--- old/rpyc-5.3.1/docs/docs/classic.rst        2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/docs/docs/classic.rst        2024-02-24 00:30:15.000000000 
+0100
@@ -31,6 +31,6 @@
 
     remote_list = conn.builtin.range(7)
 
-    conn.execute("print 'foo'")
+    conn.execute("print('foo')")
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/docs/docs/howto.rst 
new/rpyc-6.0.0/docs/docs/howto.rst
--- old/rpyc-5.3.1/docs/docs/howto.rst  2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/docs/docs/howto.rst  2024-02-24 00:30:15.000000000 +0100
@@ -13,23 +13,23 @@
 
     >>> import rpyc
     >>> c = rpyc.classic.connect("localhost")
-    >>> c.execute("print 'hi there'")   # this will print on the host
+    >>> c.execute("print('hi there')")   # this will print on the host
     >>> import sys
     >>> c.modules.sys.stdout = sys.stdout
-    >>> c.execute("print 'hi here'")   # now this will be redirected here
+    >>> c.execute("print('hi here')")   # now this will be redirected here
     hi here
 
 Also note that if you are using classic mode RPyC, you can use the
 `context manager 
<http://www.python.org/doc/2.5.2/lib/typecontextmanager.html>`_
 ``rpyc.classic.redirected_stdio``::
 
-    >>> c.execute("print 'hi there'")                   # printed on the server
+    >>> c.execute("print('hi there')")                   # printed on the 
server
     >>>
     >>> with rpyc.classic.redirected_stdio(c):
-    ...     c.execute("print 'hi here'")                # printed on the client
+    ...     c.execute("print('hi here')")                # printed on the 
client
     ...
     hi here
-    >>> c.execute("print 'hi there again'")             # printed on the server
+    >>> c.execute("print('hi there again')")             # printed on the 
server
     >>>
 
 .. figure:: _static/howto-redirected.png
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/docs/docs/security.rst 
new/rpyc-6.0.0/docs/docs/security.rst
--- old/rpyc-5.3.1/docs/docs/security.rst       2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/docs/docs/security.rst       2024-02-24 00:30:15.000000000 
+0100
@@ -4,7 +4,7 @@
 ========
 Operating over a network always involve a certain security risk, and requires 
some awareness.
 Version 3 of RPyC was a rewrite of the library, specifically targeting 
security and
-service-orientation. Unlike version 2.6, RPyC no longer makes use of unsecure 
protocols like ``pickle``,
+service-orientation. Unlike version 2.6, RPyC no longer makes use of insecure 
protocols like ``pickle``,
 supports :data:`security-related configuration parameters 
<rpyc.core.protocol.DEFAULT_CONFIG>`,
 comes with strict defaults, and encourages the use of a capability-based 
security model. Even so, it behooves you to
 take a layered to secure programming and not let RPyC be a single point of 
failure.
@@ -23,7 +23,7 @@
 RPyC works by exposing a root object, which in turn may expose other objects 
(and so on). For
 instance, if you expose a module or an object that has a reference to the 
``sys`` module,
 a user may be able to reach it. After reaching ``sys``, the user can traverse 
``sys.modules`` and
-gain access to all of the modules that the server imports. More complex 
methodologies, similiar to those used in ``CVE-2019-16328``,
+gain access to all of the modules that the server imports. More complex 
methodologies, similar to those used in ``CVE-2019-16328``,
 could leverage access to ``builtins.str``, ``builtins.type``, 
``builtins.object``, and ``builtins.dict`` and gain access to
 ``sys`` modules. The default configurations for RPyC are intended to mitigate 
access to dangerous objects. But if you enable
 ``allow_public_attrs``, return uninitialized classes or override 
``_rpyc_getattr`` such things are likely to slip under the radar
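
The paragraph above recommends keeping the restrictive defaults; a hedged sketch
of the capability-style service it describes (the class, port, and config shown
here are illustrative, not part of this diff):

    import rpyc
    from rpyc.utils.server import ThreadedServer

    class CalculatorService(rpyc.Service):
        # Only names with the exposed_ prefix (or @rpyc.exposed) are reachable
        # by default; everything else stays private to the server process.
        def exposed_add(self, a, b):
            return a + b

        def _internal_helper(self):  # unreachable unless allow_public_attrs is enabled
            return "server-only"

    # Keep the restrictive defaults rather than enabling allow_public_attrs
    # or allow_pickle; port number is hypothetical.
    server = ThreadedServer(CalculatorService, port=18861,
                            protocol_config={"allow_public_attrs": False})
    # server.start()  # blocking; left commented so the sketch stays import-safe
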
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/docs/docs/services.rst 
new/rpyc-6.0.0/docs/docs/services.rst
--- old/rpyc-5.3.1/docs/docs/services.rst       2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/docs/docs/services.rst       2024-02-24 00:30:15.000000000 
+0100
@@ -64,7 +64,7 @@
         def div(self, a, b):
             return a / b
         def foo(self):
-            print "foo"
+            print("foo")
 
 When implementing services, ``@rpyc.service`` and ``@rpyc.exposed`` can 
replace the ``exposed_`` naming
 convention.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/docs/docs/usecases.rst 
new/rpyc-6.0.0/docs/docs/usecases.rst
--- old/rpyc-5.3.1/docs/docs/usecases.rst       2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/docs/docs/usecases.rst       2024-02-24 00:30:15.000000000 
+0100
@@ -56,7 +56,7 @@
 
 Parallel Execution
 ------------------
-In CPython, the `GIL <http://wiki.python.org/moin/GlobalInterpreterLock>`_ 
prevents mutliple
+In CPython, the `GIL <http://wiki.python.org/moin/GlobalInterpreterLock>`_ 
prevents multiple
 threads from executing python bytecode at once. This simplifies the design of 
the python
 interpreter, but the consequence of which is that CPython cannot utilize 
multiple/multicore
 CPUs. The only way to achieve scalable, CPU-bound python programs is to use 
multiple processes,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/docs/index.rst 
new/rpyc-6.0.0/docs/index.rst
--- old/rpyc-5.3.1/docs/index.rst       2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/docs/index.rst       2024-02-24 00:30:15.000000000 +0100
@@ -74,7 +74,7 @@
   protocol, and requiring no complex setup (name servers, HTTP, URL-mapping, 
etc.)
 
 * **Secure** - employs a `Capability based 
<http://en.wikipedia.org/wiki/Capability-based_security>`_
-  security model; intergrates easily with SSH
+  security model; integrates easily with SSH
 
 * **Zero-Deploy Enabled** -- Read more about :ref:`Zero-Deploy RPyC 
<zerodeploy>`
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/docs/install.rst 
new/rpyc-6.0.0/docs/install.rst
--- old/rpyc-5.3.1/docs/install.rst     2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/docs/install.rst     2024-02-24 00:30:15.000000000 +0100
@@ -45,7 +45,7 @@
 of Python incompatible with one another, and sadly, this cannot be bridged
 automatically by RPyC at the serialization layer.
 
-It's not that I didn't try -- it's just too hard a feat. It's bascially like
+It's not that I didn't try -- it's just too hard a feat. It's basically like
 writing a 100% working `2to3 <http://docs.python.org/library/2to3.html>`_ tool,
 alongside with a matching ``3to2`` one; and that, I reckon, is comparable to
 the *halting problem* (of course I might be wrong here, but it still doesn't
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/docs/tutorial/tut1.rst 
new/rpyc-6.0.0/docs/tutorial/tut1.rst
--- old/rpyc-5.3.1/docs/tutorial/tut1.rst       2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/docs/tutorial/tut1.rst       2024-02-24 00:30:15.000000000 
+0100
@@ -57,7 +57,7 @@
     >>> rsys.argv
     ['bin/rpyc_classic.py']
 
-…add module search pathes for the server's import mechanism::
+…add module search paths for the server's import mechanism::
 
     >>> rsys.path.append('/tmp/totally-secure-package-location)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/docs/tutorial/tut5.rst 
new/rpyc-6.0.0/docs/tutorial/tut5.rst
--- old/rpyc-5.3.1/docs/tutorial/tut5.rst       2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/docs/tutorial/tut5.rst       2024-02-24 00:30:15.000000000 
+0100
@@ -1,7 +1,7 @@
 .. _tut5:
 
-Part 5: Asynchrounous Operation and Events
-==========================================
+Part 5: Asynchronous Operation and Events
+=========================================
 
 Asynchronism
 ------------
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/pyproject.toml 
new/rpyc-6.0.0/pyproject.toml
--- old/rpyc-5.3.1/pyproject.toml       2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/pyproject.toml       2024-02-24 00:30:15.000000000 +0100
@@ -8,7 +8,7 @@
 name = "rpyc"
 description = "Remote Python Call (RPyC) is a transparent and symmetric 
distributed computing library"
 readme = "README.rst"
-license = "MIT"
+license = {text = "MIT License"}
 requires-python = ">=3.7"
 authors = [
     { name = "Tomer Filiba", email = "tomerfil...@gmail.com" },
@@ -26,6 +26,7 @@
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
     "Topic :: Internet",
     "Topic :: Software Development :: Libraries :: Python Modules",
     "Topic :: Software Development :: Object Brokering",
@@ -62,3 +63,8 @@
 
 [tool.hatch.build.targets.wheel]
 only-include = ["rpyc"]
+
+[tool.codespell]
+skip = '.git,*.pdf,*.svg'
+#
+# ignore-words-list = ''
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/rpyc/core/async_.py 
new/rpyc-6.0.0/rpyc/core/async_.py
--- old/rpyc-5.3.1/rpyc/core/async_.py  2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/rpyc/core/async_.py  2024-02-24 00:30:15.000000000 +0100
@@ -44,16 +44,19 @@
         """Waits for the result to arrive. If the AsyncResult object has an
         expiry set, and the result did not arrive within that timeout,
         an :class:`AsyncResultTimeout` exception is raised"""
-        while not (self._is_ready or self.expired):
+        while self._waiting():
             # Serve the connection since we are not ready. Suppose
             # the reply for our seq is served. The callback is this class
             # so __call__ sets our obj and _is_ready to true.
-            self._conn.serve(self._ttl)
+            self._conn.serve(self._ttl, waiting=self._waiting)
 
         # Check if we timed out before result was ready
         if not self._is_ready:
             raise AsyncResultTimeout("result expired")
 
+    def _waiting(self):
+        return not (self._is_ready or self.expired)
+
     def add_callback(self, func):
         """Adds a callback to be invoked when the result arrives. The callback
         function takes a single argument, which is the current AsyncResult
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/rpyc/core/netref.py 
new/rpyc-6.0.0/rpyc/core/netref.py
--- old/rpyc-5.3.1/rpyc/core/netref.py  2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/rpyc/core/netref.py  2024-02-24 00:30:15.000000000 +0100
@@ -4,7 +4,7 @@
 import sys
 import types
 from rpyc.lib import get_methods, get_id_pack
-from rpyc.lib.compat import pickle, maxint, with_metaclass
+from rpyc.lib.compat import pickle, maxint
 from rpyc.core import consts
 
 
@@ -83,7 +83,6 @@
     """A *metaclass* used to customize the ``__repr__`` of ``netref`` classes.
     It is quite useless, but it makes debugging and interactive programming
     easier"""
-
     __slots__ = ()
 
     def __repr__(self):
@@ -93,13 +92,13 @@
             return f"<netref class '{self.__name__}'>"
 
 
-class BaseNetref(with_metaclass(NetrefMetaclass, object)):
+class BaseNetref(object, metaclass=NetrefMetaclass):
     """The base netref class, from which all netref classes derive. Some netref
     classes are "pre-generated" and cached upon importing this module (those
     defined in the :data:`_builtin_types`), and they are shared between all
     connections.
 
-    The rest of the netref classes are created by 
:meth:`rpyc.core.protocl.Connection._unbox`,
+    The rest of the netref classes are created by 
:meth:`rpyc.core.protocol.Connection._unbox`,
     and are private to the connection.
 
     Do not use this class directly; use :func:`class_factory` instead.
@@ -252,6 +251,9 @@
         def __array__(self):
             # Note that protocol=-1 will only work between python
             # interpreters of the same version.
+            if not 
object.__getattribute__(self,'____conn__')._config["allow_pickle"]:
+                # Security check that server side allows pickling per #551
+                raise ValueError("pickling is disabled")
             return pickle.loads(syncreq(self, consts.HANDLE_PICKLE, -1))
         __array__.__doc__ = doc
         return __array__
@@ -320,16 +322,18 @@
                 _class = getattr(_module, _class_name, None)
                 if _class is not None and hasattr(_class, '__class__'):
                     class_descriptor = NetrefClass(_class)
+                elif _class is None:
+                    class_descriptor = NetrefClass(type(_module))
                 break
     ns['__class__'] = class_descriptor
-    netref_name = class_descriptor.owner.__name__ if class_descriptor is not 
None else name_pack
     # create methods that must perform a syncreq
     for name, doc in methods:
         name = str(name)  # IronPython issue #10
-        # only create methods that wont shadow BaseNetref during merge for mro
+        # only create methods that won't shadow BaseNetref during merge for mro
         if name not in LOCAL_ATTRS:  # i.e. `name != __class__`
             ns[name] = _make_method(name, doc)
-    return type(netref_name, (BaseNetref,), ns)
+    netref_cls = type(name_pack, (BaseNetref, ), ns)
+    return netref_cls
 
 
 for _builtin in _builtin_types:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/rpyc/core/protocol.py 
new/rpyc-6.0.0/rpyc/core/protocol.py
--- old/rpyc-5.3.1/rpyc/core/protocol.py        2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/rpyc/core/protocol.py        2024-02-24 00:30:15.000000000 
+0100
@@ -212,7 +212,16 @@
         # self._config.clear()
         del self._HANDLERS
         if self._bind_threads:
-            self._thread_pool_executor.shutdown(wait=False)  # TODO where?
+            self._thread_pool_executor.shutdown(wait=True)  # TODO where?
+        if _anyway:
+            try:
+                self._recvlock.release()
+            except Exception:
+                pass
+            try:
+                self._sendlock.release()
+            except Exception:
+                pass
 
     def close(self):  # IO
         """closes the connection, releasing all held resources"""
@@ -222,8 +231,9 @@
             self._closed = True
             if self._config.get("before_closed"):
                 self._config["before_closed"](self.root)
-            self._async_request(consts.HANDLE_CLOSE)
-        except EOFError:
+            # TODO: define invariants/expectations around close sequence and 
timing
+            self.sync_request(consts.HANDLE_CLOSE)
+        except (EOFError, TimeoutError):
             pass
         except Exception:
             if not self._config["close_catchall"]:
@@ -260,7 +270,7 @@
         return next(self._seqcounter)
 
     def _send(self, msg, seq, args):  # IO
-        data = brine.dump((msg, seq, args))
+        data = brine.I1.pack(msg) + brine.dump((seq, args))  # see _dispatch
         if self._bind_threads:
             this_thread = self._get_thread()
             data = brine.I8I8.pack(this_thread.id, 
this_thread._remote_thread_id) + data
@@ -357,7 +367,7 @@
             handler, args = raw_args
             args = self._unbox(args)
             res = self._HANDLERS[handler](self, *args)
-        except:  # TODO: revist how to catch handle locally, this should 
simplify when py2 is dropped
+        except:  # TODO: revisit how to catch handle locally, this should 
simplify when py2 is dropped
             # need to catch old style exceptions too
             t, v, tb = sys.exc_info()
             self._last_traceback = tb
@@ -392,10 +402,13 @@
             self._config["logger"].debug(debug_msg.format(msg, seq))
 
     def _dispatch(self, data):  # serving---dispatch?
-        msg, seq, args = brine.load(data)
+        msg, = brine.I1.unpack(data[:1])  # unpack just msg to minimize time 
to release
         if msg == consts.MSG_REQUEST:
             if self._bind_threads:
                 self._get_thread()._occupation_count += 1
+            else:
+                self._recvlock.release()
+            seq, args = brine.load(data[1:])
             self._dispatch_request(seq, args)
         else:
             if self._bind_threads:
@@ -404,15 +417,21 @@
                 if this_thread._occupation_count == 0:
                     this_thread._remote_thread_id = UNBOUND_THREAD_ID
             if msg == consts.MSG_REPLY:
+                seq, args = brine.load(data[1:])
                 obj = self._unbox(args)
                 self._seq_request_callback(msg, seq, False, obj)
+                if not self._bind_threads:
+                    self._recvlock.release()  # releasing here fixes race 
condition with AsyncResult.wait
             elif msg == consts.MSG_EXCEPTION:
+                if not self._bind_threads:
+                    self._recvlock.release()
+                seq, args = brine.load(data[1:])
                 obj = self._unbox_exc(args)
                 self._seq_request_callback(msg, seq, True, obj)
             else:
                 raise ValueError(f"invalid message type: {msg!r}")
 
-    def serve(self, timeout=1, wait_for_lock=True):  # serving
+    def serve(self, timeout=1, wait_for_lock=True, waiting=lambda: True):  # 
serving
         """Serves a single request or reply that arrives within the given
         time frame (default is 1 sec). Note that the dispatching of a request
         might trigger multiple (nested) requests, thus this function may be
@@ -427,10 +446,17 @@
             # Exit early if we cannot acquire the recvlock
             if not self._recvlock.acquire(False):
                 if wait_for_lock:
+                    if not waiting():  # unlikely, but the result could've 
arrived and another thread could've won the race to acquire
+                        return False
                     # Wait condition for recvlock release; recvlock is not 
underlying lock for condition
                     return self._recv_event.wait(timeout.timeleft())
                 else:
                     return False
+        if not waiting():  # the result arrived and we won the race to 
acquire, unlucky
+            self._recvlock.release()
+            with self._recv_event:
+                self._recv_event.notify_all()
+            return False
         # Assume the receive rlock is acquired and incremented
         # We must release once BEFORE dispatch, dispatch any data, and THEN 
notify all (see issue #527 and #449)
         try:
@@ -442,11 +468,11 @@
                 self.close()  # sends close async request
             raise
         else:
-            self._recvlock.release()
             if data:
                 self._dispatch(data)  # Dispatch will unbox, invoke callbacks, 
etc.
                 return True
             else:
+                self._recvlock.release()
                 return False
         finally:
             with self._recv_event:
@@ -708,7 +734,7 @@
     def sync_request(self, handler, *args):
         """requests, sends a synchronous request (waits for the reply to 
arrive)
 
-        :raises: any exception that the requets may be generated
+        :raises: any exception that the requests may be generated
         :returns: the result of the request
         """
         timeout = self._config["sync_request_timeout"]
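
The async_.py and protocol.py hunks above work together: AsyncResult.wait now
hands a "waiting" predicate to Connection.serve, so a thread that wins the race
for the receive lock can back out (and wake the other waiters) once its own
result has already arrived; tests/test_race.py further down exercises the old
deadlock. A hedged, simplified sketch of the pattern using plain threading
primitives (not rpyc's actual lock, condition, or dispatch machinery):

    import threading

    _recv_lock = threading.Lock()
    _recv_event = threading.Condition()

    def serve_once(waiting, receive_one, timeout=1.0):
        """Serve at most one message, giving up early if waiting() turns False."""
        if not _recv_lock.acquire(False):
            if not waiting():      # result arrived while we contended for the lock
                return False
            with _recv_event:      # otherwise wait for the lock holder to finish
                return _recv_event.wait(timeout)
        if not waiting():          # won the race, but no longer need any data
            _recv_lock.release()
            with _recv_event:
                _recv_event.notify_all()
            return False
        try:
            data = receive_one()   # stand-in for reading one message off the stream
        finally:
            _recv_lock.release()
            with _recv_event:
                _recv_event.notify_all()
        return bool(data)          # real code dispatches the message as well

    def wait_for(result_ready, result_expired, receive_one):
        # Mirrors AsyncResult.wait(): the loop passes its own readiness check down.
        waiting = lambda: not (result_ready() or result_expired())
        while waiting():
            serve_once(waiting, receive_one)
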
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/rpyc/lib/compat.py 
new/rpyc-6.0.0/rpyc/lib/compat.py
--- old/rpyc-5.3.1/rpyc/lib/compat.py   2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/rpyc/lib/compat.py   2024-02-24 00:30:15.000000000 +0100
@@ -171,7 +171,7 @@
 def with_metaclass(meta, *bases):
     """Create a base class with a metaclass."""
     # dummy metaclass that replaces itself with the actual metaclass after
-    # one level of class instanciation:
+    # one level of class instantiation:
     class metaclass(type):
         def __new__(cls, name, this_bases, d):
             return meta(name, bases, d)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/rpyc/utils/classic.py 
new/rpyc-6.0.0/rpyc/utils/classic.py
--- old/rpyc-5.3.1/rpyc/utils/classic.py        2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/rpyc/utils/classic.py        2024-02-24 00:30:15.000000000 
+0100
@@ -163,7 +163,7 @@
 def connect_multiprocess(args={}):
     """
     Starts a SlaveService on a multiprocess process and connects to it.
-    Useful for testing purposes and running multicore code thats uses shared
+    Useful for testing purposes and running multicore code that uses shared
     memory. See :func:`rpyc.utils.factory.connect_multiprocess`
 
     :returns: an RPyC connection exposing ``SlaveService``
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/rpyc/utils/factory.py 
new/rpyc-6.0.0/rpyc/utils/factory.py
--- old/rpyc-5.3.1/rpyc/utils/factory.py        2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/rpyc/utils/factory.py        2024-02-24 00:30:15.000000000 
+0100
@@ -278,7 +278,7 @@
     from subprocess import Popen, PIPE
     proc = Popen(args, stdin=PIPE, stdout=PIPE)
     conn = connect_pipes(proc.stdout, proc.stdin, service=service, 
config=config)
-    conn.proc = proc  # just so you can have control over the processs
+    conn.proc = proc  # just so you can have control over the process
     return conn
 
 
@@ -325,7 +325,7 @@
     """starts an rpyc server on a new process, bound to an arbitrary port,
     and connects to it over a socket. Basically a copy of connect_thread().
     However if args is used and if these are shared memory then changes
-    will be bi-directional. That is we now have access to shared memmory.
+    will be bi-directional. That is we now have access to shared memory.
 
     :param service: the local service to expose (defaults to Void)
     :param config: configuration dict
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/rpyc/utils/helpers.py 
new/rpyc-6.0.0/rpyc/utils/helpers.py
--- old/rpyc-5.3.1/rpyc/utils/helpers.py        2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/rpyc/utils/helpers.py        2024-02-24 00:30:15.000000000 
+0100
@@ -193,7 +193,11 @@
     """Runs an RPyC server in the background to serve all requests and replies
     that arrive on the given RPyC connection. The thread is started upon the
     the instantiation of the ``BgServingThread`` object; you can use the
-    :meth:`stop` method to stop the server thread
+    :meth:`stop` method to stop the server thread.
+
+    CAVEAT: RPyC defaults to bind_threads as False. So, there is no guarantee 
that the
+    background thread will serve the request. See issue #522 for an example of 
this behavior.
+    As the bind_threads feature matures, we may change the default to True 
in the future.
 
     Example::
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/rpyc/utils/registry.py 
new/rpyc-6.0.0/rpyc/utils/registry.py
--- old/rpyc-5.3.1/rpyc/utils/registry.py       2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/rpyc/utils/registry.py       2024-02-24 00:30:15.000000000 
+0100
@@ -143,11 +143,11 @@
             except Exception:
                 continue
             if magic != "RPYC":
-                self.logger.warn(f"invalid magic: {magic!r}")
+                self.logger.warning(f"invalid magic: {magic!r}")
                 continue
             cmdfunc = getattr(self, f"cmd_{cmd.lower()}", None)
             if not cmdfunc:
-                self.logger.warn(f"unknown command: {cmd!r}")
+                self.logger.warning(f"unknown command: {cmd!r}")
                 continue
 
             try:
@@ -169,7 +169,7 @@
             self.active = True
             self._work()
         except KeyboardInterrupt:
-            self.logger.warn("User interrupt!")
+            self.logger.warning("User interrupt!")
         finally:
             self.active = False
             self.logger.debug("server closed")
@@ -392,7 +392,7 @@
                     data, address = sock.recvfrom(MAX_DGRAM_SIZE)
                     rip, rport = address[:2]
                 except socket.timeout:
-                    self.logger.warn("no registry acknowledged")
+                    self.logger.warning("no registry acknowledged")
                     return False
                 if rport != self.port:
                     continue
@@ -404,7 +404,7 @@
                     self.logger.info(f"registry {rip}:{rport} acknowledged")
                     return True
             else:
-                self.logger.warn("no registry acknowledged")
+                self.logger.warning("no registry acknowledged")
                 return False
 
     def unregister(self, port):
@@ -481,17 +481,17 @@
                 sock.connect((self.ip, self.port))
                 sock.send(data)
             except (socket.error, socket.timeout):
-                self.logger.warn("could not connect to registry")
+                self.logger.warning("could not connect to registry")
                 return False
             try:
                 data = sock.recv(MAX_DGRAM_SIZE)
             except socket.timeout:
-                self.logger.warn("registry did not acknowledge")
+                self.logger.warning("registry did not acknowledge")
                 return False
             try:
                 reply = brine.load(data)
             except Exception:
-                self.logger.warn("received corrupted data from registry")
+                self.logger.warning("received corrupted data from registry")
                 return False
             if reply == "OK":
                 self.logger.info(f"registry {self.ip}:{self.port} 
acknowledged")
@@ -508,4 +508,4 @@
                 sock.connect((self.ip, self.port))
                 sock.send(data)
             except (socket.error, socket.timeout):
-                self.logger.warn("could not connect to registry")
+                self.logger.warning("could not connect to registry")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/rpyc/utils/server.py 
new/rpyc-6.0.0/rpyc/utils/server.py
--- old/rpyc-5.3.1/rpyc/utils/server.py 2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/rpyc/utils/server.py 2024-02-24 00:30:15.000000000 +0100
@@ -374,7 +374,7 @@
         '''removes a connection by closing it and removing it from internal 
structs'''
         conn = None
 
-        # cleanup fd_to_conn dictionnary
+        # cleanup fd_to_conn dictionary
         try:
             conn = self.fd_to_conn[fd]
             del self.fd_to_conn[fd]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/rpyc/version.py 
new/rpyc-6.0.0/rpyc/version.py
--- old/rpyc-5.3.1/rpyc/version.py      2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/rpyc/version.py      2024-02-24 00:30:15.000000000 +0100
@@ -1,3 +1,3 @@
-__version__ = '5.3.1'
+__version__ = '6.0.0'
 version = tuple(__version__.split('.'))
-release_date = "2023-02-21"
+release_date = "2024-02-23"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/tests/__init__.py 
new/rpyc-6.0.0/tests/__init__.py
--- old/rpyc-5.3.1/tests/__init__.py    1970-01-01 01:00:00.000000000 +0100
+++ new/rpyc-6.0.0/tests/__init__.py    2024-02-24 00:30:15.000000000 +0100
@@ -0,0 +1,17 @@
+import sys
+import rpyc
+from pathlib import Path
+
+def load_tests(loader, standard_tests, pattern):
+    # Hook rpyc logger, unittest verbosity, and system paths
+    #rpyc.core.DEFAULT_CONFIG['logger'] = rpyc.lib.setup_logger()
+    rpyc_tests_path = Path(__file__).absolute().parent
+    rpyc_path = rpyc_tests_path.parent
+    for p in [str(rpyc_path), str(rpyc_tests_path)]:
+        if p not in sys.path:
+            sys.path.insert(0, p)
+
+    # Discover on tests and add paths
+    tests = loader.discover(start_dir=rpyc_tests_path, pattern=pattern, 
top_level_dir=rpyc_path)
+    standard_tests.addTests(tests)
+    return standard_tests
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/tests/test_deploy.py 
new/rpyc-6.0.0/tests/test_deploy.py
--- old/rpyc-5.3.1/tests/test_deploy.py 2023-02-22 04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/tests/test_deploy.py 2024-02-24 00:30:15.000000000 +0100
@@ -7,6 +7,7 @@
 from plumbum import SshMachine
 from plumbum.machines.paramiko_machine import ParamikoMachine
 from rpyc.utils.zerodeploy import DeployedServer
+from rpyc.core import DEFAULT_CONFIG
 try:
     import paramiko  # noqa
     _paramiko_import_failed = False
@@ -17,6 +18,7 @@
 class TestDeploy(unittest.TestCase):
     def test_deploy(self):
         rem = SshMachine("localhost")
+        rem.env['RPYC_BIND_THREADS'] = 
str(DEFAULT_CONFIG['bind_threads']).lower()
         SshMachine.python = rem[sys.executable]
         with DeployedServer(rem) as dep:
             conn = dep.classic_connect()
@@ -45,6 +47,7 @@
         try:
             subprocess.Popen.communicate = replacement_communicate
             rem = SshMachine("localhost")
+            rem.env['RPYC_BIND_THREADS'] = 
str(DEFAULT_CONFIG['bind_threads']).lower()
             SshMachine.python = rem[sys.executable]
             dep = DeployedServer(rem)
             conn = dep.classic_connect()
@@ -68,6 +71,7 @@
         try:
             subprocess.Popen.communicate = replacement_communicate
             rem = SshMachine("localhost")
+            rem.env['RPYC_BIND_THREADS'] = 
str(DEFAULT_CONFIG['bind_threads']).lower()
             SshMachine.python = rem[sys.executable]
             dep = DeployedServer(rem)
             conn = dep.classic_connect()
@@ -82,6 +86,7 @@
     @unittest.skipIf(_paramiko_import_failed, "Paramiko is not available")
     def test_deploy_paramiko(self):
         rem = ParamikoMachine("localhost", 
missing_host_policy=paramiko.AutoAddPolicy())
+        rem.env['RPYC_BIND_THREADS'] = 
str(DEFAULT_CONFIG['bind_threads']).lower()
         with DeployedServer(rem) as dep:
             conn = dep.classic_connect()
             print(conn.modules.sys)
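
These test changes propagate the bind_threads setting (via the RPYC_BIND_THREADS
environment variable and DEFAULT_CONFIG['bind_threads']) to the deployed
servers, matching the BgServingThread caveat earlier in this diff. A hedged
sketch of opting in per connection, assuming the key can be passed like other
protocol config entries; host and port are hypothetical:

    import rpyc
    from rpyc.utils.helpers import BgServingThread

    # bind_threads defaults to False (see DEFAULT_CONFIG['bind_threads'] above),
    # so replies may otherwise be served by whichever thread happens to be serving.
    conn = rpyc.connect("localhost", 18812, config={"bind_threads": True})
    bg = BgServingThread(conn)
    try:
        print(conn.root)   # interact with the service while the background thread serves
    finally:
        bg.stop()
        conn.close()
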
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/tests/test_netref_hierachy.py 
new/rpyc-6.0.0/tests/test_netref_hierachy.py
--- old/rpyc-5.3.1/tests/test_netref_hierachy.py        2023-02-22 
04:42:03.000000000 +0100
+++ new/rpyc-6.0.0/tests/test_netref_hierachy.py        2024-02-24 
00:30:15.000000000 +0100
@@ -1,10 +1,15 @@
+import inspect
 import math
 import rpyc
 from rpyc.utils.server import ThreadedServer
 from rpyc import SlaveService
+from rpyc.core import netref
 import unittest
 
 
+logger = rpyc.lib.setup_logger()
+
+
 class MyMeta(type):
 
     def spam(self):
@@ -45,11 +50,44 @@
 
     def exposed_getnonetype(self):
         """ About the unit test - what's common to types.MethodType and 
NoneType is that both are
-        not accessible via builtins. So the unit test I've added in 108ff8e 
was enough to 
+        not accessible via builtins. So the unit test I've added in 108ff8e 
was enough to
         my understanding (implement it with NoneType because that's more 
easily "created") """
         return type(None)
 
 
+class TestBaseNetrefMRO(unittest.TestCase):
+    def setUp(self):
+        self.conn = rpyc.classic.connect_thread()
+
+    def tearDown(self):
+        self.conn.close()
+        self.conn = None
+
+    def test_mro(self):
+        # TODO: netref.class_factory, redesign to register builtin types and 
better handle generic-aliases/types
+        #   - components to explore: abc.ABCMeta, abc.ABC.register types
+        #   - add mro test for netrefs to remote builtins
+        self.assertEqual(netref.NetrefMetaclass.__mro__, 
(netref.NetrefMetaclass, type, object))
+
+    def test_basenetref(self):
+        self.assertIsInstance(netref.BaseNetref, netref.NetrefMetaclass)
+        self.assertIsInstance(netref.BaseNetref, object)
+        mro = inspect.getmro(netref.BaseNetref)
+        self.assertEqual(mro, (netref.BaseNetref, object))
+
+    def test_builtins_dict_netref(self):
+        cls = netref.builtin_classes_cache['builtins.dict']
+        mro_netref = inspect.getmro(cls)
+        mro_dict = inspect.getmro(dict)
+        logger.debug('\n')
+        logger.debug(f'dict_netref: {mro_netref}')
+        logger.debug(f'dict:        {mro_dict}')
+        self.conn.execute("dict_ = dict(a=0xd35db33f)")
+        remote_dict = self.conn.namespace['dict_']
+        logger.debug(f'remote_dict: {remote_dict}')
+        self.assertEqual(remote_dict['a'], 3546133311)
+
+
 class Test_Netref_Hierarchy(unittest.TestCase):
 
     @classmethod
@@ -60,7 +98,6 @@
 
     def setUp(self):
         self.conn = rpyc.classic.connect('localhost', port=18878)
-        self.conn2 = None
 
     @classmethod
     def tearDownClass(cls):
@@ -68,8 +105,6 @@
 
     def tearDown(self):
         self.conn.close()
-        if self.conn2 is not None:
-            self.conn2.close()
 
     def test_instancecheck_across_connections(self):
         self.conn2 = rpyc.classic.connect('localhost', port=18878)
@@ -91,9 +126,16 @@
             isinstance([], x)
         i = 0
         self.assertTrue(type(x).__getitem__(x, i) == x.__getitem__(i))
-        _builtins = self.conn.modules.builtins if rpyc.lib.compat.is_py_3k 
else self.conn.modules.__builtin__
-        self.assertEqual(repr(_builtins.float.__class__), repr(type))
-        self.assertEqual(repr(type(_builtins.float)), 
repr(type(_builtins.type)))
+
+    def test_builtins(self):
+        _builtins = self.conn.modules.builtins
+        self.assertEqual(repr(_builtins.dict), repr(dict))  # Check repr 
behavior of netref matches local
+        self.assertEqual(repr(type(_builtins.dict.__class__)), repr(type))  # 
Check netref __class__ is type
+        self.assertIs(type(_builtins.dict.__class__), type)
+        # Check class descriptor for netrefs
+        dict_ = _builtins.dict(space='remote')
+        self.assertIs(type(dict_).__dict__['__class__'].instance, dict)
+        self.assertIs(type(dict_).__dict__['__class__'].owner, type)
 
     def test_instancecheck_list(self):
         service = MyService()
@@ -122,27 +164,28 @@
 
     def test_modules(self):
         """
-        >>> type(sys)
-        <type 'module'>  # base case
-        >>> type(conn.modules.sys)
-        <netref class 'rpyc.core.netref.__builtin__.module'>  # matches base 
case
-        >>> sys.__class__
-        <type 'module'>  # base case
-        >>> conn.modules.sys.__class__
-        <type 'module'>  # matches base case
-        >>> type(sys.__class__)
-        <type 'type'>  # base case
-        >>> type(conn.modules.sys.__class__)
-        <netref class 'rpyc.core.netref.__builtin__.module'>  # doesn't match.
-        # ^Should be a netref class of "type" (or maybe just <type 'type'> 
itself?)
+        >>> type(unittest)
+        <class 'module'>
+        >>> type(self.conn.modules.unittest)
+        <netref class 'rpyc.core.netref.unittest'>  # reflects that it is a 
proxy object to unittest
+        >>> unittest.__class__
+        <class 'module'>  # base case
+        >>> conn.modules.unittest.__class__
+        <class 'module'>  # matches base case
+        >>> type(unittest.__class__)
+        <class 'type'>  # base case
+        >>> type(conn.modules.unittest.__class__)
+        <class 'type'>  # matches base case
         """
-        import sys
-        self.assertEqual(repr(sys.__class__), 
repr(self.conn.modules.sys.__class__))
-        # _builtin = sys.modules['builtins' if rpyc.lib.compat.is_py_3k else 
'__builtins__'].__name__
-        # self.assertEqual(repr(type(self.conn.modules.sys)),
-        #                  "<netref class 
'rpyc.core.netref.{}.module'>".format(_builtin))
-        # self.assertEqual(repr(type(self.conn.modules.sys.__class__)),
-        #                  "<netref class 
'rpyc.core.netref.{}.type'>".format(_builtin))
+        self.assertEqual(repr(self.conn.modules.unittest), repr(unittest))
+        self.assertEqual(repr(type(self.conn.modules.unittest)), "<netref 
class 'rpyc.core.netref.unittest'>")
+        self.assertIs(self.conn.modules.unittest.__class__, type(unittest))
+        self.assertIs(type(self.conn.modules.unittest.__class__), type)
+
+    def test_proxy_instancecheck(self):
+        self.assertIsInstance(self.conn.modules.builtins.RuntimeError(), 
Exception)
+        # TODO: below should pass
+        # self.assertIsInstance(self.conn.modules.builtins.RuntimeError(), 
self.conn.modules.builtins.Exception)
 
 
 if __name__ == '__main__':
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/tests/test_race.py 
new/rpyc-6.0.0/tests/test_race.py
--- old/rpyc-5.3.1/tests/test_race.py   1970-01-01 01:00:00.000000000 +0100
+++ new/rpyc-6.0.0/tests/test_race.py   2024-02-24 00:30:15.000000000 +0100
@@ -0,0 +1,70 @@
+import rpyc
+import rpyc.core.async_ as rc_async_
+import rpyc.core.protocol as rc_protocol
+import contextlib
+import signal
+import threading
+import time
+import unittest
+
+
+class TestRace(unittest.TestCase):
+    def setUp(self):
+        self.connection = rpyc.classic.connect_thread()
+
+        self.a_str = rpyc.async_(self.connection.builtin.str)
+
+    def tearDown(self):
+        self.connection.close()
+
+    def test_asyncresult_race(self):
+        with _patch():
+            def hook():
+                time.sleep(0.2)  # loose race
+
+            _AsyncResult._HOOK = hook
+
+            threading.Thread(target=self.connection.serve_all).start()
+            time.sleep(0.1)  # wait for thread to serve
+
+            # schedule KeyboardInterrupt
+            thread_id = threading.get_ident()
+            _ = lambda: signal.pthread_kill(thread_id, signal.SIGINT)
+            timer = threading.Timer(1, _)
+            timer.start()
+
+            a_result = self.a_str("")  # request
+            time.sleep(0.1)  # wait for race to start
+            try:
+                a_result.wait()
+            except KeyboardInterrupt:
+                raise Exception("deadlock")
+
+            timer.cancel()
+
+
+class _AsyncResult(rc_async_.AsyncResult):
+    _HOOK = None
+
+    def __call__(self, *args, **kwargs):
+        hook = type(self)._HOOK
+        if hook is not None:
+            hook()
+        return super().__call__(*args, **kwargs)
+
+
+@contextlib.contextmanager
+def _patch():
+    AsyncResult = rc_async_.AsyncResult
+    try:
+        rc_async_.AsyncResult = _AsyncResult
+        rc_protocol.AsyncResult = _AsyncResult  # from import
+        yield
+
+    finally:
+        rc_async_.AsyncResult = AsyncResult
+        rc_protocol.AsyncResult = AsyncResult
+
+
+if __name__ == "__main__":
+    unittest.main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/tests/test_registry.py 
new/rpyc-6.0.0/tests/test_registry.py
--- old/rpyc-5.3.1/tests/test_registry.py       2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/tests/test_registry.py       2024-02-24 00:30:15.000000000 
+0100
@@ -79,15 +79,16 @@
 
 class TestTcpRegistry(BaseRegistryTest, unittest.TestCase):
     def _get_server(self):
-        return TCPRegistryServer(pruning_timeout=PRUNING_TIMEOUT, 
allow_listing=True)
+        return TCPRegistryServer(host="127.0.0.1", 
pruning_timeout=PRUNING_TIMEOUT, allow_listing=True)
 
     def _get_client(self):
-        return TCPRegistryClient("localhost")
+        return TCPRegistryClient(ip="127.0.0.1")
 
 
 class TestUdpRegistry(BaseRegistryTest, unittest.TestCase):
+    """ May fail due to iptables/packet-drops. """
     def _get_server(self):
-        return UDPRegistryServer(pruning_timeout=PRUNING_TIMEOUT, 
allow_listing=True)
+        return UDPRegistryServer(host="0.0.0.0", 
pruning_timeout=PRUNING_TIMEOUT, allow_listing=True)
 
     def _get_client(self):
         return UDPRegistryClient()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/tests/test_urllib3.py 
new/rpyc-6.0.0/tests/test_urllib3.py
--- old/rpyc-5.3.1/tests/test_urllib3.py        1970-01-01 01:00:00.000000000 
+0100
+++ new/rpyc-6.0.0/tests/test_urllib3.py        2024-02-24 00:30:15.000000000 
+0100
@@ -0,0 +1,41 @@
+import rpyc
+from rpyc.utils.server import ThreadedServer
+from rpyc import SlaveService
+import unittest
+try:
+    import urllib3
+    urllib3.request
+    _urllib3_import_failed = False
+except Exception:
+    _urllib3_import_failed = True
+
+
+@unittest.skipIf(_urllib3_import_failed or True, "urllib3 not available")
+class TestUrllib3(unittest.TestCase):
+    """ #547 """
+
+    def setUp(self):
+        self.cfg = {'sync_request_timeout': 60*60}
+        self.server = ThreadedServer(SlaveService, port=18878, 
auto_register=False, protocol_config=self.cfg)
+        self.server.logger.quiet = False
+        self.server._start_in_thread()
+        self.conn = rpyc.classic.connect('localhost', port=18878)
+
+    def tearDown(self):
+        self.conn.close()
+        self.server.close()
+
+    def test_issue(self):
+        self.conn.execute('import urllib3')
+        urllib3_ = self.conn.modules.urllib3
+        # headers = urllib3.HTTPHeaderDict()
+        # headers.add("Accept", "application/json")
+        # headers.add("Accept", "text/plain")
+        headers = {"X-Request-Id": "test"}
+        resp = urllib3_.request("POST", "https://httpbin.org/post", 
fields={"hello": "world"}, headers=headers)
+        __import__('code').interact(local=locals() | globals())
+        # self.assertTrue(self.conn.root.instance(remote_list, list))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/rpyc-5.3.1/tests/test_win32pipes.py 
new/rpyc-6.0.0/tests/test_win32pipes.py
--- old/rpyc-5.3.1/tests/test_win32pipes.py     2023-02-22 04:42:03.000000000 
+0100
+++ new/rpyc-6.0.0/tests/test_win32pipes.py     2024-02-24 00:30:15.000000000 
+0100
@@ -7,7 +7,7 @@
 
 
 @unittest.skipIf(sys.platform != "win32", "Requires windows")
-class Test_Pipes(unittest.TestCase):
+class TestPipes(unittest.TestCase):
     def test_basic_io(self):
         p1, p2 = PipeStream.create_pair()
         p1.write(BYTES_LITERAL("hello"))
@@ -36,7 +36,7 @@
 
 
 @unittest.skipIf(sys.platform != "win32", "Requires windows")
-class Test_NamedPipe(object):
+class TestNamedPipe(unittest.TestCase):
     def setUp(self):
         self.pipe_server_thread = rpyc.spawn(self.pipe_server)
         time.sleep(1)  # make sure server is accepting already
