Author: Matti Picus <matti.pi...@gmail.com>
Branch: cpyext-macros-cast
Changeset: r84339:664e7d4392f4
Date: 2016-05-09 21:42 +0300
http://bitbucket.org/pypy/pypy/changeset/664e7d4392f4/

Log:    merge default into branch

diff too long, truncating to 2000 out of 3197 lines

diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py
--- a/lib_pypy/cffi/api.py
+++ b/lib_pypy/cffi/api.py
@@ -397,20 +397,7 @@
         data.  Later, when this new cdata object is garbage-collected,
         'destructor(old_cdata_object)' will be called.
         """
-        try:
-            gcp = self._backend.gcp
-        except AttributeError:
-            pass
-        else:
-            return gcp(cdata, destructor)
-        #
-        with self._lock:
-            try:
-                gc_weakrefs = self.gc_weakrefs
-            except AttributeError:
-                from .gc_weakref import GcWeakrefs
-                gc_weakrefs = self.gc_weakrefs = GcWeakrefs(self)
-            return gc_weakrefs.build(cdata, destructor)
+        return self._backend.gcp(cdata, destructor)
 
     def _get_cached_btype(self, type):
         assert self._lock.acquire(False) is False
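
For reference, the ffi.gc() behaviour this simplification relies on (and which the
new test in test_ffi_obj.py further down exercises) looks like this at app level.
A minimal sketch, not part of the changeset; it assumes the "remove destructor with
None" behaviour added here:

    import cffi, gc

    ffi = cffi.FFI()
    seen = []
    p = ffi.new("int *", 123)
    q = ffi.gc(p, lambda ptr: seen.append(ptr))   # destructor tied to 'q'
    assert ffi.gc(q, None) is None                # detach the destructor again
    del p, q
    gc.collect()
    assert seen == []                             # nothing runs: it was removed
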
diff --git a/lib_pypy/cffi/backend_ctypes.py b/lib_pypy/cffi/backend_ctypes.py
--- a/lib_pypy/cffi/backend_ctypes.py
+++ b/lib_pypy/cffi/backend_ctypes.py
@@ -460,6 +460,11 @@
                         return x._value
                     raise TypeError("character expected, got %s" %
                                     type(x).__name__)
+                def __nonzero__(self):
+                    return ord(self._value) != 0
+            else:
+                def __nonzero__(self):
+                    return self._value != 0
 
             if kind == 'float':
                 @staticmethod
@@ -993,6 +998,31 @@
         assert onerror is None   # XXX not implemented
         return BType(source, error)
 
+    def gcp(self, cdata, destructor):
+        BType = self.typeof(cdata)
+
+        if destructor is None:
+            if not (hasattr(BType, '_gcp_type') and
+                    BType._gcp_type is BType):
+                raise TypeError("Can remove destructor only on a object "
+                                "previously returned by ffi.gc()")
+            cdata._destructor = None
+            return None
+
+        try:
+            gcp_type = BType._gcp_type
+        except AttributeError:
+            class CTypesDataGcp(BType):
+                __slots__ = ['_orig', '_destructor']
+                def __del__(self):
+                    if self._destructor is not None:
+                        self._destructor(self._orig)
+            gcp_type = BType._gcp_type = CTypesDataGcp
+        new_cdata = self.cast(gcp_type, cdata)
+        new_cdata._orig = cdata
+        new_cdata._destructor = destructor
+        return new_cdata
+
     typeof = type
 
     def getcname(self, BType, replace_with):
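
The trick used by gcp() above, a lazily created per-class subclass whose __del__
carries the destructor, can be sketched independently of cffi as follows (names
are illustrative, and the sketch assumes cls() takes no arguments; the real code
casts the cdata instead of constructing a fresh object):

    def attach_destructor(obj, destructor):
        cls = type(obj)
        try:
            gcp_cls = cls._gcp_type            # reuse the per-class wrapper
        except AttributeError:
            class GcpWrapper(cls):
                def __del__(self):
                    if self._destructor is not None:
                        self._destructor(self._orig)
            gcp_cls = cls._gcp_type = GcpWrapper
        wrapper = gcp_cls()                    # real code: self.cast(gcp_type, cdata)
        wrapper._orig = obj
        wrapper._destructor = destructor
        return wrapper
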
diff --git a/pypy/doc/discussion/finalizer-order.rst b/pypy/doc/discussion/finalizer-order.rst
--- a/pypy/doc/discussion/finalizer-order.rst
+++ b/pypy/doc/discussion/finalizer-order.rst
@@ -33,26 +33,25 @@
 it from a finalizer.  A finalizer runs earlier, and in topological
 order; care must be taken that the object might still be reachable at
 this point if we're clever enough.  A destructor on the other hand runs
-last; nothing can be done with the object any more.
+last; nothing can be done with the object any more, and the GC frees it
+immediately.
 
 
 Destructors
 -----------
 
 A destructor is an RPython ``__del__()`` method that is called directly
-by the GC when there is no more reference to an object.  Intended for
-objects that just need to free a block of raw memory or close a file.
+by the GC when it is about to free the memory.  Intended for objects
+that just need to free an extra block of raw memory.
 
 There are restrictions on the kind of code you can put in ``__del__()``,
 including all other functions called by it.  These restrictions are
-checked.  In particular you cannot access fields containing GC objects;
-and if you call an external C function, it must be a "safe" function
-(e.g. not releasing the GIL; use ``releasegil=False`` in
-``rffi.llexternal()``).
+checked.  In particular you cannot access fields containing GC objects.
+Right now you can't call any external C function either.
 
-If there are several objects with destructors that die during the same
-GC cycle, they are called in a completely random order --- but that
-should not matter because destructors cannot do much anyway.
+Destructors are called precisely when the GC frees the memory of the
+object.  As long as the object exists (even in some finalizer queue or
+anywhere), its destructor is not called.
 
 
 Register_finalizer
@@ -95,10 +94,15 @@
 To find the queued items, call ``fin.next_dead()`` repeatedly.  It
 returns the next queued item, or ``None`` when the queue is empty.
 
-It is allowed in theory to cumulate several different
+In theory, it would kind of work if you cumulate several different
 ``FinalizerQueue`` instances for objects of the same class, and
 (always in theory) the same ``obj`` could be registered several times
 in the same queue, or in several queues.  This is not tested though.
+For now the untranslated emulation does not support registering the
+same object several times.
+
+Note that the Boehm garbage collector, used in ``rpython -O0``,
+completely ignores ``register_finalizer()``.
 
 
 Ordering of finalizers
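
Concretely, the register_finalizer()/next_dead() protocol described in this file
looks roughly like the following RPython sketch.  W_MyObject, w_obj and
schedule_more_work() are placeholders; the real interpreter-level wiring is the
make_finalizer_queue() added in executioncontext.py below:

    from rpython.rlib import rgc

    class MyFinalizerQueue(rgc.FinalizerQueue):
        Class = W_MyObject                 # only instances of this class

        def finalizer_trigger(self):
            # called by the GC when registered objects have died; only take
            # note here, and drain the queue later at a safe point
            schedule_more_work()

    fin = MyFinalizerQueue()
    fin.register_finalizer(w_obj)          # ask for w_obj to be queued

    def drain_queue():
        while True:
            w_dead = fin.next_dead()       # None when the queue is empty
            if w_dead is None:
                break
            w_dead._finalize_()
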
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -79,3 +79,13 @@
 It is a more flexible way to make RPython finalizers.
 
 .. branch: unpacking-cpython-shortcut
+
+.. branch: cleanups
+
+.. branch: cpyext-more-slots
+
+.. branch: use-gc-del-3
+
+Use the new rgc.FinalizerQueue mechanism to clean up the handling of
+``__del__`` methods.  Fixes notably issue #2287.  (All RPython
+subclasses of W_Root need to use FinalizerQueue now.)
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -11,7 +11,7 @@
     INT_MIN, INT_MAX, UINT_MAX, USHRT_MAX
 
 from pypy.interpreter.executioncontext import (ExecutionContext, ActionFlag,
-    UserDelAction)
+    make_finalizer_queue)
 from pypy.interpreter.error import OperationError, new_exception_class, oefmt
 from pypy.interpreter.argument import Arguments
 from pypy.interpreter.miscutils import ThreadLocals, make_weak_value_dictionary
@@ -28,6 +28,7 @@
     """This is the abstract root class of all wrapped objects that live
     in a 'normal' object space like StdObjSpace."""
     __slots__ = ('__weakref__',)
+    _must_be_light_finalizer_ = True
     user_overridden_class = False
 
     def getdict(self, space):
@@ -136,9 +137,8 @@
         pass
 
     def clear_all_weakrefs(self):
-        """Call this at the beginning of interp-level __del__() methods
-        in subclasses.  It ensures that weakrefs (if any) are cleared
-        before the object is further destroyed.
+        """Ensures that weakrefs (if any) are cleared now.  This is
+        called by UserDelAction before the object is finalized further.
         """
         lifeline = self.getweakref()
         if lifeline is not None:
@@ -151,25 +151,37 @@
             self.delweakref()
             lifeline.clear_all_weakrefs()
 
-    __already_enqueued_for_destruction = ()
+    def _finalize_(self):
+        """The RPython-level finalizer.
 
-    def enqueue_for_destruction(self, space, callback, descrname):
-        """Put the object in the destructor queue of the space.
-        At a later, safe point in time, UserDelAction will call
-        callback(self).  If that raises OperationError, prints it
-        to stderr with the descrname string.
+        By default, it is *not called*.  See self.register_finalizer().
+        Be ready to handle the case where the object is only half
+        initialized.  Also, in some cases the object might still be
+        visible to app-level after _finalize_() is called (e.g. if
+        there is a __del__ that resurrects).
+        """
 
-        Note that 'callback' will usually need to start with:
-            assert isinstance(self, W_SpecificClass)
+    def register_finalizer(self, space):
+        """Register a finalizer for this object, so that
+        self._finalize_() will be called.  You must call this method at
+        most once.  Be ready to handle in _finalize_() the case where
+        the object is half-initialized, even if you only call
+        self.register_finalizer() at the end of the initialization.
+        This is because there are cases where the finalizer is already
+        registered before: if the user makes an app-level subclass with
+        a __del__.  (In that case only, self.register_finalizer() does
+        nothing, because the finalizer is already registered in
+        allocate_instance().)
         """
-        # this function always resurect the object, so when
-        # running on top of CPython we must manually ensure that
-        # we enqueue it only once
-        if not we_are_translated():
-            if callback in self.__already_enqueued_for_destruction:
-                return
-            self.__already_enqueued_for_destruction += (callback,)
-        space.user_del_action.register_callback(self, callback, descrname)
+        if self.user_overridden_class and self.getclass(space).hasuserdel:
+            # already registered by space.allocate_instance()
+            if not we_are_translated():
+                assert space.finalizer_queue._already_registered(self)
+        else:
+            if not we_are_translated():
+                # does not make sense if _finalize_ is not overridden
+                assert self._finalize_.im_func is not W_Root._finalize_.im_func
+            space.finalizer_queue.register_finalizer(self)
 
     # hooks that the mapdict implementations needs:
     def _get_mapdict_map(self):
@@ -389,9 +401,9 @@
         self.interned_strings = make_weak_value_dictionary(self, str, W_Root)
         self.actionflag = ActionFlag()    # changed by the signal module
         self.check_signal_action = None   # changed by the signal module
-        self.user_del_action = UserDelAction(self)
+        make_finalizer_queue(W_Root, self)
         self._code_of_sys_exc_info = None
-        
+
         # can be overridden to a subclass
         self.initialize()
 
@@ -1844,7 +1856,6 @@
     ('get',             'get',       3, ['__get__']),
     ('set',             'set',       3, ['__set__']),
     ('delete',          'delete',    2, ['__delete__']),
-    ('userdel',         'del',       1, ['__del__']),
 ]
 
 ObjSpace.BuiltinModuleTable = [
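
In practice the new W_Root contract introduced above is used exactly as in the
modules converted later in this diff: call register_finalizer() once, put the
cleanup in _finalize_(), and be ready for a half-initialized object.  A condensed
sketch (W_Thing and its raw buffer are invented for illustration, following the
interp_connection.py change below):

    from pypy.interpreter.baseobjspace import W_Root
    from rpython.rtyper.lltypesystem import lltype, rffi

    class W_Thing(W_Root):
        buffer = lltype.nullptr(rffi.CCHARP.TO)   # default for the half-init case

        def __init__(self, space, size):
            self.buffer = lltype.malloc(rffi.CCHARP.TO, size, flavor='raw')
            self.register_finalizer(space)

        def _finalize_(self):
            buf = self.buffer
            if buf:                               # free at most once
                self.buffer = lltype.nullptr(rffi.CCHARP.TO)
                lltype.free(buf, flavor='raw')
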
diff --git a/pypy/interpreter/executioncontext.py b/pypy/interpreter/executioncontext.py
--- a/pypy/interpreter/executioncontext.py
+++ b/pypy/interpreter/executioncontext.py
@@ -2,7 +2,7 @@
 from pypy.interpreter.error import OperationError, get_cleared_operation_error
 from rpython.rlib.unroll import unrolling_iterable
 from rpython.rlib.objectmodel import specialize
-from rpython.rlib import jit
+from rpython.rlib import jit, rgc
 
 TICK_COUNTER_STEP = 100
 
@@ -141,6 +141,12 @@
             actionflag.action_dispatcher(self, frame)     # slow path
     bytecode_trace._always_inline_ = True
 
+    def _run_finalizers_now(self):
+        # Tests only: run the actions now, to ensure that the
+        # finalizable objects are really finalized.  Used notably by
+        # pypy.tool.pytest.apptest.
+        self.space.actionflag.action_dispatcher(self, None)
+
     def bytecode_only_trace(self, frame):
         """
         Like bytecode_trace() but doesn't invoke any other events besides the
@@ -515,75 +521,98 @@
     """
 
 
-class UserDelCallback(object):
-    def __init__(self, w_obj, callback, descrname):
-        self.w_obj = w_obj
-        self.callback = callback
-        self.descrname = descrname
-        self.next = None
-
 class UserDelAction(AsyncAction):
     """An action that invokes all pending app-level __del__() method.
     This is done as an action instead of immediately when the
-    interp-level __del__() is invoked, because the latter can occur more
+    WRootFinalizerQueue is triggered, because the latter can occur more
     or less anywhere in the middle of code that might not be happy with
     random app-level code mutating data structures under its feet.
     """
 
     def __init__(self, space):
         AsyncAction.__init__(self, space)
-        self.dying_objects = None
-        self.dying_objects_last = None
-        self.finalizers_lock_count = 0
-        self.enabled_at_app_level = True
-
-    def register_callback(self, w_obj, callback, descrname):
-        cb = UserDelCallback(w_obj, callback, descrname)
-        if self.dying_objects_last is None:
-            self.dying_objects = cb
-        else:
-            self.dying_objects_last.next = cb
-        self.dying_objects_last = cb
-        self.fire()
+        self.finalizers_lock_count = 0        # see pypy/module/gc
+        self.enabled_at_app_level = True      # see pypy/module/gc
+        self.pending_with_disabled_del = None
 
     def perform(self, executioncontext, frame):
-        if self.finalizers_lock_count > 0:
-            return
         self._run_finalizers()
 
+    @jit.dont_look_inside
     def _run_finalizers(self):
-        # Each call to perform() first grabs the self.dying_objects
-        # and replaces it with an empty list.  We do this to try to
-        # avoid too deep recursions of the kind of __del__ being called
-        # while in the middle of another __del__ call.
-        pending = self.dying_objects
-        self.dying_objects = None
-        self.dying_objects_last = None
+        while True:
+            w_obj = self.space.finalizer_queue.next_dead()
+            if w_obj is None:
+                break
+            self._call_finalizer(w_obj)
+
+    def gc_disabled(self, w_obj):
+        # If we're running in 'gc.disable()' mode, record w_obj in the
+        # "call me later" list and return True.  In normal mode, return
+        # False.  Use this function from some _finalize_() methods:
+        # if a _finalize_() method would call some user-defined
+        # app-level function, like a weakref callback, then first do
+        # 'if gc_disabled(self): return'.  Another attempt at
+        # calling _finalize_() will be made after 'gc.enable()'.
+        # (The exact rule for when to use gc_disabled() or not is a bit
+        # vague, but most importantly this includes all user-level
+        # __del__().)
+        pdd = self.pending_with_disabled_del
+        if pdd is None:
+            return False
+        else:
+            pdd.append(w_obj)
+            return True
+
+    def _call_finalizer(self, w_obj):
+        # Before calling the finalizers, clear the weakrefs, if any.
+        w_obj.clear_all_weakrefs()
+
+        # Look up and call the app-level __del__, if any.
         space = self.space
-        while pending is not None:
+        if w_obj.typedef is None:
+            w_del = None       # obscure case: for WeakrefLifeline
+        else:
+            w_del = space.lookup(w_obj, '__del__')
+        if w_del is not None:
+            if self.gc_disabled(w_obj):
+                return
             try:
-                pending.callback(pending.w_obj)
-            except OperationError as e:
-                e.write_unraisable(space, pending.descrname, pending.w_obj)
-                e.clear(space)   # break up reference cycles
-            pending = pending.next
-        #
-        # Note: 'dying_objects' used to be just a regular list instead
-        # of a chained list.  This was the cause of "leaks" if we have a
-        # program that constantly creates new objects with finalizers.
-        # Here is why: say 'dying_objects' is a long list, and there
-        # are n instances in it.  Then we spend some time in this
-        # function, possibly triggering more GCs, but keeping the list
-        # of length n alive.  Then the list is suddenly freed at the
-        # end, and we return to the user program.  At this point the
-        # GC limit is still very high, because just before, there was
-        # a list of length n alive.  Assume that the program continues
-        # to allocate a lot of instances with finalizers.  The high GC
-        # limit means that it could allocate a lot of instances before
-        # reaching it --- possibly more than n.  So the whole procedure
-        # repeats with higher and higher values of n.
-        #
-        # This does not occur in the current implementation because
-        # there is no list of length n: if n is large, then the GC
-        # will run several times while walking the list, but it will
-        # see lower and lower memory usage, with no lower bound of n.
+                space.get_and_call_function(w_del, w_obj)
+            except Exception as e:
+                report_error(space, e, "method __del__ of ", w_obj)
+
+        # Call the RPython-level _finalize_() method.
+        try:
+            w_obj._finalize_()
+        except Exception as e:
+            report_error(space, e, "finalizer of ", w_obj)
+
+
+def report_error(space, e, where, w_obj):
+    if isinstance(e, OperationError):
+        e.write_unraisable(space, where, w_obj)
+        e.clear(space)   # break up reference cycles
+    else:
+        addrstring = w_obj.getaddrstring(space)
+        msg = ("RPython exception %s in %s<%s at 0x%s> ignored\n" % (
+                   str(e), where, space.type(w_obj).name, addrstring))
+        space.call_method(space.sys.get('stderr'), 'write',
+                          space.wrap(msg))
+
+
+def make_finalizer_queue(W_Root, space):
+    """Make a FinalizerQueue subclass which responds to GC finalizer
+    events by 'firing' the UserDelAction class above.  It does not
+    directly fetch the objects to finalize at all; they stay in the
+    GC-managed queue, and will only be fetched by UserDelAction
+    (between bytecodes)."""
+
+    class WRootFinalizerQueue(rgc.FinalizerQueue):
+        Class = W_Root
+
+        def finalizer_trigger(self):
+            space.user_del_action.fire()
+
+    space.user_del_action = UserDelAction(space)
+    space.finalizer_queue = WRootFinalizerQueue()
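
A _finalize_() method that ends up running app-level code is expected to use
gc_disabled() as the comment above describes; the interp_classobj.py change later
in this diff follows this shape (w_callback stands in for whatever app-level
callable is invoked):

    # inside some W_Root subclass
    def _finalize_(self):
        w_callback = self.w_callback
        if w_callback is not None:
            if self.space.user_del_action.gc_disabled(self):
                return            # will be retried after gc.enable()
            self.space.call_function(w_callback)
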
diff --git a/pypy/interpreter/generator.py b/pypy/interpreter/generator.py
--- a/pypy/interpreter/generator.py
+++ b/pypy/interpreter/generator.py
@@ -1,6 +1,7 @@
 from pypy.interpreter.baseobjspace import W_Root
 from pypy.interpreter.error import OperationError, oefmt
 from pypy.interpreter.pyopcode import LoopBlock
+from pypy.interpreter.pycode import CO_YIELD_INSIDE_TRY
 from rpython.rlib import jit
 
 
@@ -13,6 +14,8 @@
         self.frame = frame     # turned into None when frame_finished_execution
         self.pycode = frame.pycode
         self.running = False
+        if self.pycode.co_flags & CO_YIELD_INSIDE_TRY:
+            self.register_finalizer(self.space)
 
     def descr__repr__(self, space):
         if self.pycode is None:
@@ -139,7 +142,6 @@
 
     def descr_close(self):
         """x.close(arg) -> raise GeneratorExit inside generator."""
-        assert isinstance(self, GeneratorIterator)
         space = self.space
         try:
             w_retval = self.throw(space.w_GeneratorExit, space.w_None,
@@ -212,25 +214,21 @@
     unpack_into = _create_unpack_into()
     unpack_into_w = _create_unpack_into()
 
-
-class GeneratorIteratorWithDel(GeneratorIterator):
-
-    def __del__(self):
-        # Only bother enqueuing self to raise an exception if the frame is
-        # still not finished and finally or except blocks are present.
-        self.clear_all_weakrefs()
+    def _finalize_(self):
+        # This is only called if the CO_YIELD_INSIDE_TRY flag is set
+        # on the code object.  If the frame is still not finished and
+        # finally or except blocks are present at the current
+        # position, then raise a GeneratorExit.  Otherwise, there is
+        # no point.
         if self.frame is not None:
             block = self.frame.lastblock
             while block is not None:
                 if not isinstance(block, LoopBlock):
-                    self.enqueue_for_destruction(self.space,
-                                                 GeneratorIterator.descr_close,
-                                                 "interrupting generator of ")
+                    self.descr_close()
                     break
                 block = block.previous
 
 
-
 def get_printable_location_genentry(bytecode):
     return '%s <generator>' % (bytecode.get_repr(),)
 generatorentry_driver = jit.JitDriver(greens=['pycode'],
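
The app-level behaviour is unchanged by this refactoring: a suspended generator
whose code has a yield inside try/finally is still closed when it is collected.
A sketch in the style of the tests in this changeset:

    def gen(log):
        try:
            yield 1
        finally:
            log.append("closed")     # reached via GeneratorExit from close()

    import gc
    log = []
    g = gen(log)
    g.next()                         # suspend inside the try: block
    del g
    for _ in range(5):               # finalizers run at a safe point on pypy
        gc.collect()
        if log:
            break
    assert log == ["closed"]
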
diff --git a/pypy/interpreter/pyframe.py b/pypy/interpreter/pyframe.py
--- a/pypy/interpreter/pyframe.py
+++ b/pypy/interpreter/pyframe.py
@@ -241,12 +241,8 @@
     def run(self):
         """Start this frame's execution."""
         if self.getcode().co_flags & pycode.CO_GENERATOR:
-            if self.getcode().co_flags & pycode.CO_YIELD_INSIDE_TRY:
-                from pypy.interpreter.generator import GeneratorIteratorWithDel
-                return self.space.wrap(GeneratorIteratorWithDel(self))
-            else:
-                from pypy.interpreter.generator import GeneratorIterator
-                return self.space.wrap(GeneratorIterator(self))
+            from pypy.interpreter.generator import GeneratorIterator
+            return self.space.wrap(GeneratorIterator(self))
         else:
             return self.execute_frame()
 
diff --git a/pypy/interpreter/test/test_typedef.py b/pypy/interpreter/test/test_typedef.py
--- a/pypy/interpreter/test/test_typedef.py
+++ b/pypy/interpreter/test/test_typedef.py
@@ -127,10 +127,7 @@
                         """ % (slots, methodname, checks[0], checks[1],
                                checks[2], checks[3]))
         subclasses = {}
-        for key, subcls in typedef._subclass_cache.items():
-            if key[0] is not space.config:
-                continue
-            cls = key[1]
+        for cls, subcls in typedef._unique_subclass_cache.items():
             subclasses.setdefault(cls, {})
             prevsubcls = subclasses[cls].setdefault(subcls.__name__, subcls)
             assert subcls is prevsubcls
@@ -186,35 +183,20 @@
         class W_Level1(W_Root):
             def __init__(self, space1):
                 assert space1 is space
-            def __del__(self):
+                self.register_finalizer(space)
+            def _finalize_(self):
                 space.call_method(w_seen, 'append', space.wrap(1))
-        class W_Level2(W_Root):
-            def __init__(self, space1):
-                assert space1 is space
-            def __del__(self):
-                self.enqueue_for_destruction(space, W_Level2.destructormeth,
-                                             'FOO ')
-            def destructormeth(self):
-                space.call_method(w_seen, 'append', space.wrap(2))
         W_Level1.typedef = typedef.TypeDef(
             'level1',
             __new__ = typedef.generic_new_descr(W_Level1))
-        W_Level2.typedef = typedef.TypeDef(
-            'level2',
-            __new__ = typedef.generic_new_descr(W_Level2))
         #
         w_seen = space.newlist([])
         W_Level1(space)
         gc.collect(); gc.collect()
-        assert space.unwrap(w_seen) == [1]
-        #
-        w_seen = space.newlist([])
-        W_Level2(space)
-        gc.collect(); gc.collect()
         assert space.str_w(space.repr(w_seen)) == "[]"  # not called yet
         ec = space.getexecutioncontext()
         self.space.user_del_action.perform(ec, None)
-        assert space.unwrap(w_seen) == [2]
+        assert space.unwrap(w_seen) == [1]   # called by user_del_action
         #
         w_seen = space.newlist([])
         self.space.appexec([self.space.gettypeobject(W_Level1.typedef)],
@@ -236,29 +218,17 @@
             A4()
         """)
         gc.collect(); gc.collect()
-        assert space.unwrap(w_seen) == [4, 1]
+        assert space.unwrap(w_seen) == [4, 1]    # user __del__, and _finalize_
         #
         w_seen = space.newlist([])
-        self.space.appexec([self.space.gettypeobject(W_Level2.typedef)],
+        self.space.appexec([self.space.gettypeobject(W_Level1.typedef)],
         """(level2):
             class A5(level2):
                 pass
             A5()
         """)
         gc.collect(); gc.collect()
-        assert space.unwrap(w_seen) == [2]
-        #
-        w_seen = space.newlist([])
-        self.space.appexec([self.space.gettypeobject(W_Level2.typedef),
-                            w_seen],
-        """(level2, seen):
-            class A6(level2):
-                def __del__(self):
-                    seen.append(6)
-            A6()
-        """)
-        gc.collect(); gc.collect()
-        assert space.unwrap(w_seen) == [6, 2]
+        assert space.unwrap(w_seen) == [1]     # _finalize_ only
 
     def test_multiple_inheritance(self):
         class W_A(W_Root):
diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py
--- a/pypy/interpreter/typedef.py
+++ b/pypy/interpreter/typedef.py
@@ -24,6 +24,8 @@
         self.bases = bases
         self.heaptype = False
         self.hasdict = '__dict__' in rawdict
+        # no __del__: use an RPython _finalize_() method and register_finalizer
+        assert '__del__' not in rawdict
         self.weakrefable = '__weakref__' in rawdict
         self.doc = rawdict.pop('__doc__', None)
         for base in bases:
@@ -103,26 +105,20 @@
 # we need two subclasses of the app-level type, one to add mapdict, and then one
 # to add del to not slow down the GC.
 
-def get_unique_interplevel_subclass(space, cls, needsdel=False):
+def get_unique_interplevel_subclass(space, cls):
     "NOT_RPYTHON: initialization-time only"
-    if hasattr(cls, '__del__') and getattr(cls, "handle_del_manually", False):
-        needsdel = False
     assert cls.typedef.acceptable_as_base_class
-    key = space, cls, needsdel
     try:
-        return _subclass_cache[key]
+        return _unique_subclass_cache[cls]
     except KeyError:
-        # XXX can save a class if cls already has a __del__
-        if needsdel:
-            cls = get_unique_interplevel_subclass(space, cls, False)
-        subcls = _getusercls(space, cls, needsdel)
-        assert key not in _subclass_cache
-        _subclass_cache[key] = subcls
+        subcls = _getusercls(cls)
+        assert cls not in _unique_subclass_cache
+        _unique_subclass_cache[cls] = subcls
         return subcls
 get_unique_interplevel_subclass._annspecialcase_ = "specialize:memo"
-_subclass_cache = {}
+_unique_subclass_cache = {}
 
-def _getusercls(space, cls, wants_del, reallywantdict=False):
+def _getusercls(cls, reallywantdict=False):
     from rpython.rlib import objectmodel
     from pypy.objspace.std.objectobject import W_ObjectObject
     from pypy.module.__builtin__.interp_classobj import W_InstanceObject
@@ -132,11 +128,10 @@
     typedef = cls.typedef
     name = cls.__name__ + "User"
 
-    mixins_needed = []
     if cls is W_ObjectObject or cls is W_InstanceObject:
-        mixins_needed.append(_make_storage_mixin_size_n())
+        base_mixin = _make_storage_mixin_size_n()
     else:
-        mixins_needed.append(MapdictStorageMixin)
+        base_mixin = MapdictStorageMixin
     copy_methods = [BaseUserClassMapdict]
     if reallywantdict or not typedef.hasdict:
         # the type has no dict, mapdict to provide the dict
@@ -147,44 +142,12 @@
         # support
         copy_methods.append(MapdictWeakrefSupport)
         name += "Weakrefable"
-    if wants_del:
-        # This subclass comes with an app-level __del__.  To handle
-        # it, we make an RPython-level __del__ method.  This
-        # RPython-level method is called directly by the GC and it
-        # cannot do random things (calling the app-level __del__ would
-        # be "random things").  So instead, we just call here
-        # enqueue_for_destruction(), and the app-level __del__ will be
-        # called later at a safe point (typically between bytecodes).
-        # If there is also an inherited RPython-level __del__, it is
-        # called afterwards---not immediately!  This base
-        # RPython-level __del__ is supposed to run only when the
-        # object is not reachable any more.  NOTE: it doesn't fully
-        # work: see issue #2287.
-        name += "Del"
-        parent_destructor = getattr(cls, '__del__', None)
-        def call_parent_del(self):
-            assert isinstance(self, subcls)
-            parent_destructor(self)
-        def call_applevel_del(self):
-            assert isinstance(self, subcls)
-            space.userdel(self)
-        class Proto(object):
-            def __del__(self):
-                self.clear_all_weakrefs()
-                self.enqueue_for_destruction(space, call_applevel_del,
-                                             'method __del__ of ')
-                if parent_destructor is not None:
-                    self.enqueue_for_destruction(space, call_parent_del,
-                                                 'internal destructor of ')
-        mixins_needed.append(Proto)
 
     class subcls(cls):
         user_overridden_class = True
-        for base in mixins_needed:
-            objectmodel.import_from_mixin(base)
+        objectmodel.import_from_mixin(base_mixin)
     for copycls in copy_methods:
         _copy_methods(copycls, subcls)
-    del subcls.base
     subcls.__name__ = name
     return subcls
 
diff --git a/pypy/module/__builtin__/interp_classobj.py b/pypy/module/__builtin__/interp_classobj.py
--- a/pypy/module/__builtin__/interp_classobj.py
+++ b/pypy/module/__builtin__/interp_classobj.py
@@ -44,13 +44,12 @@
         self.bases_w = bases
         self.w_dict = w_dict
 
+    def has_user_del(self, space):
+        return self.lookup(space, '__del__') is not None
+
     def instantiate(self, space):
         cache = space.fromcache(Cache)
-        if self.lookup(space, '__del__') is not None:
-            w_inst = cache.cls_with_del(space, self)
-        else:
-            w_inst = cache.cls_without_del(space, self)
-        return w_inst
+        return cache.InstanceObjectCls(space, self)
 
     def getdict(self, space):
         return self.w_dict
@@ -132,9 +131,9 @@
                 self.setbases(space, w_value)
                 return
             elif name == "__del__":
-                if self.lookup(space, name) is None:
+                if not self.has_user_del(space):
                     msg = ("a __del__ method added to an existing class will "
-                           "not be called")
+                           "only be called on instances made from now on")
                     space.warn(space.wrap(msg), space.w_RuntimeWarning)
         space.setitem(self.w_dict, w_attr, w_value)
 
@@ -184,14 +183,11 @@
         if hasattr(space, 'is_fake_objspace'):
             # hack: with the fake objspace, we don't want to see typedef's
             # _getusercls() at all
-            self.cls_without_del = W_InstanceObject
-            self.cls_with_del = W_InstanceObject
+            self.InstanceObjectCls = W_InstanceObject
             return
 
-        self.cls_without_del = _getusercls(
-                space, W_InstanceObject, False, reallywantdict=True)
-        self.cls_with_del = _getusercls(
-                space, W_InstanceObject, True, reallywantdict=True)
+        self.InstanceObjectCls = _getusercls(
+                W_InstanceObject, reallywantdict=True)
 
 
 def class_descr_call(space, w_self, __args__):
@@ -297,12 +293,15 @@
 class W_InstanceObject(W_Root):
     def __init__(self, space, w_class):
         # note that user_setup is overridden by the typedef.py machinery
+        self.space = space
         self.user_setup(space, space.gettypeobject(self.typedef))
         assert isinstance(w_class, W_ClassObject)
         self.w_class = w_class
+        if w_class.has_user_del(space):
+            space.finalizer_queue.register_finalizer(self)
 
     def user_setup(self, space, w_subtype):
-        self.space = space
+        pass
 
     def set_oldstyle_class(self, space, w_class):
         if w_class is None or not isinstance(w_class, W_ClassObject):
@@ -368,8 +367,7 @@
                 self.set_oldstyle_class(space, w_value)
                 return
             if name == '__del__' and w_meth is None:
-                cache = space.fromcache(Cache)
-                if (not isinstance(self, cache.cls_with_del)
+                if (not self.w_class.has_user_del(space)
                     and self.getdictvalue(space, '__del__') is None):
                     msg = ("a __del__ method added to an instance with no "
                            "__del__ in the class will not be called")
@@ -646,13 +644,14 @@
             raise oefmt(space.w_TypeError, "instance has no next() method")
         return space.call_function(w_func)
 
-    def descr_del(self, space):
-        # Note that this is called from executioncontext.UserDelAction
-        # via the space.userdel() method.
+    def _finalize_(self):
+        space = self.space
         w_func = self.getdictvalue(space, '__del__')
         if w_func is None:
             w_func = self.getattr_from_class(space, '__del__')
         if w_func is not None:
+            if self.space.user_del_action.gc_disabled(self):
+                return
             space.call_function(w_func)
 
     def descr_exit(self, space, w_type, w_value, w_tb):
@@ -729,7 +728,6 @@
     __pow__ = interp2app(W_InstanceObject.descr_pow),
     __rpow__ = interp2app(W_InstanceObject.descr_rpow),
     next = interp2app(W_InstanceObject.descr_next),
-    __del__ = interp2app(W_InstanceObject.descr_del),
     __exit__ = interp2app(W_InstanceObject.descr_exit),
     __dict__ = dict_descr,
     **rawdict
diff --git a/pypy/module/_cffi_backend/allocator.py b/pypy/module/_cffi_backend/allocator.py
--- a/pypy/module/_cffi_backend/allocator.py
+++ b/pypy/module/_cffi_backend/allocator.py
@@ -45,14 +45,11 @@
                 rffi.c_memset(rffi.cast(rffi.VOIDP, ptr), 0,
                               rffi.cast(rffi.SIZE_T, datasize))
             #
-            if self.w_free is None:
-                # use this class which does not have a __del__, but still
-                # keeps alive w_raw_cdata
-                res = cdataobj.W_CDataNewNonStdNoFree(space, ptr, ctype, length)
-            else:
-                res = cdataobj.W_CDataNewNonStdFree(space, ptr, ctype, length)
+            res = cdataobj.W_CDataNewNonStd(space, ptr, ctype, length)
+            res.w_raw_cdata = w_raw_cdata
+            if self.w_free is not None:
                 res.w_free = self.w_free
-            res.w_raw_cdata = w_raw_cdata
+                res.register_finalizer(space)
             return res
 
     @unwrap_spec(w_init=WrappedDefault(None))
diff --git a/pypy/module/_cffi_backend/cdataobj.py b/pypy/module/_cffi_backend/cdataobj.py
--- a/pypy/module/_cffi_backend/cdataobj.py
+++ b/pypy/module/_cffi_backend/cdataobj.py
@@ -71,7 +71,7 @@
 
     def nonzero(self):
         with self as ptr:
-            nonzero = bool(ptr)
+            nonzero = self.ctype.nonzero(ptr)
         return self.space.wrap(nonzero)
 
     def int(self, space):
@@ -365,8 +365,16 @@
         return self.ctype.size
 
     def with_gc(self, w_destructor):
+        space = self.space
+        if space.is_none(w_destructor):
+            if isinstance(self, W_CDataGCP):
+                self.w_destructor = None
+                return space.w_None
+            raise oefmt(space.w_TypeError,
+                        "Can remove destructor only on a object "
+                        "previously returned by ffi.gc()")
         with self as ptr:
-            return W_CDataGCP(self.space, ptr, self.ctype, self, w_destructor)
+            return W_CDataGCP(space, ptr, self.ctype, self, w_destructor)
 
     def unpack(self, length):
         from pypy.module._cffi_backend.ctypeptr import W_CTypePtrOrArray
@@ -441,22 +449,11 @@
         lltype.free(self._ptr, flavor='raw')
 
 
-class W_CDataNewNonStdNoFree(W_CDataNewOwning):
-    """Subclass using a non-standard allocator, no free()"""
-    _attrs_ = ['w_raw_cdata']
+class W_CDataNewNonStd(W_CDataNewOwning):
+    """Subclass using a non-standard allocator"""
+    _attrs_ = ['w_raw_cdata', 'w_free']
 
-class W_CDataNewNonStdFree(W_CDataNewNonStdNoFree):
-    """Subclass using a non-standard allocator, with a free()"""
-    _attrs_ = ['w_free']
-
-    def __del__(self):
-        self.clear_all_weakrefs()
-        self.enqueue_for_destruction(self.space,
-                                     W_CDataNewNonStdFree.call_destructor,
-                                     'destructor of ')
-
-    def call_destructor(self):
-        assert isinstance(self, W_CDataNewNonStdFree)
+    def _finalize_(self):
         self.space.call_function(self.w_free, self.w_raw_cdata)
 
 
@@ -538,21 +535,19 @@
 class W_CDataGCP(W_CData):
     """For ffi.gc()."""
     _attrs_ = ['w_original_cdata', 'w_destructor']
-    _immutable_fields_ = ['w_original_cdata', 'w_destructor']
+    _immutable_fields_ = ['w_original_cdata']
 
     def __init__(self, space, cdata, ctype, w_original_cdata, w_destructor):
         W_CData.__init__(self, space, cdata, ctype)
         self.w_original_cdata = w_original_cdata
         self.w_destructor = w_destructor
+        self.register_finalizer(space)
 
-    def __del__(self):
-        self.clear_all_weakrefs()
-        self.enqueue_for_destruction(self.space, W_CDataGCP.call_destructor,
-                                     'destructor of ')
-
-    def call_destructor(self):
-        assert isinstance(self, W_CDataGCP)
-        self.space.call_function(self.w_destructor, self.w_original_cdata)
+    def _finalize_(self):
+        w_destructor = self.w_destructor
+        if w_destructor is not None:
+            self.w_destructor = None
+            self.space.call_function(w_destructor, self.w_original_cdata)
 
 
 W_CData.typedef = TypeDef(
diff --git a/pypy/module/_cffi_backend/cdlopen.py b/pypy/module/_cffi_backend/cdlopen.py
--- a/pypy/module/_cffi_backend/cdlopen.py
+++ b/pypy/module/_cffi_backend/cdlopen.py
@@ -25,10 +25,13 @@
                 raise wrap_dlopenerror(ffi.space, e, filename)
         W_LibObject.__init__(self, ffi, filename)
         self.libhandle = handle
+        self.register_finalizer(ffi.space)
 
-    def __del__(self):
-        if self.libhandle:
-            dlclose(self.libhandle)
+    def _finalize_(self):
+        h = self.libhandle
+        if h != rffi.cast(DLLHANDLE, 0):
+            self.libhandle = rffi.cast(DLLHANDLE, 0)
+            dlclose(h)
 
     def cdlopen_fetch(self, name):
         if not self.libhandle:
diff --git a/pypy/module/_cffi_backend/ctypeobj.py b/pypy/module/_cffi_backend/ctypeobj.py
--- a/pypy/module/_cffi_backend/ctypeobj.py
+++ b/pypy/module/_cffi_backend/ctypeobj.py
@@ -147,6 +147,9 @@
         raise oefmt(space.w_TypeError, "cannot add a cdata '%s' and a number",
                     self.name)
 
+    def nonzero(self, cdata):
+        return bool(cdata)
+
     def insert_name(self, extra, extra_position):
         name = '%s%s%s' % (self.name[:self.name_position],
                            extra,
diff --git a/pypy/module/_cffi_backend/ctypeprim.py b/pypy/module/_cffi_backend/ctypeprim.py
--- a/pypy/module/_cffi_backend/ctypeprim.py
+++ b/pypy/module/_cffi_backend/ctypeprim.py
@@ -93,6 +93,18 @@
             return self.space.newlist_int(result)
         return W_CType.unpack_ptr(self, w_ctypeptr, ptr, length)
 
+    def nonzero(self, cdata):
+        if self.size <= rffi.sizeof(lltype.Signed):
+            value = misc.read_raw_long_data(cdata, self.size)
+            return value != 0
+        else:
+            return self._nonzero_longlong(cdata)
+
+    def _nonzero_longlong(self, cdata):
+        # in its own function: LONGLONG may make the whole function jit-opaque
+        value = misc.read_raw_signed_data(cdata, self.size)
+        return bool(value)
+
 
 class W_CTypePrimitiveCharOrUniChar(W_CTypePrimitive):
     _attrs_ = []
@@ -435,6 +447,9 @@
             return self.space.newlist_float(result)
         return W_CType.unpack_ptr(self, w_ctypeptr, ptr, length)
 
+    def nonzero(self, cdata):
+        return misc.is_nonnull_float(cdata, self.size)
+
 
 class W_CTypePrimitiveLongDouble(W_CTypePrimitiveFloat):
     _attrs_ = []
@@ -501,3 +516,7 @@
                                              rffi.LONGDOUBLE, rffi.LONGDOUBLEP)
             return True
         return W_CTypePrimitive.pack_list_of_items(self, cdata, w_ob)
+
+    @jit.dont_look_inside
+    def nonzero(self, cdata):
+        return misc.is_nonnull_longdouble(cdata)
diff --git a/pypy/module/_cffi_backend/libraryobj.py b/pypy/module/_cffi_backend/libraryobj.py
--- a/pypy/module/_cffi_backend/libraryobj.py
+++ b/pypy/module/_cffi_backend/libraryobj.py
@@ -15,7 +15,6 @@
 
 class W_Library(W_Root):
     _immutable_ = True
-    handle = rffi.cast(DLLHANDLE, 0)
 
     def __init__(self, space, filename, flags):
         self.space = space
@@ -27,8 +26,9 @@
             except DLOpenError as e:
                 raise wrap_dlopenerror(space, e, filename)
         self.name = filename
+        self.register_finalizer(space)
 
-    def __del__(self):
+    def _finalize_(self):
         h = self.handle
         if h != rffi.cast(DLLHANDLE, 0):
             self.handle = rffi.cast(DLLHANDLE, 0)
diff --git a/pypy/module/_cffi_backend/misc.py b/pypy/module/_cffi_backend/misc.py
--- a/pypy/module/_cffi_backend/misc.py
+++ b/pypy/module/_cffi_backend/misc.py
@@ -256,7 +256,7 @@
 def is_nonnull_longdouble(cdata):
     return _is_nonnull_longdouble(read_raw_longdouble_data(cdata))
 def is_nonnull_float(cdata, size):
-    return read_raw_float_data(cdata, size) != 0.0
+    return read_raw_float_data(cdata, size) != 0.0    # note: True if a NaN
 
 def object_as_bool(space, w_ob):
     # convert and cast a Python object to a boolean.  Accept an integer
diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py
--- a/pypy/module/_cffi_backend/test/_backend_test_c.py
+++ b/pypy/module/_cffi_backend/test/_backend_test_c.py
@@ -141,9 +141,13 @@
     INF = 1E200 * 1E200
     for name in ["float", "double"]:
         p = new_primitive_type(name)
-        assert bool(cast(p, 0))
+        assert bool(cast(p, 0)) is False      # since 1.7
+        assert bool(cast(p, -0.0)) is False   # since 1.7
+        assert bool(cast(p, 1e-42)) is True
+        assert bool(cast(p, -1e-42)) is True
         assert bool(cast(p, INF))
         assert bool(cast(p, -INF))
+        assert bool(cast(p, float("nan")))
         assert int(cast(p, -150)) == -150
         assert int(cast(p, 61.91)) == 61
         assert long(cast(p, 61.91)) == 61
@@ -202,7 +206,8 @@
 
 def test_character_type():
     p = new_primitive_type("char")
-    assert bool(cast(p, '\x00'))
+    assert bool(cast(p, 'A')) is True
+    assert bool(cast(p, '\x00')) is False    # since 1.7
     assert cast(p, '\x00') != cast(p, -17*256)
     assert int(cast(p, 'A')) == 65
     assert long(cast(p, 'A')) == 65
@@ -2558,7 +2563,8 @@
     BBoolP = new_pointer_type(BBool)
     assert int(cast(BBool, False)) == 0
     assert int(cast(BBool, True)) == 1
-    assert bool(cast(BBool, False)) is True    # warning!
+    assert bool(cast(BBool, False)) is False    # since 1.7
+    assert bool(cast(BBool, True)) is True
     assert int(cast(BBool, 3)) == 1
     assert int(cast(BBool, long(3))) == 1
     assert int(cast(BBool, long(10)**4000)) == 1
diff --git a/pypy/module/_cffi_backend/test/test_ffi_obj.py b/pypy/module/_cffi_backend/test/test_ffi_obj.py
--- a/pypy/module/_cffi_backend/test/test_ffi_obj.py
+++ b/pypy/module/_cffi_backend/test/test_ffi_obj.py
@@ -331,6 +331,25 @@
             gc.collect()
         assert seen == [1]
 
+    def test_ffi_gc_disable(self):
+        import _cffi_backend as _cffi1_backend
+        ffi = _cffi1_backend.FFI()
+        p = ffi.new("int *", 123)
+        raises(TypeError, ffi.gc, p, None)
+        seen = []
+        q1 = ffi.gc(p, lambda p: seen.append(1))
+        q2 = ffi.gc(q1, lambda p: seen.append(2))
+        import gc; gc.collect()
+        assert seen == []
+        assert ffi.gc(q1, None) is None
+        del q1, q2
+        for i in range(5):
+            if seen:
+                break
+            import gc
+            gc.collect()
+        assert seen == [2]
+
     def test_ffi_new_allocator_1(self):
         import _cffi_backend as _cffi1_backend
         ffi = _cffi1_backend.FFI()
diff --git a/pypy/module/_file/interp_file.py b/pypy/module/_file/interp_file.py
--- a/pypy/module/_file/interp_file.py
+++ b/pypy/module/_file/interp_file.py
@@ -43,22 +43,18 @@
 
     def __init__(self, space):
         self.space = space
+        self.register_finalizer(space)
 
-    def __del__(self):
+    def _finalize_(self):
         # assume that the file and stream objects are only visible in the
-        # thread that runs __del__, so no race condition should be possible
-        self.clear_all_weakrefs()
+        # thread that runs _finalize_, so no race condition should be
+        # possible and no locking is done here.
         if self.stream is not None:
-            self.enqueue_for_destruction(self.space, W_File.destructor,
-                                         'close() method of ')
-
-    def destructor(self):
-        assert isinstance(self, W_File)
-        try:
-            self.direct_close()
-        except StreamErrors as e:
-            operr = wrap_streamerror(self.space, e, self.w_name)
-            raise operr
+            try:
+                self.direct_close()
+            except StreamErrors as e:
+                operr = wrap_streamerror(self.space, e, self.w_name)
+                raise operr
 
     def fdopenstream(self, stream, fd, mode, w_name=None):
         self.fd = fd
diff --git a/pypy/module/_hashlib/interp_hashlib.py b/pypy/module/_hashlib/interp_hashlib.py
--- a/pypy/module/_hashlib/interp_hashlib.py
+++ b/pypy/module/_hashlib/interp_hashlib.py
@@ -76,11 +76,14 @@
         except:
             lltype.free(ctx, flavor='raw')
             raise
+        self.register_finalizer(space)
 
-    def __del__(self):
-        if self.ctx:
-            ropenssl.EVP_MD_CTX_cleanup(self.ctx)
-            lltype.free(self.ctx, flavor='raw')
+    def _finalize_(self):
+        ctx = self.ctx
+        if ctx:
+            self.ctx = lltype.nullptr(ropenssl.EVP_MD_CTX.TO)
+            ropenssl.EVP_MD_CTX_cleanup(ctx)
+            lltype.free(ctx, flavor='raw')
 
     def digest_type_by_name(self, space):
         digest_type = ropenssl.EVP_get_digestbyname(self.name)
diff --git a/pypy/module/_io/interp_bufferedio.py b/pypy/module/_io/interp_bufferedio.py
--- a/pypy/module/_io/interp_bufferedio.py
+++ b/pypy/module/_io/interp_bufferedio.py
@@ -952,9 +952,15 @@
             self.w_writer = None
             raise
 
-    def __del__(self):
-        self.clear_all_weakrefs()
+    def _finalize_(self):
         # Don't call the base __del__: do not close the files!
+        # Usually the _finalize_() method is not called at all because
+        # we set 'needs_to_finalize = False' in this class, so
+        # W_IOBase.__init__() won't call register_finalizer().
+        # However, this method might still be called: if the user
+        # makes an app-level subclass and adds a custom __del__.
+        pass
+    needs_to_finalize = False
 
     # forward to reader
     for method in ['read', 'peek', 'read1', 'readinto', 'readable']:
diff --git a/pypy/module/_io/interp_iobase.py b/pypy/module/_io/interp_iobase.py
--- a/pypy/module/_io/interp_iobase.py
+++ b/pypy/module/_io/interp_iobase.py
@@ -59,6 +59,8 @@
         self.__IOBase_closed = False
         if add_to_autoflusher:
             get_autoflusher(space).add(self)
+        if self.needs_to_finalize:
+            self.register_finalizer(space)
 
     def getdict(self, space):
         return self.w_dict
@@ -71,13 +73,7 @@
             return True
         return False
 
-    def __del__(self):
-        self.clear_all_weakrefs()
-        self.enqueue_for_destruction(self.space, W_IOBase.destructor,
-                                     'internal __del__ of ')
-
-    def destructor(self):
-        assert isinstance(self, W_IOBase)
+    def _finalize_(self):
         space = self.space
         w_closed = space.findattr(self, space.wrap('closed'))
         try:
@@ -90,6 +86,7 @@
             # equally as bad, and potentially more frequent (because of
             # shutdown issues).
             pass
+    needs_to_finalize = True
 
     def _CLOSED(self):
         # Use this macro whenever you want to check the internal `closed`
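
The needs_to_finalize flag used above is just a class-level switch checked once
in the base __init__.  A minimal sketch of the pattern (class names invented):

    class W_Base(W_Root):
        needs_to_finalize = True

        def __init__(self, space):
            if self.needs_to_finalize:
                self.register_finalizer(space)

        def _finalize_(self):
            self.flush_and_close()        # placeholder for the real cleanup

    class W_NoAutoClose(W_Base):
        # opts out: no finalizer is registered; _finalize_ only runs if an
        # app-level subclass adds a custom __del__
        needs_to_finalize = False
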
diff --git a/pypy/module/_multibytecodec/app_multibytecodec.py b/pypy/module/_multibytecodec/app_multibytecodec.py
--- a/pypy/module/_multibytecodec/app_multibytecodec.py
+++ b/pypy/module/_multibytecodec/app_multibytecodec.py
@@ -44,8 +44,10 @@
                 self, data))
 
     def reset(self):
-        self.stream.write(MultibyteIncrementalEncoder.encode(
-                self, '', final=True))
+        data = MultibyteIncrementalEncoder.encode(
+            self, '', final=True)
+        if len(data) > 0:
+            self.stream.write(data)
         MultibyteIncrementalEncoder.reset(self)
 
     def writelines(self, lines):
diff --git a/pypy/module/_multibytecodec/interp_incremental.py b/pypy/module/_multibytecodec/interp_incremental.py
--- a/pypy/module/_multibytecodec/interp_incremental.py
+++ b/pypy/module/_multibytecodec/interp_incremental.py
@@ -20,8 +20,9 @@
         self.codec = codec.codec
         self.name = codec.name
         self._initialize()
+        self.register_finalizer(space)
 
-    def __del__(self):
+    def _finalize_(self):
         self._free()
 
     def reset_w(self):
diff --git a/pypy/module/_multibytecodec/test/test_app_stream.py b/pypy/module/_multibytecodec/test/test_app_stream.py
--- a/pypy/module/_multibytecodec/test/test_app_stream.py
+++ b/pypy/module/_multibytecodec/test/test_app_stream.py
@@ -90,3 +90,15 @@
         w.write(u'\u304b')
         w.write(u'\u309a')
         assert w.stream.output == ['\x83m', '', '\x82\xf5']
+
+    def test_writer_seek_no_empty_write(self):
+        # issue #2293: codecs.py will sometimes issue a reset()
+        # on a StreamWriter attached to a file that is not opened
+        # for writing at all.  We must not emit a "write('')"!
+        class FakeFile:
+            def write(self, data):
+                raise IOError("can't write!")
+        #
+        w = self.ShiftJisx0213StreamWriter(FakeFile())
+        w.reset()
+        # assert did not crash
diff --git a/pypy/module/_multiprocessing/interp_connection.py b/pypy/module/_multiprocessing/interp_connection.py
--- a/pypy/module/_multiprocessing/interp_connection.py
+++ b/pypy/module/_multiprocessing/interp_connection.py
@@ -40,14 +40,17 @@
     BUFFER_SIZE = 1024
     buffer = lltype.nullptr(rffi.CCHARP.TO)
 
-    def __init__(self, flags):
+    def __init__(self, space, flags):
         self.flags = flags
         self.buffer = lltype.malloc(rffi.CCHARP.TO, self.BUFFER_SIZE,
                                     flavor='raw')
+        self.register_finalizer(space)
 
-    def __del__(self):
-        if self.buffer:
-            lltype.free(self.buffer, flavor='raw')
+    def _finalize_(self):
+        buf = self.buffer
+        if buf:
+            self.buffer = lltype.nullptr(rffi.CCHARP.TO)
+            lltype.free(buf, flavor='raw')
         try:
             self.do_close()
         except OSError:
@@ -242,7 +245,7 @@
     def __init__(self, space, fd, flags):
         if fd == self.INVALID_HANDLE_VALUE or fd < 0:
             raise oefmt(space.w_IOError, "invalid handle %d", fd)
-        W_BaseConnection.__init__(self, flags)
+        W_BaseConnection.__init__(self, space, flags)
         self.fd = fd
 
     @unwrap_spec(fd=int, readable=bool, writable=bool)
@@ -363,8 +366,8 @@
     if sys.platform == 'win32':
         from rpython.rlib.rwin32 import INVALID_HANDLE_VALUE
 
-    def __init__(self, handle, flags):
-        W_BaseConnection.__init__(self, flags)
+    def __init__(self, space, handle, flags):
+        W_BaseConnection.__init__(self, space, flags)
         self.handle = handle
 
     @unwrap_spec(readable=bool, writable=bool)
@@ -375,7 +378,7 @@
         flags = (readable and READABLE) | (writable and WRITABLE)
 
         self = space.allocate_instance(W_PipeConnection, w_subtype)
-        W_PipeConnection.__init__(self, handle, flags)
+        W_PipeConnection.__init__(self, space, handle, flags)
         return space.wrap(self)
 
     def descr_repr(self, space):
diff --git a/pypy/module/_multiprocessing/interp_semaphore.py b/pypy/module/_multiprocessing/interp_semaphore.py
--- a/pypy/module/_multiprocessing/interp_semaphore.py
+++ b/pypy/module/_multiprocessing/interp_semaphore.py
@@ -430,11 +430,12 @@
 
 
 class W_SemLock(W_Root):
-    def __init__(self, handle, kind, maxvalue):
+    def __init__(self, space, handle, kind, maxvalue):
         self.handle = handle
         self.kind = kind
         self.count = 0
         self.maxvalue = maxvalue
+        self.register_finalizer(space)
 
     def kind_get(self, space):
         return space.newint(self.kind)
@@ -508,7 +509,7 @@
     @unwrap_spec(kind=int, maxvalue=int)
     def rebuild(space, w_cls, w_handle, kind, maxvalue):
         self = space.allocate_instance(W_SemLock, w_cls)
-        self.__init__(handle_w(space, w_handle), kind, maxvalue)
+        self.__init__(space, handle_w(space, w_handle), kind, maxvalue)
         return space.wrap(self)
 
     def enter(self, space):
@@ -517,7 +518,7 @@
     def exit(self, space, __args__):
         self.release(space)
 
-    def __del__(self):
+    def _finalize_(self):
         delete_semaphore(self.handle)
 
 @unwrap_spec(kind=int, value=int, maxvalue=int)
@@ -534,7 +535,7 @@
         raise wrap_oserror(space, e)
 
     self = space.allocate_instance(W_SemLock, w_subtype)
-    self.__init__(handle, kind, maxvalue)
+    self.__init__(space, handle, kind, maxvalue)
 
     return space.wrap(self)
 
diff --git a/pypy/module/_pickle_support/maker.py b/pypy/module/_pickle_support/maker.py
--- a/pypy/module/_pickle_support/maker.py
+++ b/pypy/module/_pickle_support/maker.py
@@ -4,7 +4,7 @@
 from pypy.interpreter.function import Function, Method
 from pypy.interpreter.module import Module
 from pypy.interpreter.pytraceback import PyTraceback
-from pypy.interpreter.generator import GeneratorIteratorWithDel
+from pypy.interpreter.generator import GeneratorIterator
 from rpython.rlib.objectmodel import instantiate
 from pypy.interpreter.gateway import unwrap_spec
 from pypy.objspace.std.iterobject import W_SeqIterObject, W_ReverseSeqIterObject
@@ -59,7 +59,7 @@
     return space.wrap(tb)
 
 def generator_new(space):
-    new_generator = instantiate(GeneratorIteratorWithDel)
+    new_generator = instantiate(GeneratorIterator)
     return space.wrap(new_generator)
 
 @unwrap_spec(current=int, remaining=int, step=int)
diff --git a/pypy/module/_ssl/interp_ssl.py b/pypy/module/_ssl/interp_ssl.py
--- a/pypy/module/_ssl/interp_ssl.py
+++ b/pypy/module/_ssl/interp_ssl.py
@@ -278,6 +278,8 @@
         sock_fd = space.int_w(space.call_method(w_sock, "fileno"))
         self.ssl = libssl_SSL_new(w_ctx.ctx)  # new ssl struct
 
+        self.register_finalizer(space)
+
         index = compute_unique_id(self)
         libssl_SSL_set_app_data(self.ssl, rffi.cast(rffi.VOIDP, index))
         SOCKET_STORAGE.set(index, self)
@@ -317,16 +319,15 @@
             self.ssl_sock_weakref_w = None
         return self
 
-    def __del__(self):
-        self.enqueue_for_destruction(self.space, _SSLSocket.destructor,
-                                     '__del__() method of ')
-
-    def destructor(self):
-        assert isinstance(self, _SSLSocket)
-        if self.peer_cert:
-            libssl_X509_free(self.peer_cert)
-        if self.ssl:
-            libssl_SSL_free(self.ssl)
+    def _finalize_(self):
+        peer_cert = self.peer_cert
+        if peer_cert:
+            self.peer_cert = lltype.nullptr(X509.TO)
+            libssl_X509_free(peer_cert)
+        ssl = self.ssl
+        if ssl:
+            self.ssl = lltype.nullptr(SSL.TO)
+            libssl_SSL_free(ssl)
 
     @unwrap_spec(data='bufferstr')
     def write(self, space, data):
@@ -1285,6 +1286,7 @@
         self = space.allocate_instance(_SSLContext, w_subtype)
         self.ctx = ctx
         self.check_hostname = False
+        self.register_finalizer(space)
         options = SSL_OP_ALL & ~SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS
         if protocol != PY_SSL_VERSION_SSL2:
             options |= SSL_OP_NO_SSLv2
@@ -1308,8 +1310,11 @@
 
         return self
 
-    def __del__(self):
-        libssl_SSL_CTX_free(self.ctx)
+    def _finalize_(self):
+        ctx = self.ctx
+        if ctx:
+            self.ctx = lltype.nullptr(SSL_CTX.TO)
+            libssl_SSL_CTX_free(ctx)
 
     @unwrap_spec(server_side=int)
     def descr_wrap_socket(self, space, w_sock, server_side, w_server_hostname=None, w_ssl_sock=None):
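
The _finalize_() methods above copy the C pointer into a local, clear the field, and only then call the libssl free function, so a second run of the finalizer (or an explicit close beforehand) cannot double-free. A rough plain-Python analogue of that idiom; free_handle, Wrapper and the handle value are made up for the example.

    # Sketch of the "copy, clear the field, then free" idiom used above.
    freed = []

    def free_handle(handle):
        freed.append(handle)

    class Wrapper(object):
        def __init__(self, handle):
            self.handle = handle

        def _finalize_(self):
            handle = self.handle
            if handle is not None:
                self.handle = None       # clear before freeing
                free_handle(handle)

    w = Wrapper("ssl-ctx")
    w._finalize_()
    w._finalize_()                       # second run is a harmless no-op
    assert freed == ["ssl-ctx"]
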
diff --git a/pypy/module/_weakref/interp__weakref.py b/pypy/module/_weakref/interp__weakref.py
--- a/pypy/module/_weakref/interp__weakref.py
+++ b/pypy/module/_weakref/interp__weakref.py
@@ -3,7 +3,8 @@
 from pypy.interpreter.error import oefmt
 from pypy.interpreter.gateway import interp2app, ObjSpace
 from pypy.interpreter.typedef import TypeDef
-from rpython.rlib import jit
+from pypy.interpreter.executioncontext import AsyncAction, report_error
+from rpython.rlib import jit, rgc
 from rpython.rlib.rshrinklist import AbstractShrinkList
 from rpython.rlib.objectmodel import specialize
 from rpython.rlib.rweakref import dead_ref
@@ -16,9 +17,12 @@
 
 
 class WeakrefLifeline(W_Root):
+    typedef = None
+
     cached_weakref  = None
     cached_proxy    = None
     other_refs_weak = None
+    has_callbacks   = False
 
     def __init__(self, space):
         self.space = space
@@ -99,31 +103,10 @@
                     return w_ref
         return space.w_None
 
-
-class WeakrefLifelineWithCallbacks(WeakrefLifeline):
-
-    def __init__(self, space, oldlifeline=None):
-        self.space = space
-        if oldlifeline is not None:
-            self.cached_weakref = oldlifeline.cached_weakref
-            self.cached_proxy = oldlifeline.cached_proxy
-            self.other_refs_weak = oldlifeline.other_refs_weak
-
-    def __del__(self):
-        """This runs when the interp-level object goes away, and allows
-        its lifeline to go away.  The purpose of this is to activate the
-        callbacks even if there is no __del__ method on the interp-level
-        W_Root subclass implementing the object.
-        """
-        if self.other_refs_weak is None:
-            return
-        items = self.other_refs_weak.items()
-        for i in range(len(items)-1, -1, -1):
-            w_ref = items[i]()
-            if w_ref is not None and w_ref.w_callable is not None:
-                w_ref.enqueue_for_destruction(self.space,
-                                              W_WeakrefBase.activate_callback,
-                                              'weakref callback of ')
+    def enable_callbacks(self):
+        if not self.has_callbacks:
+            self.space.finalizer_queue.register_finalizer(self)
+            self.has_callbacks = True
 
     @jit.dont_look_inside
     def make_weakref_with_callback(self, w_subtype, w_obj, w_callable):
@@ -131,6 +114,7 @@
         w_ref = space.allocate_instance(W_Weakref, w_subtype)
         W_Weakref.__init__(w_ref, space, w_obj, w_callable)
         self.append_wref_to(w_ref)
+        self.enable_callbacks()
         return w_ref
 
     @jit.dont_look_inside
@@ -141,8 +125,33 @@
         else:
             w_proxy = W_Proxy(space, w_obj, w_callable)
         self.append_wref_to(w_proxy)
+        self.enable_callbacks()
         return w_proxy
 
+    def _finalize_(self):
+        """This is called at the end, if enable_callbacks() was invoked.
+        It activates the callbacks.
+        """
+        if self.other_refs_weak is None:
+            return
+        #
+        # If this is set, then we're in the 'gc.disable()' mode.  In that
+        # case, don't invoke the callbacks now.
+        if self.space.user_del_action.gc_disabled(self):
+            return
+        #
+        items = self.other_refs_weak.items()
+        self.other_refs_weak = None
+        for i in range(len(items)-1, -1, -1):
+            w_ref = items[i]()
+            if w_ref is not None and w_ref.w_callable is not None:
+                try:
+                    w_ref.activate_callback()
+                except Exception as e:
+                    report_error(self.space, e,
+                                 "weakref callback ", w_ref.w_callable)
+
+
 # ____________________________________________________________
 
 
@@ -163,7 +172,6 @@
         self.w_obj_weak = dead_ref
 
     def activate_callback(w_self):
-        assert isinstance(w_self, W_WeakrefBase)
         w_self.space.call_function(w_self.w_callable, w_self)
 
     def descr__repr__(self, space):
@@ -227,32 +235,16 @@
         w_obj.setweakref(space, lifeline)
     return lifeline
 
-def getlifelinewithcallbacks(space, w_obj):
-    lifeline = w_obj.getweakref()
-    if not isinstance(lifeline, WeakrefLifelineWithCallbacks):  # or None
-        oldlifeline = lifeline
-        lifeline = WeakrefLifelineWithCallbacks(space, oldlifeline)
-        w_obj.setweakref(space, lifeline)
-    return lifeline
-
-
-def get_or_make_weakref(space, w_subtype, w_obj):
-    return getlifeline(space, w_obj).get_or_make_weakref(w_subtype, w_obj)
-
-
-def make_weakref_with_callback(space, w_subtype, w_obj, w_callable):
-    lifeline = getlifelinewithcallbacks(space, w_obj)
-    return lifeline.make_weakref_with_callback(w_subtype, w_obj, w_callable)
-
 
 def descr__new__weakref(space, w_subtype, w_obj, w_callable=None,
                         __args__=None):
     if __args__.arguments_w:
         raise oefmt(space.w_TypeError, "__new__ expected at most 2 arguments")
+    lifeline = getlifeline(space, w_obj)
     if space.is_none(w_callable):
-        return get_or_make_weakref(space, w_subtype, w_obj)
+        return lifeline.get_or_make_weakref(w_subtype, w_obj)
     else:
-        return make_weakref_with_callback(space, w_subtype, w_obj, w_callable)
+        return lifeline.make_weakref_with_callback(w_subtype, w_obj, w_callable)
 
 W_Weakref.typedef = TypeDef("weakref",
     __doc__ = """A weak reference to an object 'obj'.  A 'callback' can be 
given,
@@ -308,23 +300,15 @@
         return space.call_args(w_obj, __args__)
 
 
-def get_or_make_proxy(space, w_obj):
-    return getlifeline(space, w_obj).get_or_make_proxy(w_obj)
-
-
-def make_proxy_with_callback(space, w_obj, w_callable):
-    lifeline = getlifelinewithcallbacks(space, w_obj)
-    return lifeline.make_proxy_with_callback(w_obj, w_callable)
-
-
 def proxy(space, w_obj, w_callable=None):
     """Create a proxy object that weakly references 'obj'.
 'callback', if given, is called with the proxy as an argument when 'obj'
 is about to be finalized."""
+    lifeline = getlifeline(space, w_obj)
     if space.is_none(w_callable):
-        return get_or_make_proxy(space, w_obj)
+        return lifeline.get_or_make_proxy(w_obj)
     else:
-        return make_proxy_with_callback(space, w_obj, w_callable)
+        return lifeline.make_proxy_with_callback(w_obj, w_callable)
 
 def descr__new__proxy(space, w_subtype, w_obj, w_callable=None):
     raise oefmt(space.w_TypeError, "cannot create 'weakproxy' instances")
@@ -345,7 +329,7 @@
 
 proxy_typedef_dict = {}
 callable_proxy_typedef_dict = {}
-special_ops = {'repr': True, 'userdel': True, 'hash': True}
+special_ops = {'repr': True, 'hash': True}
 
 for opname, _, arity, special_methods in ObjSpace.MethodTable:
     if opname in special_ops or not special_methods:
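
With this change WeakrefLifelineWithCallbacks disappears: a single WeakrefLifeline registers itself with the space's finalizer queue the first time a callback is attached (enable_callbacks), and its _finalize_() walks other_refs_weak, invoking each callback and reporting rather than propagating any exception. The app-level behaviour it has to preserve is ordinary weakref-callback semantics, roughly as in this plain-CPython illustration (not the interp-level code above):

    # A callback attached to a weakref runs once its referent goes away.
    import gc
    import weakref

    log = []

    class A(object):
        pass

    a = A()
    r = weakref.ref(a, lambda ref: log.append("callback ran"))
    del a
    gc.collect()      # on PyPy the collection is not immediate, hence collect()
    assert log == ["callback ran"]
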
diff --git a/pypy/module/_weakref/test/test_weakref.py b/pypy/module/_weakref/test/test_weakref.py
--- a/pypy/module/_weakref/test/test_weakref.py
+++ b/pypy/module/_weakref/test/test_weakref.py
@@ -1,6 +1,9 @@
 class AppTestWeakref(object):
     spaceconfig = dict(usemodules=('_weakref',))
-                    
+
+    def setup_class(cls):
+        cls.w_runappdirect = cls.space.wrap(cls.runappdirect)
+
     def test_simple(self):
         import _weakref, gc
         class A(object):
@@ -287,6 +290,9 @@
             assert a1 is None
 
     def test_del_and_callback_and_id(self):
+        if not self.runappdirect:
+            skip("the id() doesn't work correctly in __del__ and "
+                 "callbacks before translation")
         import gc, weakref
         seen_del = []
         class A(object):
diff --git a/pypy/module/bz2/interp_bz2.py b/pypy/module/bz2/interp_bz2.py
--- a/pypy/module/bz2/interp_bz2.py
+++ b/pypy/module/bz2/interp_bz2.py
@@ -518,8 +518,14 @@
     def __init__(self, space, compresslevel):
         self.space = space
         self.bzs = lltype.malloc(bz_stream.TO, flavor='raw', zero=True)
-        self.running = False
-        self._init_bz2comp(compresslevel)
+        try:
+            self.running = False
+            self._init_bz2comp(compresslevel)
+        except:
+            lltype.free(self.bzs, flavor='raw')
+            self.bzs = lltype.nullptr(bz_stream.TO)
+            raise
+        self.register_finalizer(space)
 
     def _init_bz2comp(self, compresslevel):
         if compresslevel < 1 or compresslevel > 9:
@@ -532,9 +538,12 @@
 
         self.running = True
 
-    def __del__(self):
-        BZ2_bzCompressEnd(self.bzs)
-        lltype.free(self.bzs, flavor='raw')
+    def _finalize_(self):
+        bzs = self.bzs
+        if bzs:
+            self.bzs = lltype.nullptr(bz_stream.TO)
+            BZ2_bzCompressEnd(bzs)
+            lltype.free(bzs, flavor='raw')
 
     @unwrap_spec(data='bufferstr')
     def compress(self, data):
@@ -621,10 +630,16 @@
         self.space = space
 
         self.bzs = lltype.malloc(bz_stream.TO, flavor='raw', zero=True)
-        self.running = False
-        self.unused_data = ""
+        try:
+            self.running = False
+            self.unused_data = ""
 
-        self._init_bz2decomp()
+            self._init_bz2decomp()
+        except:
+            lltype.free(self.bzs, flavor='raw')
+            self.bzs = lltype.nullptr(bz_stream.TO)
+            raise
+        self.register_finalizer(space)
 
     def _init_bz2decomp(self):
         bzerror = BZ2_bzDecompressInit(self.bzs, 0, 0)
@@ -633,9 +648,12 @@
 
         self.running = True
 
-    def __del__(self):
-        BZ2_bzDecompressEnd(self.bzs)
-        lltype.free(self.bzs, flavor='raw')
+    def _finalize_(self):
+        bzs = self.bzs
+        if bzs:
+            self.bzs = lltype.nullptr(bz_stream.TO)
+            BZ2_bzDecompressEnd(bzs)
+            lltype.free(bzs, flavor='raw')
 
     @unwrap_spec(data='bufferstr')
     def decompress(self, data):
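
Besides moving to _finalize_(), the bz2 constructors above become exception-safe: if initialisation fails after the raw bz_stream allocation, the buffer is freed and the field nulled before re-raising, and the finalizer is only registered once the object is fully set up. A plain-Python sketch of that shape; alloc_stream, free_stream, init_compressor and Compressor are invented for the example.

    # Clean up the raw allocation on the error path; register the finalizer
    # only on success.
    allocations = set()

    def alloc_stream():
        handle = object()
        allocations.add(handle)
        return handle

    def free_stream(handle):
        allocations.discard(handle)

    def init_compressor(handle, level):
        if not 1 <= level <= 9:
            raise ValueError("compresslevel must be between 1 and 9")

    class Compressor(object):
        def __init__(self, level):
            self.stream = alloc_stream()
            try:
                self.running = False
                init_compressor(self.stream, level)
            except Exception:
                free_stream(self.stream)   # do not leak the raw allocation
                self.stream = None
                raise
            # the RPython code calls self.register_finalizer(space) here

        def _finalize_(self):
            stream = self.stream
            if stream is not None:
                self.stream = None
                free_stream(stream)

    try:
        Compressor(level=99)
    except ValueError:
        pass
    assert not allocations                 # nothing leaked on the error path
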
diff --git a/pypy/module/bz2/test/support.py b/pypy/module/bz2/test/support.py
--- a/pypy/module/bz2/test/support.py
+++ b/pypy/module/bz2/test/support.py
@@ -10,5 +10,6 @@
         #
         while tries and ll2ctypes.ALLOCATED:
             gc.collect() # to make sure we disallocate buffers
+            self.space.getexecutioncontext()._run_finalizers_now()
             tries -= 1
         assert not ll2ctypes.ALLOCATED
diff --git a/pypy/module/cppyy/interp_cppyy.py b/pypy/module/cppyy/interp_cppyy.py
--- a/pypy/module/cppyy/interp_cppyy.py
+++ b/pypy/module/cppyy/interp_cppyy.py
@@ -1020,9 +1020,12 @@
 
 
 class W_CPPInstance(W_Root):
-    _attrs_ = ['space', 'cppclass', '_rawobject', 'isref', 'python_owns']
+    _attrs_ = ['space', 'cppclass', '_rawobject', 'isref', 'python_owns',
+               'finalizer_registered']
     _immutable_fields_ = ["cppclass", "isref"]
 
+    finalizer_registered = False
+
     def __init__(self, space, cppclass, rawobject, isref, python_owns):
         self.space = space
         self.cppclass = cppclass
@@ -1032,6 +1035,12 @@
         assert not isref or not python_owns
         self.isref = isref
         self.python_owns = python_owns
+        self._opt_register_finalizer()
+
+    def _opt_register_finalizer(self):
+        if self.python_owns and not self.finalizer_registered:
+            self.register_finalizer(self.space)
+            self.finalizer_registered = True
 
     def _nullcheck(self):
         if not self._rawobject or (self.isref and not self.get_rawobject()):
@@ -1045,6 +1054,7 @@
     @unwrap_spec(value=bool)
     def fset_python_owns(self, space, value):
         self.python_owns = space.is_true(value)
+        self._opt_register_finalizer()
 
     def get_cppthis(self, calling_scope):
         return self.cppclass.get_cppthis(self, calling_scope)
@@ -1143,16 +1153,14 @@
                                (self.cppclass.name, rffi.cast(rffi.ULONG, self.get_rawobject())))
 
     def destruct(self):
-        assert isinstance(self, W_CPPInstance)
         if self._rawobject and not self.isref:
             memory_regulator.unregister(self)
             capi.c_destruct(self.space, self.cppclass, self._rawobject)
             self._rawobject = capi.C_NULL_OBJECT
 
-    def __del__(self):
+    def _finalize_(self):
         if self.python_owns:
-            self.enqueue_for_destruction(self.space, W_CPPInstance.destruct,
-                                         '__del__() method of ')
+            self.destruct()
 
 W_CPPInstance.typedef = TypeDef(
     'CPPInstance',
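
W_CPPInstance only needs a finalizer while python_owns is true, so the hunk above registers it lazily through _opt_register_finalizer() and guards against registering twice when ownership is later flipped via fset_python_owns. A plain-Python sketch of that shape; 'registered' stands in for the finalizer queue and Instance is an invented class.

    # Lazy, at-most-once finalizer registration.
    registered = []

    class Instance(object):
        finalizer_registered = False

        def __init__(self, python_owns):
            self.python_owns = python_owns
            self._opt_register_finalizer()

        def _opt_register_finalizer(self):
            if self.python_owns and not self.finalizer_registered:
                registered.append(self)    # stands in for register_finalizer()
                self.finalizer_registered = True

        def set_python_owns(self, value):
            self.python_owns = value
            self._opt_register_finalizer()

    obj = Instance(python_owns=False)      # no finalizer needed yet
    obj.set_python_owns(True)              # registers now
    obj.set_python_owns(True)              # already registered, no duplicate
    assert registered == [obj]
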
diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py
--- a/pypy/module/cpyext/slotdefs.py
+++ b/pypy/module/cpyext/slotdefs.py
@@ -374,7 +374,75 @@
     header = pypy_decl
     if mangle_name('', typedef.name) is None:
         header = None
-    if name == 'tp_setattro':
+    handled = False
+    # unary functions
+    for tp_name, attr in [('tp_as_number.c_nb_int', '__int__'),
+                          ('tp_as_number.c_nb_long', '__long__'),
+                          ('tp_as_number.c_nb_float', '__float__'),
+                          ('tp_as_number.c_nb_negative', '__neg__'),
+                          ('tp_as_number.c_nb_positive', '__pos__'),
+                          ('tp_as_number.c_nb_absolute', '__abs__'),
+                          ('tp_as_number.c_nb_invert', '__invert__'),
+                          ('tp_as_number.c_nb_index', '__index__'),
+                          ('tp_str', '__str__'),
+                          ('tp_repr', '__repr__'),
+                          ('tp_iter', '__iter__'),
+                          ]:
+        if name == tp_name:
+            slot_fn = w_type.getdictvalue(space, attr)
+            if slot_fn is None:
+                return
+
+            @cpython_api([PyObject], PyObject, header=header)
+            @func_renamer("cpyext_%s_%s" % (name.replace('.', '_'), typedef.name))
+            def slot_func(space, w_self):
+                return space.call_function(slot_fn, w_self)
+            api_func = slot_func.api_func
+            handled = True
+
+    # binary functions
+    for tp_name, attr in [('tp_as_number.c_nb_add', '__add__'),
+                          ('tp_as_number.c_nb_subtract', '__subtract__'),
+                          ('tp_as_number.c_nb_multiply', '__mul__'),
+                          ('tp_as_number.c_nb_divide', '__div__'),
+                          ('tp_as_number.c_nb_remainder', '__mod__'),
+                          ('tp_as_number.c_nb_divmod', '__divmod__'),
+                          ('tp_as_number.c_nb_lshift', '__lshift__'),
+                          ('tp_as_number.c_nb_rshift', '__rshift__'),
+                          ('tp_as_number.c_nb_and', '__and__'),
+                          ('tp_as_number.c_nb_xor', '__xor__'),
+                          ('tp_as_number.c_nb_or', '__or__'),
+                          ]:
+        if name == tp_name:
+            slot_fn = w_type.getdictvalue(space, attr)
+            if slot_fn is None:
+                return
+
+            @cpython_api([PyObject, PyObject], PyObject, header=header)
+            @func_renamer("cpyext_%s_%s" % (name.replace('.', '_'), typedef.name))
+            def slot_func(space, w_self, w_arg):
+                return space.call_function(slot_fn, w_self, w_arg)
+            api_func = slot_func.api_func
+            handled = True
+
+    # ternary functions
+    for tp_name, attr in [('tp_as_number.c_nb_power', ''),
+                          ]:
+        if name == tp_name:
+            slot_fn = w_type.getdictvalue(space, attr)
+            if slot_fn is None:
+                return
+
+            @cpython_api([PyObject, PyObject, PyObject], PyObject, header=header)
+            @func_renamer("cpyext_%s_%s" % (name.replace('.', '_'), typedef.name))
+            def slot_func(space, w_self, w_arg1, w_arg2):
+                return space.call_function(slot_fn, w_self, w_arg1, w_arg2)
+            api_func = slot_func.api_func
+            handled = True
+
+    if handled:
+        pass
+    elif name == 'tp_setattro':
         setattr_fn = w_type.getdictvalue(space, '__setattr__')
         delattr_fn = w_type.getdictvalue(space, '__delattr__')
         if setattr_fn is None:
@@ -401,28 +469,6 @@
             return space.call_function(getattr_fn, w_self, w_name)
         api_func = slot_tp_getattro.api_func
 
-    elif name == 'tp_as_number.c_nb_int':
-        int_fn = w_type.getdictvalue(space, '__int__')
-        if int_fn is None:
-            return
-
-        @cpython_api([PyObject], PyObject, header=header)
-        @func_renamer("cpyext_%s_%s" % (name.replace('.', '_'), typedef.name))
-        def slot_nb_int(space, w_self):
-            return space.call_function(int_fn, w_self)
-        api_func = slot_nb_int.api_func
-
-    elif name == 'tp_as_number.c_nb_float':
-        float_fn = w_type.getdictvalue(space, '__float__')
-        if float_fn is None:
-            return
-
-        @cpython_api([PyObject], PyObject, header=header)
-        @func_renamer("cpyext_%s_%s" % (name.replace('.', '_'), typedef.name))
-        def slot_nb_float(space, w_self):
-            return space.call_function(float_fn, w_self)
-        api_func = slot_nb_float.api_func
-
     elif name == 'tp_call':
         call_fn = w_type.getdictvalue(space, '__call__')
         if call_fn is None:
@@ -436,28 +482,6 @@
             return space.call_args(call_fn, args)
         api_func = slot_tp_call.api_func
 
-    elif name == 'tp_str':
-        str_fn = w_type.getdictvalue(space, '__str__')
-        if str_fn is None:
-            return
-
-        @cpython_api([PyObject], PyObject, header=header)
-        @func_renamer("cpyext_%s_%s" % (name.replace('.', '_'), typedef.name))
-        def slot_tp_str(space, w_self):
-            return space.call_function(str_fn, w_self)
-        api_func = slot_tp_str.api_func
-
-    elif name == 'tp_iter':
-        iter_fn = w_type.getdictvalue(space, '__iter__')
-        if iter_fn is None:
-            return
-
-        @cpython_api([PyObject], PyObject, header=header)
-        @func_renamer("cpyext_%s_%s" % (name.replace('.', '_'), typedef.name))
-        def slot_tp_iter(space, w_self):
-            return space.call_function(iter_fn, w_self)
-        api_func = slot_tp_iter.api_func
-
     elif name == 'tp_iternext':
         iternext_fn = w_type.getdictvalue(space, 'next')
         if iternext_fn is None:
@@ -501,6 +525,7 @@
             return space.call_args(space.get(new_fn, w_self), args)
         api_func = slot_tp_new.api_func
     else:
+        # missing: tp_as_number.nb_nonzero, tp_as_number.nb_coerce
         return
 
     return lambda: llhelper(api_func.functype, api_func.get_wrapper(space))
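
The slotdefs change above replaces a chain of per-slot elif branches with tables of (slot name, attribute name) pairs for unary, binary and ternary slots, generating one generic wrapper per arity. A table-driven sketch of the same idea in plain Python; the rows, make_wrapper and build_slots below are illustrative, not the cpyext tables themselves.

    # One wrapper factory per arity instead of one elif branch per slot.
    UNARY_SLOTS = [('tp_str', '__str__'),
                   ('tp_repr', '__repr__')]
    BINARY_SLOTS = [('tp_as_number.c_nb_add', '__add__')]

    def make_wrapper(dunder):
        def wrapper(self, *args):
            return getattr(self, dunder)(*args)
        return wrapper

    def build_slots(cls):
        slots = {}
        for table in (UNARY_SLOTS, BINARY_SLOTS):
            for slot_name, dunder in table:
                if hasattr(cls, dunder):          # skip slots the type lacks
                    slots[slot_name] = make_wrapper(dunder)
        return slots

    slots = build_slots(int)
    assert slots['tp_repr'](5) == '5'
    assert slots['tp_as_number.c_nb_add'](2, 3) == 5
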
diff --git a/pypy/module/cpyext/src/abstract.c b/pypy/module/cpyext/src/abstract.c
--- a/pypy/module/cpyext/src/abstract.c
+++ b/pypy/module/cpyext/src/abstract.c
@@ -326,3 +326,9 @@
     return tmp;
 }
 
+/* for binary compatibility with 5.1 */
+PyAPI_FUNC(void) PyPyObject_Del(PyObject *);
+void PyPyObject_Del(PyObject *op)
+{
+    PyObject_FREE(op);
+}
diff --git a/pypy/module/cpyext/test/test_api.py b/pypy/module/cpyext/test/test_api.py
--- a/pypy/module/cpyext/test/test_api.py
+++ b/pypy/module/cpyext/test/test_api.py
@@ -1,4 +1,4 @@
-import py
+import py, pytest
 from rpython.rtyper.lltypesystem import rffi, lltype
 from pypy.interpreter.baseobjspace import W_Root
 from pypy.module.cpyext.state import State
@@ -100,7 +100,8 @@
         PyPy_TypedefTest2(space, ppos)
         lltype.free(ppos, flavor='raw')
 
-
+@pytest.mark.skipif(os.environ.get('USER')=='root', reason='root can write to all files')
 def test_copy_header_files(tmpdir):
     api.copy_header_files(tmpdir, True)
     def check(name):
diff --git a/pypy/module/cpyext/test/test_bytesobject.py b/pypy/module/cpyext/test/test_bytesobject.py
--- a/pypy/module/cpyext/test/test_bytesobject.py
+++ b/pypy/module/cpyext/test/test_bytesobject.py
@@ -40,7 +40,7 @@
                  #endif
                  if(s->ob_type->tp_basicsize != expected_size)
                  {
-                     printf("tp_basicsize==%ld\\n", s->ob_type->tp_basicsize); 
+                     printf("tp_basicsize==%zd\\n", s->ob_type->tp_basicsize); 
                      result = 0;
                  }
                  Py_DECREF(s);
diff --git a/pypy/module/cpyext/test/test_typeobject.py b/pypy/module/cpyext/test/test_typeobject.py
--- a/pypy/module/cpyext/test/test_typeobject.py
+++ b/pypy/module/cpyext/test/test_typeobject.py
@@ -921,3 +921,105 @@
                           '    multiple bases have instance lay-out conflict')
         else:
             raise AssertionError("did not get TypeError!")
+
+    def test_call_tp_dealloc_when_created_from_python(self):
+        module = self.import_extension('foo', [
+            ("fetchFooType", "METH_VARARGS",
+             """
+                PyObject *o;
+                Foo_Type.tp_basicsize = sizeof(FooObject);
+                Foo_Type.tp_dealloc = &dealloc_foo;
+                Foo_Type.tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_CHECKTYPES
+                                    | Py_TPFLAGS_BASETYPE;
+                Foo_Type.tp_new = &new_foo;
+                Foo_Type.tp_free = &PyObject_Del;
+                if (PyType_Ready(&Foo_Type) < 0) return NULL;
+
+                o = PyObject_New(PyObject, &Foo_Type);
+                init_foo(o);
+                Py_DECREF(o);   /* calls dealloc_foo immediately */
+
+                Py_INCREF(&Foo_Type);
+                return (PyObject *)&Foo_Type;
+             """),
+            ("newInstance", "METH_O",
+             """
+                PyTypeObject *tp = (PyTypeObject *)args;
+                PyObject *e = PyTuple_New(0);
+                PyObject *o = tp->tp_new(tp, e, NULL);