Author: mattip <matti.pi...@gmail.com>
Branch: release-2.6.x
Changeset: r79144:1786177ea575
Date: 2015-08-22 20:30 +0300
http://bitbucket.org/pypy/pypy/changeset/1786177ea575/

Log:    merge default into release

diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO
--- a/lib_pypy/cffi.egg-info/PKG-INFO
+++ b/lib_pypy/cffi.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: cffi
-Version: 1.2.0
+Version: 1.2.1
 Summary: Foreign Function Interface for Python calling C code.
 Home-page: http://cffi.readthedocs.org
 Author: Armin Rigo, Maciej Fijalkowski
diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py
--- a/lib_pypy/cffi/__init__.py
+++ b/lib_pypy/cffi/__init__.py
@@ -4,8 +4,8 @@
 from .api import FFI, CDefError, FFIError
 from .ffiplatform import VerificationError, VerificationMissing
 
-__version__ = "1.2.0"
-__version_info__ = (1, 2, 0)
+__version__ = "1.2.1"
+__version_info__ = (1, 2, 1)
 
 # The verifier module file names are based on the CRC32 of a string that
 # contains the following version number.  It may be older than __version__
diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h
--- a/lib_pypy/cffi/_cffi_include.h
+++ b/lib_pypy/cffi/_cffi_include.h
@@ -46,7 +46,7 @@
 # endif
 #else
 # include <stdint.h>
-# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
 #  include <alloca.h>
 # endif
 #endif
diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py
--- a/lib_pypy/cffi/cparser.py
+++ b/lib_pypy/cffi/cparser.py
@@ -15,9 +15,11 @@
 except ImportError:
     lock = None
 
-_r_comment = re.compile(r"/\*.*?\*/|//.*?$", re.DOTALL | re.MULTILINE)
-_r_define  = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)\s+(.*?)$",
-                        re.MULTILINE)
+_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
+                        re.DOTALL | re.MULTILINE)
+_r_define  = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
+                        r"\b((?:[^\n\\]|\\.)*?)$",
+                        re.DOTALL | re.MULTILINE)
 _r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
 _r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
 _r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
@@ -39,6 +41,7 @@
     macros = {}
     for match in _r_define.finditer(csource):
         macroname, macrovalue = match.groups()
+        macrovalue = macrovalue.replace('\\\n', '').strip()
         macros[macroname] = macrovalue
     csource = _r_define.sub('', csource)
     # Replace "[...]" with "[__dotdotdotarray__]"
@@ -423,13 +426,10 @@
                 raise api.CDefError(
                     "%s: a function with only '(...)' as argument"
                     " is not correct C" % (funcname or 'in expression'))
-        elif (len(params) == 1 and
-            isinstance(params[0].type, pycparser.c_ast.TypeDecl) and
-            isinstance(params[0].type.type, pycparser.c_ast.IdentifierType)
-                and list(params[0].type.type.names) == ['void']):
-            del params[0]
         args = [self._as_func_arg(self._get_type(argdeclnode.type))
                 for argdeclnode in params]
+        if not ellipsis and args == [model.void_type]:
+            args = []
         result = self._get_type(typenode.type)
         return model.RawFunctionType(tuple(args), result, ellipsis)
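
[For illustration only, not part of the diff: a standalone sketch of what the
_r_define change above does.  Backslash-newline continuations inside a "#define"
are now captured by the regex and joined by the new replace('\\\n', '') line.
The csource string below is invented for the example.]

    import re

    # same pattern as the new _r_define above
    _r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
                           r"\b((?:[^\n\\]|\\.)*?)$",
                           re.DOTALL | re.MULTILINE)

    csource = "#define ABC\\\n    42\n"          # value continued on the next line
    name, value = _r_define.search(csource).groups()
    value = value.replace('\\\n', '').strip()    # join the continuation, as in the diff
    print(name, value)                           # -> ABC 42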
 
diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py
--- a/lib_pypy/cffi/recompiler.py
+++ b/lib_pypy/cffi/recompiler.py
@@ -4,11 +4,6 @@
 
 VERSION = "0x2601"
 
-try:
-    int_type = (int, long)
-except NameError:    # Python 3
-    int_type = int
-
 
 class GlobalExpr:
     def __init__(self, name, address, type_op, size=0, check_value=0):
diff --git a/lib_pypy/cffi/setuptools_ext.py b/lib_pypy/cffi/setuptools_ext.py
--- a/lib_pypy/cffi/setuptools_ext.py
+++ b/lib_pypy/cffi/setuptools_ext.py
@@ -81,10 +81,16 @@
     allsources.extend(kwds.pop('sources', []))
     ext = Extension(name=module_name, sources=allsources, **kwds)
 
-    def make_mod(tmpdir):
+    def make_mod(tmpdir, pre_run=None):
         c_file = os.path.join(tmpdir, module_name + source_extension)
         log.info("generating cffi module %r" % c_file)
         mkpath(tmpdir)
+        # a setuptools-only, API-only hook: called with the "ext" and "ffi"
+        # arguments just before we turn the ffi into C code.  To use it,
+        # subclass the 'distutils.command.build_ext.build_ext' class and
+        # add a method 'def pre_run(self, ext, ffi)'.
+        if pre_run is not None:
+            pre_run(ext, ffi)
         updated = recompiler.make_c_source(ffi, module_name, source, c_file)
         if not updated:
             log.info("already up-to-date")
@@ -98,7 +104,8 @@
     class build_ext_make_mod(base_class):
         def run(self):
             if ext.sources[0] == '$PLACEHOLDER':
-                ext.sources[0] = make_mod(self.build_temp)
+                pre_run = getattr(self, 'pre_run', None)
+                ext.sources[0] = make_mod(self.build_temp, pre_run)
             base_class.run(self)
     dist.cmdclass['build_ext'] = build_ext_make_mod
     # NB. multiple runs here will create multiple 'build_ext_make_mod'
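
[For illustration only, not part of the diff: a minimal, hypothetical setup.py
using the pre_run hook described in the comment above.  Project and file names
are invented; the real usage is exercised by the test_zdist.py changes further
down.]

    from setuptools import setup
    from distutils.command.build_ext import build_ext

    class my_build_ext(build_ext):
        def pre_run(self, ext, ffi):
            # called with the Extension and the FFI object just before
            # the ffi is turned into C code
            print("about to generate C for", ext.name)

    setup(
        name='example',
        cffi_modules=["_build.py:ffi"],   # _build.py calls ffi.set_source(...)
        cmdclass={'build_ext': my_build_ext},
    )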
diff --git a/pypy/doc/index-of-whatsnew.rst b/pypy/doc/index-of-whatsnew.rst
--- a/pypy/doc/index-of-whatsnew.rst
+++ b/pypy/doc/index-of-whatsnew.rst
@@ -7,6 +7,7 @@
 .. toctree::
 
    whatsnew-head.rst
+   whatsnew-2.6.1.rst
    whatsnew-2.6.0.rst
    whatsnew-2.5.1.rst
    whatsnew-2.5.0.rst
diff --git a/pypy/doc/whatsnew-2.6.1.rst b/pypy/doc/whatsnew-2.6.1.rst
--- a/pypy/doc/whatsnew-2.6.1.rst
+++ b/pypy/doc/whatsnew-2.6.1.rst
@@ -32,7 +32,10 @@
 ``lst[0]`` is still *not* the float ``42.0`` but the integer ``42``.)
 
 .. branch: cffi-callback-onerror
+Part of cffi 1.2.
+
 .. branch: cffi-new-allocator
+Part of cffi 1.2.
 
 .. branch: unicode-dtype
 
@@ -67,3 +70,7 @@
 .. branch: vmprof-review
 
 Clean up of vmprof, notably to handle correctly multiple threads
+
+.. branch: no_boehm_dl
+
+Remove extra link library from Boehm GC
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -3,5 +3,5 @@
 =======================
 
 .. this is a revision shortly after release-2.6.1
-.. startrev: 83ebc73d4fcb
+.. startrev: 07769be4057b
 
diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py
--- a/pypy/module/_cffi_backend/__init__.py
+++ b/pypy/module/_cffi_backend/__init__.py
@@ -2,7 +2,7 @@
 from pypy.interpreter.mixedmodule import MixedModule
 from rpython.rlib import rdynload
 
-VERSION = "1.2.0"
+VERSION = "1.2.1"
 
 
 class Module(MixedModule):
diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py
--- a/pypy/module/_cffi_backend/test/_backend_test_c.py
+++ b/pypy/module/_cffi_backend/test/_backend_test_c.py
@@ -3427,4 +3427,4 @@
 
 def test_version():
     # this test is here mostly for PyPy
-    assert __version__ == "1.2.0"
+    assert __version__ == "1.2.1"
diff --git a/pypy/module/cpyext/include/object.h b/pypy/module/cpyext/include/object.h
--- a/pypy/module/cpyext/include/object.h
+++ b/pypy/module/cpyext/include/object.h
@@ -379,6 +379,8 @@
     PyObject *ht_name, *ht_slots;
 } PyHeapTypeObject;
 
+#define PyObject_Bytes PyObject_Str
+
 /* Flag bits for printing: */
 #define Py_PRINT_RAW   1       /* No string quotes etc. */
 
diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_parsing.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_parsing.py
--- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_parsing.py
+++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_parsing.py
@@ -160,6 +160,35 @@
     assert func.name == 'sin'
     assert func.BType == '<func (<double>, <double>), <double>, False>'
 
+def test_remove_line_continuation_comments():
+    ffi = FFI(backend=FakeBackend())
+    ffi.cdef("""
+        double // blah \\
+                  more comments
+        x(void);
+        double // blah\\\\
+        y(void);
+        double // blah\\ \
+                  etc
+        z(void);
+    """)
+    m = ffi.dlopen(lib_m)
+    m.x
+    m.y
+    m.z
+
+def test_line_continuation_in_defines():
+    ffi = FFI(backend=FakeBackend())
+    ffi.cdef("""
+        #define ABC\\
+            42
+        #define BCD   \\
+            43
+    """)
+    m = ffi.dlopen(lib_m)
+    assert m.ABC == 42
+    assert m.BCD == 43
+
 def test_define_not_supported_for_now():
     ffi = FFI(backend=FakeBackend())
     e = py.test.raises(CDefError, ffi.cdef, '#define FOO "blah"')
@@ -238,6 +267,13 @@
     ffi = FFI()
     ffi.cdef("typedef _Bool bool; void f(bool);")
 
+def test_void_renamed_as_only_arg():
+    ffi = FFI()
+    ffi.cdef("typedef void void_t1;"
+             "typedef void_t1 void_t;"
+             "typedef int (*func_t)(void_t);")
+    assert ffi.typeof("func_t").args == ()
+
 def test_win_common_types():
     from cffi.commontypes import COMMON_TYPES, _CACHE
     from cffi.commontypes import win_common_types, resolve_common_type
diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py
--- a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py
+++ b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py
@@ -25,6 +25,9 @@
     if 1:     # test the .cpp mode too
         kwds.setdefault('source_extension', '.cpp')
         source = 'extern "C" {\n%s\n}' % (source,)
+    else:
+        kwds['extra_compile_args'] = (kwds.get('extra_compile_args', []) +
+                                      ['-Werror'])
     return recompiler._verify(ffi, module_name, source, *args, **kwds)
 
 
diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py
--- a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py
+++ b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py
@@ -318,15 +318,32 @@
                 import cffi
                 ffi = cffi.FFI()
                 ffi.set_source("pack3.mymod", "/*code would be here*/")
+                ffi._hi_there = 42
             """)
         with open("setup.py", "w") as f:
-            f.write("""if 1:
+            f.write("from __future__ import print_function\n"
+                """if 1:
                 from setuptools import setup
+                from distutils.command.build_ext import build_ext
+                import os
+
+                class TestBuildExt(build_ext):
+                    def pre_run(self, ext, ffi):
+                        print('_make_setuptools_api: in pre_run:', end=" ")
+                        assert ffi._hi_there == 42
+                        assert ext.name == "pack3.mymod"
+                        fn = os.path.join(os.path.dirname(self.build_lib),
+                                          '..', 'see_me')
+                        print('creating %r' % (fn,))
+                        open(fn, 'w').close()
+
                 setup(name='example1',
                       version='0.1',
                       packages=['pack3'],
                       package_dir={'': 'src1'},
-                      cffi_modules=["src1/pack3/_build.py:ffi"])
+                      cffi_modules=["src1/pack3/_build.py:ffi"],
+                      cmdclass={'build_ext': TestBuildExt},
+                      )
             """)
 
     @chdir_to_tmp
@@ -335,6 +352,7 @@
         self.run(["setup.py", "build"])
         self.check_produced_files({'setup.py': None,
                                    'build': '?',
+                                   'see_me': None,
                                    'src1': {'pack3': {'__init__.py': None,
                                                       '_build.py': None}}})
 
@@ -344,6 +362,7 @@
         self.run(["setup.py", "build_ext", "-i"])
         self.check_produced_files({'setup.py': None,
                                    'build': '?',
+                                   'see_me': None,
                                    'src1': {'pack3': {'__init__.py': None,
                                                       '_build.py': None,
                                                       'mymod.SO': None}}})
diff --git a/rpython/jit/backend/arm/runner.py b/rpython/jit/backend/arm/runner.py
--- a/rpython/jit/backend/arm/runner.py
+++ b/rpython/jit/backend/arm/runner.py
@@ -64,12 +64,6 @@
                                               operations,
                                               original_loop_token, log=log)
 
-    def clear_latest_values(self, count):
-        setitem = self.assembler.fail_boxes_ptr.setitem
-        null = lltype.nullptr(llmemory.GCREF.TO)
-        for index in range(count):
-            setitem(index, null)
-
     def cast_ptr_to_int(x):
         adr = llmemory.cast_ptr_to_adr(x)
         return CPU_ARM.cast_adr_to_int(adr)
diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py
--- a/rpython/jit/backend/llsupport/regalloc.py
+++ b/rpython/jit/backend/llsupport/regalloc.py
@@ -636,8 +636,7 @@
             assert isinstance(box, Box)
             loc = self.fm.get_new_loc(box)
             locs.append(loc.value - base_ofs)
-        if looptoken.compiled_loop_token is not None:
-            # for tests
+        if looptoken.compiled_loop_token is not None:   # <- for tests
             looptoken.compiled_loop_token._ll_initial_locs = locs
 
     def can_merge_with_next_guard(self, op, i, operations):
diff --git a/rpython/jit/backend/x86/runner.py b/rpython/jit/backend/x86/runner.py
--- a/rpython/jit/backend/x86/runner.py
+++ b/rpython/jit/backend/x86/runner.py
@@ -100,12 +100,6 @@
         return self.assembler.assemble_bridge(faildescr, inputargs, operations,
                                               original_loop_token, log, logger)
 
-    def clear_latest_values(self, count):
-        setitem = self.assembler.fail_boxes_ptr.setitem
-        null = lltype.nullptr(llmemory.GCREF.TO)
-        for index in range(count):
-            setitem(index, null)
-
     def cast_ptr_to_int(x):
         adr = llmemory.cast_ptr_to_adr(x)
         return CPU386.cast_adr_to_int(adr)
diff --git a/rpython/jit/metainterp/test/test_ajit.py b/rpython/jit/metainterp/test/test_ajit.py
--- a/rpython/jit/metainterp/test/test_ajit.py
+++ b/rpython/jit/metainterp/test/test_ajit.py
@@ -268,6 +268,7 @@
                 y -= 1
             return res
         res = self.meta_interp(f, [6, sys.maxint, 48])
+        self.check_trace_count(6)
         assert res == f(6, sys.maxint, 48)
 
     def test_loop_invariant_mul_bridge_ovf2(self):
diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py
--- a/rpython/memory/gc/incminimark.py
+++ b/rpython/memory/gc/incminimark.py
@@ -166,7 +166,7 @@
 
 # The marking phase. We walk the list 'objects_to_trace' of all gray objects
 # and mark all of the things they point to gray. This step lasts until there
-# are no more gray objects.
+# are no more gray objects.  ('objects_to_trace' never contains pinned objs.)
 STATE_MARKING = 1
 
 # here we kill all the unvisited objects
@@ -1146,6 +1146,9 @@
                       "raw_malloc_might_sweep must be empty outside SWEEPING")
 
             if self.gc_state == STATE_MARKING:
+                self.objects_to_trace.foreach(self._check_not_in_nursery, None)
+                self.more_objects_to_trace.foreach(self._check_not_in_nursery,
+                                                   None)
                 self._debug_objects_to_trace_dict1 = \
                                             self.objects_to_trace.stack2dict()
                 self._debug_objects_to_trace_dict2 = \
@@ -1156,6 +1159,10 @@
             else:
                 MovingGCBase.debug_check_consistency(self)
 
+    def _check_not_in_nursery(self, obj, ignore):
+        ll_assert(not self.is_in_nursery(obj),
+                  "'objects_to_trace' contains a nursery object")
+
     def debug_check_object(self, obj):
         # We are after a minor collection, and possibly after a major
         # collection step.  No object should be in the nursery (except
@@ -1789,6 +1796,8 @@
                 # If we're incrementally marking right now, sorry, we also
                 # need to add the object to 'more_objects_to_trace' and have
                 # it fully traced once at the end of the current marking phase.
+                ll_assert(not self.is_in_nursery(obj),
+                          "expected nursery obj in collect_cardrefs_to_nursery")
                 if self.gc_state == STATE_MARKING:
                     self.header(obj).tid &= ~GCFLAG_VISITED
                     self.more_objects_to_trace.append(obj)
@@ -1845,8 +1854,11 @@
         # need to record the not-visited-yet (white) old objects.  So
         # as a conservative approximation, we need to add the object to
         # the list if and only if it doesn't have GCFLAG_VISITED yet.
+        #
+        # Additionally, ignore pinned objects.
+        #
         obj = root.address[0]
-        if not self.header(obj).tid & GCFLAG_VISITED:
+        if (self.header(obj).tid & (GCFLAG_VISITED | GCFLAG_PINNED)) == 0:
             self.more_objects_to_trace.append(obj)
 
     def _trace_drag_out(self, root, parent):
@@ -1899,7 +1911,7 @@
                 #
                 self.old_objects_pointing_to_pinned.append(parent)
                 self.updated_old_objects_pointing_to_pinned = True
-                self.header(parent).tid |= GCFLAG_PINNED
+                self.header(parent).tid |= GCFLAG_PINNED_OBJECT_PARENT_KNOWN
             #
             if hdr.tid & GCFLAG_VISITED:
                 return
@@ -2033,6 +2045,7 @@
         new.delete()
 
     def _add_to_more_objects_to_trace(self, obj, ignored):
+        ll_assert(not self.is_in_nursery(obj), "unexpected nursery obj here")
         self.header(obj).tid &= ~GCFLAG_VISITED
         self.more_objects_to_trace.append(obj)
 
@@ -2287,8 +2300,7 @@
     def collect_roots(self):
         # Collect all roots.  Starts from all the objects
         # from 'prebuilt_root_objects'.
-        self.prebuilt_root_objects.foreach(self._collect_obj,
-                                           self.objects_to_trace)
+        self.prebuilt_root_objects.foreach(self._collect_obj, None)
         #
         # Add the roots from the other sources.
         self.root_walker.walk_roots(
@@ -2298,43 +2310,48 @@
         #
         # If we are in an inner collection caused by a call to a finalizer,
         # the 'run_finalizers' objects also need to be kept alive.
-        self.run_finalizers.foreach(self._collect_obj,
-                                    self.objects_to_trace)
+        self.run_finalizers.foreach(self._collect_obj, None)
 
     def enumerate_all_roots(self, callback, arg):
         self.prebuilt_root_objects.foreach(callback, arg)
         MovingGCBase.enumerate_all_roots(self, callback, arg)
     enumerate_all_roots._annspecialcase_ = 'specialize:arg(1)'
 
-    @staticmethod
-    def _collect_obj(obj, objects_to_trace):
-        objects_to_trace.append(obj)
+    def _collect_obj(self, obj, ignored):
+        # Ignore pinned objects, which are the ones still in the nursery here.
+        # Cache effects: don't read any flag out of 'obj' at this point.
+        # But only checking if it is in the nursery or not is fine.
+        llop.debug_nonnull_pointer(lltype.Void, obj)
+        if not self.is_in_nursery(obj):
+            self.objects_to_trace.append(obj)
+        else:
+            # A pinned object can be found here. Such an object is handled
+            # by minor collections and shouldn't be specially handled by
+            # major collections. Therefore we only add non-pinned objects
+            # to the 'objects_to_trace' list.
+            ll_assert(self._is_pinned(obj),
+                      "non-pinned nursery obj in _collect_obj")
+    _collect_obj._always_inline_ = True
 
     def _collect_ref_stk(self, root):
-        obj = root.address[0]
-        llop.debug_nonnull_pointer(lltype.Void, obj)
-        if not self._is_pinned(obj):
-            # XXX: check if this is the right way (groggi).
-            # A pinned object can be on the stack. Such an object is handled
-            # by minor collections and shouldn't be specially handled by
-            # major collections. Therefore we only add not pinned objects to the
-            # list below.
-            self.objects_to_trace.append(obj)
+        self._collect_obj(root.address[0], None)
 
     def _collect_ref_rec(self, root, ignored):
-        self.objects_to_trace.append(root.address[0])
+        self._collect_obj(root.address[0], None)
 
     def visit_all_objects(self):
         while self.objects_to_trace.non_empty():
             self.visit_all_objects_step(sys.maxint)
 
+    TEST_VISIT_SINGLE_STEP = False    # for tests
+
     def visit_all_objects_step(self, size_to_track):
         # Objects can be added to pending by visit
         pending = self.objects_to_trace
         while pending.non_empty():
             obj = pending.pop()
             size_to_track -= self.visit(obj)
-            if size_to_track < 0:
+            if size_to_track < 0 or self.TEST_VISIT_SINGLE_STEP:
                 return 0
         return size_to_track
 
@@ -2349,10 +2366,17 @@
         # flag set, then the object should be in 'prebuilt_root_objects',
         # and the GCFLAG_VISITED will be reset at the end of the
         # collection.
-        # Objects with GCFLAG_PINNED can't have gcptrs (see pin()), they can be
-        # ignored.
+        # We shouldn't see an object with GCFLAG_PINNED here (the pinned
+        # objects are never added to 'objects_to_trace').  The same-valued
+        # flag GCFLAG_PINNED_OBJECT_PARENT_KNOWN is used during minor
+        # collections and shouldn't be set here either.
+        #
         hdr = self.header(obj)
-        if hdr.tid & (GCFLAG_VISITED | GCFLAG_NO_HEAP_PTRS | GCFLAG_PINNED):
+        ll_assert((hdr.tid & GCFLAG_PINNED) == 0,
+                  "pinned object in 'objects_to_trace'")
+        ll_assert(not self.is_in_nursery(obj),
+                  "nursery object in 'objects_to_trace'")
+        if hdr.tid & (GCFLAG_VISITED | GCFLAG_NO_HEAP_PTRS):
             return 0
         #
         # It's the first time.  We set the flag VISITED.  The trick is
@@ -2582,6 +2606,7 @@
         # recursively convert objects from state 1 to state 2.
         # The call to visit_all_objects() will add the GCFLAG_VISITED
         # recursively.
+        ll_assert(not self.is_in_nursery(obj), "pinned finalizer object??")
         self.objects_to_trace.append(obj)
         self.visit_all_objects()
 
diff --git a/rpython/memory/gc/test/test_object_pinning.py b/rpython/memory/gc/test/test_object_pinning.py
--- a/rpython/memory/gc/test/test_object_pinning.py
+++ b/rpython/memory/gc/test/test_object_pinning.py
@@ -88,7 +88,7 @@
 
 class TestIncminimark(PinningGCTest):
     from rpython.memory.gc.incminimark import IncrementalMiniMarkGC as GCClass
-    from rpython.memory.gc.incminimark import STATE_SCANNING
+    from rpython.memory.gc.incminimark import STATE_SCANNING, STATE_MARKING
 
     def test_try_pin_gcref_containing_type(self):
         # scenario: incminimark's object pinning can't pin objects that may
@@ -917,3 +917,65 @@
         py.test.raises(Exception, self.malloc, T)
     test_full_pinned_nursery_pin_fail.max_number_of_pinned_objects = 50
 
+
+    def test_pin_bug1(self):
+        #
+        # * the nursery contains a pinned object 'ptr1'
+        #
+        # * outside the nursery is another object 'ptr2' pointing to 'ptr1'
+        #
+        # * during one incremental tracing step, we see 'ptr2' but don't
+        #   trace 'ptr1' right now: it is left behind on the trace-me-later
+        #   list
+        #
+        # * then we run the program, unpin 'ptr1', and remove it from 'ptr2'
+        #
+        # * at the next minor collection, we free 'ptr1' because we don't
+        #   find anything pointing to it (it is removed from 'ptr2'),
+        #   but 'ptr1' is still in the trace-me-later list
+        #
+        # * the trace-me-later list is deep enough that 'ptr1' is not
+        #   seen right now!  it is only seen at some later minor collection
+        #
+        # * at that later point, crash, because 'ptr1' in the nursery was
+        #   overwritten
+        #
+        ptr2 = self.malloc(S)
+        ptr2.someInt = 102
+        self.stackroots.append(ptr2)
+
+        self.gc.collect()
+        ptr2 = self.stackroots[-1]    # now outside the nursery
+        adr2 = llmemory.cast_ptr_to_adr(ptr2)
+
+        ptr1 = self.malloc(T)
+        adr1 = llmemory.cast_ptr_to_adr(ptr1)
+        ptr1.someInt = 101
+        self.write(ptr2, 'data', ptr1)
+        res = self.gc.pin(adr1)
+        assert res
+
+        self.gc.minor_collection()
+        assert self.gc.gc_state == self.STATE_SCANNING
+        self.gc.major_collection_step()
+        assert self.gc.objects_to_trace.tolist() == [adr2]
+        assert self.gc.more_objects_to_trace.tolist() == []
+
+        self.gc.TEST_VISIT_SINGLE_STEP = True
+
+        self.gc.minor_collection()
+        assert self.gc.gc_state == self.STATE_MARKING
+        self.gc.major_collection_step()
+        assert self.gc.objects_to_trace.tolist() == []
+        assert self.gc.more_objects_to_trace.tolist() == [adr2]
+
+        self.write(ptr2, 'data', lltype.nullptr(T))
+        self.gc.unpin(adr1)
+
+        assert ptr1.someInt == 101
+        self.gc.minor_collection()        # should free 'ptr1'
+        py.test.raises(RuntimeError, "ptr1.someInt")
+        assert self.gc.gc_state == self.STATE_MARKING
+        self.gc.major_collection_step()   # should not crash reading 'ptr1'!
+
+        del self.gc.TEST_VISIT_SINGLE_STEP
diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py
--- a/rpython/rlib/rvmprof/cintf.py
+++ b/rpython/rlib/rvmprof/cintf.py
@@ -79,10 +79,26 @@
 
     cont_name = 'rpyvmprof_f_%s_%s' % (name, token)
     tramp_name = 'rpyvmprof_t_%s_%s' % (name, token)
+    orig_tramp_name = tramp_name
 
     func.c_name = cont_name
     func._dont_inline_ = True
 
+    if sys.platform == 'darwin':
+        # according to internet "At the time UNIX was written in 1974...."
+        # "... all C functions are prefixed with _"
+        cont_name = '_' + cont_name
+        tramp_name = '_' + tramp_name
+        PLT = ""
+        size_decl = ""
+        type_decl = ""
+    else:
+        PLT = "@PLT"
+        type_decl = "\t.type\t%s, @function" % (tramp_name,)
+        size_decl = "\t.size\t%s, .-%s" % (
+            tramp_name, tramp_name)
+
+
     assert detect_cpu.autodetect().startswith(detect_cpu.MODEL_X86_64), (
         "rvmprof only supports x86-64 CPUs for now")
 
@@ -104,20 +120,22 @@
     target = udir.join('module_cache')
     target.ensure(dir=1)
     target = target.join('trampoline_%s_%s.vmprof.s' % (name, token))
+    # NOTE! the tabs in this file are absolutely essential, things
+    #       that don't start with \t are silently ignored (<arigato>: WAT!?)
     target.write("""\
 \t.text
 \t.globl\t%(tramp_name)s
-\t.type\t%(tramp_name)s, @function
+%(type_decl)s
 %(tramp_name)s:
 \t.cfi_startproc
 \tpushq\t%(reg)s
 \t.cfi_def_cfa_offset 16
-\tcall %(cont_name)s@PLT
+\tcall %(cont_name)s%(PLT)s
 \taddq\t$8, %%rsp
 \t.cfi_def_cfa_offset 8
 \tret
 \t.cfi_endproc
-\t.size\t%(tramp_name)s, .-%(tramp_name)s
+%(size_decl)s
 """ % locals())
 
     def tok2cname(tok):
@@ -129,7 +147,7 @@
 
     header = 'RPY_EXTERN %s %s(%s);\n' % (
         tok2cname(restok),
-        tramp_name,
+        orig_tramp_name,
         ', '.join([tok2cname(tok) for tok in token] + ['long']))
 
     header += """\
@@ -143,7 +161,7 @@
 #endif
 #define VMPROF_ADDR_OF_TRAMPOLINE cmp_%s
 }
-""" % (tramp_name, tramp_name, tramp_name)
+""" % (tramp_name, orig_tramp_name, tramp_name)
 
     eci = ExternalCompilationInfo(
         post_include_bits = [header],
@@ -151,7 +169,7 @@
     )
 
     return rffi.llexternal(
-        tramp_name,
+        orig_tramp_name,
         [token2lltype(tok) for tok in token] + [lltype.Signed],
         token2lltype(restok),
         compilation_info=eci,
diff --git a/rpython/rlib/rvmprof/src/vmprof_getpc.h b/rpython/rlib/rvmprof/src/vmprof_getpc.h
--- a/rpython/rlib/rvmprof/src/vmprof_getpc.h
+++ b/rpython/rlib/rvmprof/src/vmprof_getpc.h
@@ -53,7 +53,9 @@
 // If #define _GNU_SOURCE causes problems, this might work instead.
 // It will cause problems for FreeBSD though!, because it turns off
 // the needed __BSD_VISIBLE.
-//#define _XOPEN_SOURCE 500
+#ifdef __APPLE__
+#define _XOPEN_SOURCE 500
+#endif
 
 #include <string.h>         // for memcmp
 #if defined(HAVE_SYS_UCONTEXT_H)
@@ -179,7 +181,11 @@
 // configure.ac (or set it manually in your config.h).
 #else
 void* GetPC(ucontext_t *signal_ucontext) {
+#ifdef __APPLE__
+  return (void*)(signal_ucontext->uc_mcontext->__ss.__rip);
+#else
   return (void*)signal_ucontext->PC_FROM_UCONTEXT;   // defined in config.h
+#endif
 }
 
 #endif
diff --git a/rpython/rlib/rvmprof/src/vmprof_main.h b/rpython/rlib/rvmprof/src/vmprof_main.h
--- a/rpython/rlib/rvmprof/src/vmprof_main.h
+++ b/rpython/rlib/rvmprof/src/vmprof_main.h
@@ -27,6 +27,7 @@
 #include <unistd.h>
 #include <stdio.h>
 #include <sys/types.h>
+#include <signal.h>
 #include <sys/stat.h>
 #include <fcntl.h>
 #include "vmprof_getpc.h"
@@ -51,6 +52,14 @@
 static int opened_profile(char *interp_name);
 static void flush_codes(void);
 
+#ifdef __APPLE__
+#define UNWIND_NAME "/usr/lib/system/libunwind.dylib"
+#define UNW_PREFIX "unw"
+#else
+#define UNWIND_NAME "libunwind.so"
+#define UNW_PREFIX "_ULx86_64"
+#endif
+
 RPY_EXTERN
 char *vmprof_init(int fd, double interval, char *interp_name)
 {
@@ -61,15 +70,15 @@
     if (!unw_get_reg) {
         void *libhandle;
 
-        if (!(libhandle = dlopen("libunwind.so", RTLD_LAZY | RTLD_LOCAL)))
+        if (!(libhandle = dlopen(UNWIND_NAME, RTLD_LAZY | RTLD_LOCAL)))
             goto error;
-        if (!(unw_get_reg = dlsym(libhandle, "_ULx86_64_get_reg")))
+        if (!(unw_get_reg = dlsym(libhandle, UNW_PREFIX "_get_reg")))
             goto error;
-        if (!(unw_get_proc_info = dlsym(libhandle, "_ULx86_64_get_proc_info")))
+        if (!(unw_get_proc_info = dlsym(libhandle, UNW_PREFIX "_get_proc_info")))
             goto error;
-        if (!(unw_init_local = dlsym(libhandle, "_ULx86_64_init_local")))
+        if (!(unw_init_local = dlsym(libhandle, UNW_PREFIX  "_init_local")))
             goto error;
-        if (!(unw_step = dlsym(libhandle, "_ULx86_64_step")))
+        if (!(unw_step = dlsym(libhandle, UNW_PREFIX  "_step")))
             goto error;
     }
     if (prepare_concurrent_bufs() < 0)
@@ -452,12 +461,16 @@
     close(srcfd);
 #else
     // freebsd and mac
+#if defined(__APPLE__)
+       sprintf(buf, "vmmap %d", getpid());
+#else
     sprintf(buf, "procstat -v %d", getpid());
+#endif
     FILE *srcf = popen(buf, "r");
     if (!srcf)
         return -1;
 
-    while ((size = fread(buf, 1, sizeof buf, src))) {
+    while ((size = fread(buf, 1, sizeof buf, srcf))) {
         if (_write_all(buf, size) < 0) {
             pclose(srcf);
             return -1;
diff --git a/rpython/rlib/rvmprof/src/vmprof_mt.h b/rpython/rlib/rvmprof/src/vmprof_mt.h
--- a/rpython/rlib/rvmprof/src/vmprof_mt.h
+++ b/rpython/rlib/rvmprof/src/vmprof_mt.h
@@ -66,7 +66,7 @@
     unprepare_concurrent_bufs();
     profbuf_all_buffers = mmap(NULL, sizeof(struct profbuf_s) * MAX_NUM_BUFFERS,
                                PROT_READ | PROT_WRITE,
-                               MAP_PRIVATE | MAP_ANONYMOUS,
+                               MAP_PRIVATE | MAP_ANON,
                                -1, 0);
     if (profbuf_all_buffers == MAP_FAILED) {
         profbuf_all_buffers = NULL;
diff --git a/rpython/rlib/rvmprof/test/test_ztranslation.py b/rpython/rlib/rvmprof/test/test_ztranslation.py
--- a/rpython/rlib/rvmprof/test/test_ztranslation.py
+++ b/rpython/rlib/rvmprof/test/test_ztranslation.py
@@ -1,5 +1,6 @@
 import time, os, sys
-sys.path += ['.'] # for subprocess in test_interpreted
+if __name__ == '__main__':
+    sys.path += ['../../../..']    # for subprocess in test_interpreted
 import py
 from rpython.tool.udir import udir
 from rpython.rlib import rvmprof
@@ -59,7 +60,8 @@
 def test_interpreted():
     # takes forever if the Python process is already big...
     import subprocess
-    subprocess.check_call([sys.executable, __file__])
+    subprocess.check_call([sys.executable, os.path.basename(__file__)],
+                          cwd=(os.path.dirname(__file__) or '.'))
 
 def test_compiled():
     fn = compile(main, [], gcpolicy="minimark")
diff --git a/rpython/rtyper/tool/rffi_platform.py b/rpython/rtyper/tool/rffi_platform.py
--- a/rpython/rtyper/tool/rffi_platform.py
+++ b/rpython/rtyper/tool/rffi_platform.py
@@ -854,7 +854,7 @@
         platform = None
     else:
         library_dir = ''
-        libraries = ['gc', 'dl']
+        libraries = ['gc']
         includes=['gc/gc.h']
     eci = ExternalCompilationInfo(
         platform=platform,
diff --git a/rpython/translator/platform/posix.py b/rpython/translator/platform/posix.py
--- a/rpython/translator/platform/posix.py
+++ b/rpython/translator/platform/posix.py
@@ -181,6 +181,7 @@
             ('all', '$(DEFAULT_TARGET)', []),
             ('$(TARGET)', '$(OBJECTS)', '$(CC_LINK) $(LDFLAGSEXTRA) -o $@ $(OBJECTS) $(LIBDIRS) $(LIBS) $(LINKFILES) $(LDFLAGS)'),
             ('%.o', '%.c', '$(CC) $(CFLAGS) $(CFLAGSEXTRA) -o $@ -c $< $(INCLUDEDIRS)'),
+            ('%.o', '%.s', '$(CC) $(CFLAGS) $(CFLAGSEXTRA) -o $@ -c $< $(INCLUDEDIRS)'),
             ('%.o', '%.cxx', '$(CXX) $(CFLAGS) $(CFLAGSEXTRA) -o $@ -c $< $(INCLUDEDIRS)'),
             ]
 