[pypy-commit] pypy vecopt-merge: partial test fixes in backend directory

2015-09-30 Thread plan_rich
Author: Richard Plangger 
Branch: vecopt-merge
Changeset: r79902:3e5ea9adeb03
Date: 2015-09-30 09:10 +0200
http://bitbucket.org/pypy/pypy/changeset/3e5ea9adeb03/

Log: partial test fixes in backend directory

diff --git a/rpython/jit/backend/detect_cpu.py 
b/rpython/jit/backend/detect_cpu.py
--- a/rpython/jit/backend/detect_cpu.py
+++ b/rpython/jit/backend/detect_cpu.py
@@ -135,6 +135,7 @@
 MODEL_X86: ['floats', 'singlefloats', 'longlong'],
 MODEL_X86_NO_SSE2: ['longlong'],
 MODEL_X86_64: ['floats', 'singlefloats'],
+MODEL_X86_64_SSE4: ['floats', 'singlefloats'],
 MODEL_ARM: ['floats', 'singlefloats', 'longlong'],
 MODEL_PPC_64: [], # we don't even have PPC directory, so no
 }[backend_name]
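
Note: the table above maps a detected CPU model name to the JIT features it supports; without the new entry, looking up the SSE4 model's features raises KeyError. A minimal sketch of the lookup pattern (the names are illustrative, not the real detect_cpu constants):

    SUPPORTED_FEATURES = {
        'x86-64':      ['floats', 'singlefloats'],
        'x86-64-sse4': ['floats', 'singlefloats'],   # the entry added above
    }

    def features_for(model):
        # a model missing from the table would raise KeyError here,
        # which is what the patch avoids for the SSE4 model
        return SUPPORTED_FEATURES[model]

    assert 'floats' in features_for('x86-64-sse4')
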
diff --git a/rpython/jit/backend/llsupport/test/test_descr.py 
b/rpython/jit/backend/llsupport/test/test_descr.py
--- a/rpython/jit/backend/llsupport/test/test_descr.py
+++ b/rpython/jit/backend/llsupport/test/test_descr.py
@@ -144,6 +144,7 @@
 descr4 = get_array_descr(c0, A4)
 descr5 = get_array_descr(c0, A5)
 descr6 = get_array_descr(c0, A6)
+import pdb; pdb.set_trace()
 assert isinstance(descr1, ArrayDescr)
 assert descr1 == get_array_descr(c0, lltype.GcArray(lltype.Char))
 assert descr1.flag == FLAG_UNSIGNED
diff --git a/rpython/jit/backend/x86/regalloc.py 
b/rpython/jit/backend/x86/regalloc.py
--- a/rpython/jit/backend/x86/regalloc.py
+++ b/rpython/jit/backend/x86/regalloc.py
@@ -23,9 +23,8 @@
 from rpython.jit.codewriter import longlong
 from rpython.jit.codewriter.effectinfo import EffectInfo
 from rpython.jit.metainterp.history import (Const, ConstInt, ConstPtr,
-ConstFloat, INT, REF, FLOAT, VECTOR, TargetToken)
+ConstFloat, INT, REF, FLOAT, VECTOR, TargetToken, AbstractFailDescr)
 from rpython.jit.metainterp.resoperation import rop, ResOperation
-from rpython.jit.metainterp.compile import ResumeGuardDescr
 from rpython.jit.metainterp.resume import AccumInfo
 from rpython.rlib import rgc
 from rpython.rlib.objectmodel import we_are_translated
@@ -320,8 +319,10 @@
 def locs_for_fail(self, guard_op):
 faillocs = [self.loc(arg) for arg in guard_op.getfailargs()]
 descr = guard_op.getdescr()
-assert isinstance(descr, ResumeGuardDescr)
-if descr and descr.rd_accum_list:
+if not descr:
+return faillocs
+assert isinstance(descr, AbstractFailDescr)
+if descr.rd_accum_list:
 accuminfo = descr.rd_accum_list
 while accuminfo:
 accuminfo.vector_loc = faillocs[accuminfo.getpos_in_failargs()]
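
Note: the new code first returns early when the guard has no descr, then narrows the descr type before walking the accumulator list. A standalone sketch of the same pattern (illustrative classes; the 'next' attribute is an assumption, since the hunk is cut off before the loop advances):

    class AbstractFailDescr(object):
        rd_accum_list = None        # linked list of accumulator infos, or None

    def locs_for_fail(faillocs, descr):
        if not descr:
            return faillocs
        assert isinstance(descr, AbstractFailDescr)
        info = descr.rd_accum_list
        while info:
            info.vector_loc = faillocs[info.getpos_in_failargs()]
            info = info.next        # assumed field name
        return faillocs
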


[pypy-commit] pypy vecopt-merge: fixed test_parser tests (overwrote update_memo)

2015-09-30 Thread plan_rich
Author: Richard Plangger 
Branch: vecopt-merge
Changeset: r79903:b76456a0884f
Date: 2015-09-30 09:21 +0200
http://bitbucket.org/pypy/pypy/changeset/b76456a0884f/

Log: fixed test_parser tests (overwrote update_memo)

diff --git a/rpython/jit/tool/oparser.py b/rpython/jit/tool/oparser.py
--- a/rpython/jit/tool/oparser.py
+++ b/rpython/jit/tool/oparser.py
@@ -366,7 +366,7 @@
 Internally you will see the same variable names as
 in the trace as string.
 """
-regex = re.compile("[prif](\d+)")
+regex = re.compile("[prifv](\d+)")
 match = regex.match(name)
 if match:
 counter = int(match.group(1))
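
Note: the one-character change above lets vector boxes (named v0, v1, ...) be parsed like the existing p/r/i/f prefixes. A quick check of the regex:

    import re
    regex = re.compile(r"[prifv](\d+)")
    assert regex.match("i3").group(1) == "3"
    assert regex.match("v7").group(1) == "7"   # only matches with the patched pattern
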
diff --git a/rpython/tool/jitlogparser/parser.py 
b/rpython/tool/jitlogparser/parser.py
--- a/rpython/tool/jitlogparser/parser.py
+++ b/rpython/tool/jitlogparser/parser.py
@@ -164,7 +164,8 @@
 return self.Op(intern(opname[opnum].lower()), args, None,
descr, fail_args)
 
-
+def update_memo(self, val, name):
+pass
 
 class NonCodeError(Exception):
 pass


[pypy-commit] pypy vecopt-merge: fixed tests that use the oparser_model instead of the real impl

2015-09-30 Thread plan_rich
Author: Richard Plangger 
Branch: vecopt-merge
Changeset: r79904:eb5e01de1f93
Date: 2015-09-30 09:26 +0200
http://bitbucket.org/pypy/pypy/changeset/eb5e01de1f93/

Log: fixed tests that use the oparser_model instead of the real impl

diff --git a/rpython/jit/tool/oparser_model.py 
b/rpython/jit/tool/oparser_model.py
--- a/rpython/jit/tool/oparser_model.py
+++ b/rpython/jit/tool/oparser_model.py
@@ -79,19 +79,28 @@
 type = 'V'
 
 class Const(object):
+bytesize = 8
+signed = True
 def __init__(self, value=None):
 self.value = value
 
 def _get_str(self):
 return str(self.value)
 
+def is_constant(self):
+return True
+
 class ConstInt(Const):
+datatype = 'i'
 pass
 
 class ConstPtr(Const):
+datatype = 'r'
 pass
 
 class ConstFloat(Const):
+datatype = 'f'
+signed = False
 pass
 
 @classmethod
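
Note: a tiny usage sketch of the mock model classes as extended above (all attribute values are taken from the diff itself):

    c = ConstInt(42)
    assert c.is_constant()
    assert (c.datatype, c.bytesize, c.signed) == ('i', 8, True)
    f = ConstFloat(1.5)
    assert (f.datatype, f.signed) == ('f', False)
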


[pypy-commit] pypy fortran-order: convert 'order' in cpyext from char to int

2015-09-30 Thread mattip
Author: mattip 
Branch: fortran-order
Changeset: r79913:76c343105002
Date: 2015-09-30 23:52 +0300
http://bitbucket.org/pypy/pypy/changeset/76c343105002/

Log: convert 'order' in cpyext from char to int

diff --git a/pypy/module/cpyext/ndarrayobject.py 
b/pypy/module/cpyext/ndarrayobject.py
--- a/pypy/module/cpyext/ndarrayobject.py
+++ b/pypy/module/cpyext/ndarrayobject.py
@@ -12,6 +12,7 @@
 from pypy.module.micronumpy.descriptor import get_dtype_cache, W_Dtype
 from pypy.module.micronumpy.concrete import ConcreteArray
 from pypy.module.micronumpy import ufuncs
+import pypy.module.micronumpy.constants as NPY 
 from rpython.rlib.rawstorage import RAW_STORAGE_PTR
 from pypy.interpreter.typedef import TypeDef
 from pypy.interpreter.baseobjspace import W_Root
@@ -203,12 +204,12 @@
 return shape, dtype
 
 def simple_new(space, nd, dims, typenum,
-order='C', owning=False, w_subtype=None):
+order=NPY.CORDER, owning=False, w_subtype=None):
 shape, dtype = get_shape_and_dtype(space, nd, dims, typenum)
 return W_NDimArray.from_shape(space, shape, dtype)
 
 def simple_new_from_data(space, nd, dims, typenum, data,
-order='C', owning=False, w_subtype=None):
+order=NPY.CORDER, owning=False, w_subtype=None):
 shape, dtype = get_shape_and_dtype(space, nd, dims, typenum)
 storage = rffi.cast(RAW_STORAGE_PTR, data)
 return W_NDimArray.from_shape_and_storage(space, shape, storage, dtype,
@@ -238,7 +239,7 @@
 raise OperationError(space.w_NotImplementedError,
  space.wrap("strides must be NULL"))
 
-order = 'C' if flags & NPY_C_CONTIGUOUS else 'F'
+order = NPY.CORDER if flags & NPY_C_CONTIGUOUS else NPY.FORTRANORDER
 owning = True if flags & NPY_OWNDATA else False
 w_subtype = None
 
diff --git a/pypy/module/cpyext/test/test_ndarrayobject.py 
b/pypy/module/cpyext/test/test_ndarrayobject.py
--- a/pypy/module/cpyext/test/test_ndarrayobject.py
+++ b/pypy/module/cpyext/test/test_ndarrayobject.py
@@ -4,16 +4,17 @@
 from rpython.rtyper.lltypesystem import rffi, lltype
 from pypy.module.micronumpy.ndarray import W_NDimArray
 from pypy.module.micronumpy.descriptor import get_dtype_cache
+import pypy.module.micronumpy.constants as NPY 
 
 def scalar(space):
 dtype = get_dtype_cache(space).w_float64dtype
 return W_NDimArray.new_scalar(space, dtype, space.wrap(10.))
 
-def array(space, shape, order='C'):
+def array(space, shape, order=NPY.CORDER):
 dtype = get_dtype_cache(space).w_float64dtype
 return W_NDimArray.from_shape(space, shape, dtype, order=order)
 
-def iarray(space, shape, order='C'):
+def iarray(space, shape, order=NPY.CORDER):
 dtype = get_dtype_cache(space).w_int64dtype
 return W_NDimArray.from_shape(space, shape, dtype, order=order)
 
@@ -32,8 +33,8 @@
 
 def test_FLAGS(self, space, api):
 s = array(space, [10])
-c = array(space, [10, 5, 3], order='C')
-f = array(space, [10, 5, 3], order='F')
+c = array(space, [10, 5, 3], order=NPY.CORDER)
+f = array(space, [10, 5, 3], order=NPY.FORTRANORDER)
 assert api._PyArray_FLAGS(s) & 0x0001
 assert api._PyArray_FLAGS(s) & 0x0002
 assert api._PyArray_FLAGS(c) & 0x0001


[pypy-commit] pypy fortran-order: implement order, pass many tests

2015-09-30 Thread mattip
Author: mattip 
Branch: fortran-order
Changeset: r79911:a14943e46cef
Date: 2015-09-30 23:17 +0300
http://bitbucket.org/pypy/pypy/changeset/a14943e46cef/

Log: implement order, pass many tests

diff --git a/pypy/module/micronumpy/arrayops.py 
b/pypy/module/micronumpy/arrayops.py
--- a/pypy/module/micronumpy/arrayops.py
+++ b/pypy/module/micronumpy/arrayops.py
@@ -108,7 +108,8 @@
 w_axis = space.wrap(0)
 if space.is_none(w_axis):
 args_w = [w_arg.reshape(space,
-space.newlist([w_arg.descr_get_size(space)]))
+space.newlist([w_arg.descr_get_size(space)]),
+w_arg.get_order())
   for w_arg in args_w]
 w_axis = space.wrap(0)
 dtype = args_w[0].get_dtype()
diff --git a/pypy/module/micronumpy/concrete.py 
b/pypy/module/micronumpy/concrete.py
--- a/pypy/module/micronumpy/concrete.py
+++ b/pypy/module/micronumpy/concrete.py
@@ -92,17 +92,18 @@
 def get_storage_size(self):
 return self.size
 
-def reshape(self, orig_array, new_shape):
+def reshape(self, orig_array, new_shape, order=NPY.ANYORDER):
 # Since we got to here, prod(new_shape) == self.size
+order = support.get_order_as_CF(self.order, order)
 new_strides = None
 if self.size == 0:
-new_strides, _ = calc_strides(new_shape, self.dtype, self.order)
+new_strides, _ = calc_strides(new_shape, self.dtype, order)
 else:
 if len(self.get_shape()) == 0:
 new_strides = [self.dtype.elsize] * len(new_shape)
 else:
 new_strides = calc_new_strides(new_shape, self.get_shape(),
-   self.get_strides(), self.order)
+   self.get_strides(), order)
 if new_strides is None or len(new_strides) != len(new_shape):
 return None
 if new_strides is not None:
@@ -306,10 +307,11 @@
 return SliceArray(self.start, strides,
   backstrides, shape, self, orig_array)
 
-def copy(self, space):
+def copy(self, space, order=NPY.ANYORDER):
+order = support.get_order_as_CF(self.order, order)
 strides, backstrides = calc_strides(self.get_shape(), self.dtype,
-self.order)
-impl = ConcreteArray(self.get_shape(), self.dtype, self.order, strides,
+order)
+impl = ConcreteArray(self.get_shape(), self.dtype, order, strides,
  backstrides)
 return loop.setslice(space, self.get_shape(), impl, self)
 
diff --git a/pypy/module/micronumpy/ctors.py b/pypy/module/micronumpy/ctors.py
--- a/pypy/module/micronumpy/ctors.py
+++ b/pypy/module/micronumpy/ctors.py
@@ -201,6 +201,7 @@
 
 
 def _zeros_or_empty(space, w_shape, w_dtype, w_order, zero):
+order = order_converter(space, w_order, NPY.CORDER)
 dtype = space.interp_w(descriptor.W_Dtype,
 space.call_function(space.gettypefor(descriptor.W_Dtype), w_dtype))
 if dtype.is_str_or_unicode() and dtype.elsize < 1:
@@ -214,7 +215,7 @@
 support.product_check(shape)
 except OverflowError:
 raise oefmt(space.w_ValueError, "array is too big.")
-return W_NDimArray.from_shape(space, shape, dtype=dtype, zero=zero)
+return W_NDimArray.from_shape(space, shape, dtype, order, zero=zero)
 
 def empty(space, w_shape, w_dtype=None, w_order=None):
 return _zeros_or_empty(space, w_shape, w_dtype, w_order, zero=False)
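
Note: what the extra 'order' argument buys at the user level, expressed with CPython's numpy purely as a reference for the expected semantics:

    import numpy as np
    a = np.zeros((2, 3), dtype=np.float64, order='F')
    assert a.flags['F_CONTIGUOUS']
    assert a.strides == (8, 16)    # column-major: the first axis varies fastest
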
diff --git a/pypy/module/micronumpy/loop.py b/pypy/module/micronumpy/loop.py
--- a/pypy/module/micronumpy/loop.py
+++ b/pypy/module/micronumpy/loop.py
@@ -680,8 +680,7 @@
 def tostring(space, arr):
 builder = StringBuilder()
 iter, state = arr.create_iter()
-w_res_str = W_NDimArray.from_shape(space, [1], arr.get_dtype(),
-order=NPY.CORDER)
+w_res_str = W_NDimArray.from_shape(space, [1], arr.get_dtype())
 itemsize = arr.get_dtype().elsize
 with w_res_str.implementation as storage:
 res_str_casted = rffi.cast(rffi.CArrayPtr(lltype.Char),
diff --git a/pypy/module/micronumpy/ndarray.py 
b/pypy/module/micronumpy/ndarray.py
--- a/pypy/module/micronumpy/ndarray.py
+++ b/pypy/module/micronumpy/ndarray.py
@@ -99,12 +99,13 @@
 def descr_tostring(self, space, w_order=None):
 try:
 order = order_converter(space, w_order, NPY.CORDER)
-except OperationError as e:
-raise oefmt(space.w_TypeError, "order not understood") 
-if order == NPY.FORTRANORDER:
-raise OperationError(space.w_NotImplementedError, space.wrap(
-"unsupported value for order"))
-return space.wrap(loop.tostring(space, self))
+except:
+raise 

[pypy-commit] pypy fortran-order: revert, do not change this file

2015-09-30 Thread mattip
Author: mattip 
Branch: fortran-order
Changeset: r79912:e456a1c6df81
Date: 2015-09-30 23:38 +0300
http://bitbucket.org/pypy/pypy/changeset/e456a1c6df81/

Log: revert, do not change this file

diff --git a/_pytest/core.py b/_pytest/core.py
--- a/_pytest/core.py
+++ b/_pytest/core.py
@@ -175,7 +175,7 @@
 continue
 try:
 plugin = ep.load()
-except (DistributionNotFound, ImportError):
+except DistributionNotFound:
 continue
 self._plugin_distinfo.append((ep.dist, plugin))
 self.register(plugin, name=name)


[pypy-commit] cffi qualtypes: Add more passing tests

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: qualtypes
Changeset: r2284:524f9b024b0e
Date: 2015-09-30 11:49 +0200
http://bitbucket.org/cffi/cffi/changeset/524f9b024b0e/

Log: Add more passing tests

diff --git a/testing/cffi1/test_recompiler.py b/testing/cffi1/test_recompiler.py
--- a/testing/cffi1/test_recompiler.py
+++ b/testing/cffi1/test_recompiler.py
@@ -1231,3 +1231,42 @@
 foo_s = ffi.typeof("struct foo_s")
 assert foo_s.fields[0][0] == 'a'
 assert foo_s.fields[0][1].type is ffi.typeof("int[]")
+
+def test_const_array_fields_unknownlength():
+ffi = FFI()
+ffi.cdef("""struct foo_s { const int a[...]; ...; };""")
+lib = verify(ffi, 'test_const_array_fields_unknownlength', """
+struct foo_s { const int a[4]; };""")
+foo_s = ffi.typeof("struct foo_s")
+assert foo_s.fields[0][0] == 'a'
+assert foo_s.fields[0][1].type is ffi.typeof("int[4]")
+
+def test_const_function_args():
+ffi = FFI()
+ffi.cdef("""int foobar(const int a, const int *b, const int c[]);""")
+lib = verify(ffi, 'test_const_function_args', """
+int foobar(const int a, const int *b, const int c[]) {
+return a + *b + *c;
+}
+""")
+assert lib.foobar(100, ffi.new("int *", 40), ffi.new("int *", 2)) == 142
+
+def test_const_function_type_args():
+ffi = FFI()
+ffi.cdef("""int (*foobar)(const int a, const int *b, const int c[]);""")
+lib = verify(ffi, 'test_const_function_type_args', """
+int (*foobar)(const int a, const int *b, const int c[]);
+""")
+t = ffi.typeof(lib.foobar)
+assert t.args[0] is ffi.typeof("int")
+assert t.args[1] is ffi.typeof("int *")
+assert t.args[2] is ffi.typeof("int *")
+
+def test_const_constant():
+ffi = FFI()
+ffi.cdef("""struct foo_s { int x,y; }; const struct foo_s myfoo;""")
+lib = verify(ffi, 'test_const_constant', """
+struct foo_s { int x,y; }; const struct foo_s myfoo = { 40, 2 };
+""")
+assert lib.myfoo.x == 40
+assert lib.myfoo.y == 2


[pypy-commit] cffi qualtypes: in-progress: add qualifiers through model.py, cparser.py, and recompiler.py

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: qualtypes
Changeset: r2282:ab9e37952de6
Date: 2015-09-30 11:00 +0200
http://bitbucket.org/cffi/cffi/changeset/ab9e37952de6/

Log: in-progress: add qualifiers through model.py, cparser.py, and
recompiler.py

diff --git a/cffi/api.py b/cffi/api.py
--- a/cffi/api.py
+++ b/cffi/api.py
@@ -609,7 +609,7 @@
 def make_accessor_locked(name):
 key = 'function ' + name
 if key in ffi._parser._declarations:
-tp = ffi._parser._declarations[key]
+tp, _ = ffi._parser._declarations[key]
 BType = ffi._get_cached_btype(tp)
 try:
 value = backendlib.load_function(BType, name)
@@ -620,7 +620,7 @@
 #
 key = 'variable ' + name
 if key in ffi._parser._declarations:
-tp = ffi._parser._declarations[key]
+tp, _ = ffi._parser._declarations[key]
 BType = ffi._get_cached_btype(tp)
 read_variable = backendlib.read_variable
 write_variable = backendlib.write_variable
@@ -632,7 +632,7 @@
 if not copied_enums:
 from . import model
 error = None
-for key, tp in ffi._parser._declarations.items():
+for key, (tp, _) in ffi._parser._declarations.items():
 if not isinstance(tp, model.EnumType):
 continue
 try:
diff --git a/cffi/cparser.py b/cffi/cparser.py
--- a/cffi/cparser.py
+++ b/cffi/cparser.py
@@ -192,6 +192,7 @@
 if not decl.name:
 raise api.CDefError("typedef does not declare any name",
 decl)
+quals = 0
 if (isinstance(decl.type.type, 
pycparser.c_ast.IdentifierType)
 and decl.type.type.names[-1] == '__dotdotdot__'):
 realtype = self._get_unknown_type(decl)
@@ -202,8 +203,9 @@
   decl.type.type.type.names == ['__dotdotdot__']):
 realtype = model.unknown_ptr_type(decl.name)
 else:
-realtype = self._get_type(decl.type, name=decl.name)
-self._declare('typedef ' + decl.name, realtype)
+realtype, quals = self._get_type_and_quals(
+decl.type, name=decl.name)
+self._declare('typedef ' + decl.name, realtype, quals=quals)
 else:
 raise api.CDefError("unrecognized construct", decl)
 except api.FFIError as e:
@@ -255,9 +257,9 @@
 def _parse_decl(self, decl):
 node = decl.type
 if isinstance(node, pycparser.c_ast.FuncDecl):
-tp = self._get_type(node, name=decl.name)
+tp, quals = self._get_type_and_quals(node, name=decl.name)
 assert isinstance(tp, model.RawFunctionType)
-tp = self._get_type_pointer(tp)
+tp = self._get_type_pointer(tp, quals)
 self._declare('function ' + decl.name, tp)
 else:
 if isinstance(node, pycparser.c_ast.Struct):
@@ -271,9 +273,10 @@
 decl)
 #
 if decl.name:
-tp = self._get_type(node, partial_length_ok=True)
+tp, quals = self._get_type_and_quals(node,
+ partial_length_ok=True)
 if tp.is_raw_function:
-tp = self._get_type_pointer(tp)
+tp = self._get_type_pointer(tp, quals)
 self._declare('function ' + decl.name, tp)
 elif (tp.is_integer_type() and
 hasattr(decl, 'init') and
@@ -287,10 +290,10 @@
 _r_int_literal.match(decl.init.expr.value)):
 self._add_integer_constant(decl.name,
'-' + decl.init.expr.value)
-elif self._is_constant_globalvar(node):
-self._declare('constant ' + decl.name, tp)
+elif (quals & model.Q_CONST) and not tp.is_array_type:
+self._declare('constant ' + decl.name, tp, quals=quals)
 else:
-self._declare('variable ' + decl.name, tp)
+self._declare('variable ' + decl.name, tp, quals=quals)
 
 def parse_type(self, cdecl):
 ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
@@ -298,40 +301,51 @@
 exprnode = ast.ext[-1].type.args.params[0]
 if isinstance(exprnode, pycparser.c_ast.ID):
 raise api.CDefError("unknown identifier '%s'" % (exprnode.name,))
-return self._get_type(exprnode.type)
+tp, quals = self._get_type_and_quals(exprnode.type)
+return tp
 
-def _declare(self, name, obj, included=False):
+def _declare(self, 

[pypy-commit] cffi qualtypes: A branch to finally fix issues #126/#64

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: qualtypes
Changeset: r2281:7a3319459076
Date: 2015-09-30 08:58 +0200
http://bitbucket.org/cffi/cffi/changeset/7a3319459076/

Log: A branch to finally fix issues #126/#64



[pypy-commit] cffi qualtypes: Tests and fixes for "const" fields

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: qualtypes
Changeset: r2283:f8c4c25618be
Date: 2015-09-30 11:34 +0200
http://bitbucket.org/cffi/cffi/changeset/f8c4c25618be/

Log: Tests and fixes for "const" fields

diff --git a/cffi/model.py b/cffi/model.py
--- a/cffi/model.py
+++ b/cffi/model.py
@@ -20,7 +20,7 @@
 is_array_type = False
 is_raw_function = False
 
-def get_c_name(self, replace_with='', context='a C file'):
+def get_c_name(self, replace_with='', context='a C file', quals=0):
 result = self.c_name_with_marker
 assert result.count('&') == 1
 # some logic duplication with ffi.getctype()... :-(
@@ -30,6 +30,7 @@
 replace_with = '(%s)' % replace_with
 elif not replace_with[0] in '[(':
 replace_with = ' ' + replace_with
+replace_with = qualify(quals, replace_with)
 result = result.replace('&', replace_with)
 if '$' in result:
 from .ffiplatform import VerificationError
diff --git a/cffi/recompiler.py b/cffi/recompiler.py
--- a/cffi/recompiler.py
+++ b/cffi/recompiler.py
@@ -788,9 +788,9 @@
and (ftype.length is None or ftype.length == '...')):
 ftype = ftype.item
 fname = fname + '[0]'
-tmp = model.qualify(fqual, '*tmp')
 prnt('  { %s = &tmp->%s; (void)tmp; }' % (
-ftype.get_c_name(tmp, 'field %r'%fname), fname))
+ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
+fname))
 except ffiplatform.VerificationError as e:
 prnt('  /* %s */' % str(e))   # cannot verify it, ignore
 prnt('}')
@@ -1087,8 +1087,7 @@
 # (If 'tp' is a function _pointer_ type, then casts from "fn_t
 # **" to "void *" are again no-ops, as far as I can tell.)
 decl = '*_cffi_var_%s(void)' % (name,)
-decl = model.qualify(self._current_quals, decl)
-prnt('static ' + tp.get_c_name(decl))
+prnt('static ' + tp.get_c_name(decl, quals=self._current_quals))
 prnt('{')
 prnt('  return %s(%s);' % (ampersand, name))
 prnt('}')
diff --git a/testing/cffi1/test_recompiler.py b/testing/cffi1/test_recompiler.py
--- a/testing/cffi1/test_recompiler.py
+++ b/testing/cffi1/test_recompiler.py
@@ -1191,3 +1191,43 @@
 py.test.raises(ffi.error, getattr, lib, 'my_value')
 e = py.test.raises(ffi.error, setattr, lib, 'my_value', 50)
 assert str(e.value) == "global variable 'my_value' is at address NULL"
+
+def test_const_fields():
+ffi = FFI()
+ffi.cdef("""struct foo_s { const int a; void *const b; };""")
+lib = verify(ffi, 'test_const_fields', """
+struct foo_s { const int a; void *const b; };""")
+foo_s = ffi.typeof("struct foo_s")
+assert foo_s.fields[0][0] == 'a'
+assert foo_s.fields[0][1].type is ffi.typeof("int")
+assert foo_s.fields[1][0] == 'b'
+assert foo_s.fields[1][1].type is ffi.typeof("void *")
+
+def test_restrict_fields():
+if sys.platform == 'win32':
+py.test.skip("'__restrict__' probably not recognized")
+ffi = FFI()
+ffi.cdef("""struct foo_s { void * restrict b; };""")
+lib = verify(ffi, 'test_const_fields', """
+struct foo_s { void * __restrict__ b; };""")
+foo_s = ffi.typeof("struct foo_s")
+assert foo_s.fields[0][0] == 'b'
+assert foo_s.fields[0][1].type is ffi.typeof("void *")
+
+def test_const_array_fields():
+ffi = FFI()
+ffi.cdef("""struct foo_s { const int a[4]; };""")
+lib = verify(ffi, 'test_const_array_fields', """
+struct foo_s { const int a[4]; };""")
+foo_s = ffi.typeof("struct foo_s")
+assert foo_s.fields[0][0] == 'a'
+assert foo_s.fields[0][1].type is ffi.typeof("int[4]")
+
+def test_const_array_fields_varlength():
+ffi = FFI()
+ffi.cdef("""struct foo_s { const int a[]; ...; };""")
+lib = verify(ffi, 'test_const_array_fields_varlength', """
+struct foo_s { const int a[4]; };""")
+foo_s = ffi.typeof("struct foo_s")
+assert foo_s.fields[0][0] == 'a'
+assert foo_s.fields[0][1].type is ffi.typeof("int[]")


[pypy-commit] cffi qualtypes: Fix the verify() for const fields too, because it's very easy

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: qualtypes
Changeset: r2285:960b1aaf63ca
Date: 2015-09-30 11:53 +0200
http://bitbucket.org/cffi/cffi/changeset/960b1aaf63ca/

Log: Fix the verify() for const fields too, because it's very easy

diff --git a/cffi/vengine_cpy.py b/cffi/vengine_cpy.py
--- a/cffi/vengine_cpy.py
+++ b/cffi/vengine_cpy.py
@@ -480,7 +480,8 @@
 # only accept exactly the type declared.
 try:
 prnt('  { %s = &tmp->%s; (void)tmp; }' % (
-ftype.get_c_name('*tmp', 'field %r'%fname), fname))
+ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
+fname))
 except ffiplatform.VerificationError as e:
 prnt('  /* %s */' % str(e))   # cannot verify it, ignore
 prnt('}')
diff --git a/cffi/vengine_gen.py b/cffi/vengine_gen.py
--- a/cffi/vengine_gen.py
+++ b/cffi/vengine_gen.py
@@ -272,7 +272,8 @@
 # only accept exactly the type declared.
 try:
 prnt('  { %s = &tmp->%s; (void)tmp; }' % (
-ftype.get_c_name('*tmp', 'field %r'%fname), fname))
+ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
+fname))
 except ffiplatform.VerificationError as e:
 prnt('  /* %s */' % str(e))   # cannot verify it, ignore
 prnt('}')
diff --git a/testing/cffi0/test_verify.py b/testing/cffi0/test_verify.py
--- a/testing/cffi0/test_verify.py
+++ b/testing/cffi0/test_verify.py
@@ -2247,3 +2247,13 @@
 e = py.test.raises(VerificationError, ffi.verify, "")
 assert str(e.value) == ("feature not supported with ffi.verify(), but only "
 "with ffi.set_source(): 'typedef unsigned long... t1'")
+
+def test_const_fields():
+ffi = FFI()
+ffi.cdef("""struct foo_s { const int a; void *const b; };""")
+ffi.verify("""struct foo_s { const int a; void *const b; };""")
+foo_s = ffi.typeof("struct foo_s")
+assert foo_s.fields[0][0] == 'a'
+assert foo_s.fields[0][1].type is ffi.typeof("int")
+assert foo_s.fields[1][0] == 'b'
+assert foo_s.fields[1][1].type is ffi.typeof("void *")


[pypy-commit] cffi default: hg merge qualtypes

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: 
Changeset: r2292:f0d15960e353
Date: 2015-09-30 12:44 +0200
http://bitbucket.org/cffi/cffi/changeset/f0d15960e353/

Log: hg merge qualtypes

Fixes a long-standing problem with producing warnings-free code if
the real C header uses "const" or "restrict". See issues #64 and
#126.

diff --git a/cffi/api.py b/cffi/api.py
--- a/cffi/api.py
+++ b/cffi/api.py
@@ -609,7 +609,7 @@
 def make_accessor_locked(name):
 key = 'function ' + name
 if key in ffi._parser._declarations:
-tp = ffi._parser._declarations[key]
+tp, _ = ffi._parser._declarations[key]
 BType = ffi._get_cached_btype(tp)
 try:
 value = backendlib.load_function(BType, name)
@@ -620,7 +620,7 @@
 #
 key = 'variable ' + name
 if key in ffi._parser._declarations:
-tp = ffi._parser._declarations[key]
+tp, _ = ffi._parser._declarations[key]
 BType = ffi._get_cached_btype(tp)
 read_variable = backendlib.read_variable
 write_variable = backendlib.write_variable
@@ -632,7 +632,7 @@
 if not copied_enums:
 from . import model
 error = None
-for key, tp in ffi._parser._declarations.items():
+for key, (tp, _) in ffi._parser._declarations.items():
 if not isinstance(tp, model.EnumType):
 continue
 try:
diff --git a/cffi/cparser.py b/cffi/cparser.py
--- a/cffi/cparser.py
+++ b/cffi/cparser.py
@@ -192,6 +192,7 @@
 if not decl.name:
 raise api.CDefError("typedef does not declare any name",
 decl)
+quals = 0
 if (isinstance(decl.type.type, 
pycparser.c_ast.IdentifierType)
 and decl.type.type.names[-1] == '__dotdotdot__'):
 realtype = self._get_unknown_type(decl)
@@ -202,8 +203,9 @@
   decl.type.type.type.names == ['__dotdotdot__']):
 realtype = model.unknown_ptr_type(decl.name)
 else:
-realtype = self._get_type(decl.type, name=decl.name)
-self._declare('typedef ' + decl.name, realtype)
+realtype, quals = self._get_type_and_quals(
+decl.type, name=decl.name)
+self._declare('typedef ' + decl.name, realtype, quals=quals)
 else:
 raise api.CDefError("unrecognized construct", decl)
 except api.FFIError as e:
@@ -255,9 +257,9 @@
 def _parse_decl(self, decl):
 node = decl.type
 if isinstance(node, pycparser.c_ast.FuncDecl):
-tp = self._get_type(node, name=decl.name)
+tp, quals = self._get_type_and_quals(node, name=decl.name)
 assert isinstance(tp, model.RawFunctionType)
-tp = self._get_type_pointer(tp)
+tp = self._get_type_pointer(tp, quals)
 self._declare('function ' + decl.name, tp)
 else:
 if isinstance(node, pycparser.c_ast.Struct):
@@ -271,9 +273,10 @@
 decl)
 #
 if decl.name:
-tp = self._get_type(node, partial_length_ok=True)
+tp, quals = self._get_type_and_quals(node,
+ partial_length_ok=True)
 if tp.is_raw_function:
-tp = self._get_type_pointer(tp)
+tp = self._get_type_pointer(tp, quals)
 self._declare('function ' + decl.name, tp)
 elif (tp.is_integer_type() and
 hasattr(decl, 'init') and
@@ -287,10 +290,10 @@
 _r_int_literal.match(decl.init.expr.value)):
 self._add_integer_constant(decl.name,
'-' + decl.init.expr.value)
-elif self._is_constant_globalvar(node):
-self._declare('constant ' + decl.name, tp)
+elif (quals & model.Q_CONST) and not tp.is_array_type:
+self._declare('constant ' + decl.name, tp, quals=quals)
 else:
-self._declare('variable ' + decl.name, tp)
+self._declare('variable ' + decl.name, tp, quals=quals)
 
 def parse_type(self, cdecl):
 ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
@@ -298,40 +301,51 @@
 exprnode = ast.ext[-1].type.args.params[0]
 if isinstance(exprnode, pycparser.c_ast.ID):
 raise api.CDefError("unknown identifier '%s'" % (exprnode.name,))
-return self._get_type(exprnode.type)
+tp, quals = self._get_type_and_quals(exprnode.type)
+

[pypy-commit] cffi qualtypes: Documentation

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: qualtypes
Changeset: r2288:32ca56181815
Date: 2015-09-30 12:25 +0200
http://bitbucket.org/cffi/cffi/changeset/32ca56181815/

Log: Documentation

diff --git a/doc/source/cdef.rst b/doc/source/cdef.rst
--- a/doc/source/cdef.rst
+++ b/doc/source/cdef.rst
@@ -232,6 +232,15 @@
 ``TCHAR`` and friends where hard-coded as unicode, but ``UNICODE`` was,
 inconsistently, not defined by default.)
 
+Note that you can use the type-qualifiers ``const`` and ``restrict``
+(but not ``__restrict`` or ``__restrict__``) in the ``cdef()``, but
+this has no effect on the cdata objects that you get at run-time (they
+are never ``const``).  The effect is limited to knowing if a global
+variable is meant to be a constant or not.  Also, *new in version
+1.3:* when using ``set_source()`` or ``verify()``, these two
+qualifiers are copied from the cdef to the generated C code; this
+fixes warnings by the C compiler.
+
 
 .. _loading-libraries:
 
diff --git a/doc/source/whatsnew.rst b/doc/source/whatsnew.rst
--- a/doc/source/whatsnew.rst
+++ b/doc/source/whatsnew.rst
@@ -13,6 +13,12 @@
 * Issue #217: fix possible unaligned pointer manipulation, which crash
   on some architectures (64-bit, non-x86).
 
+* Issues #64 and #126: when using ``set_source()`` or ``verify()``,
+  the ``const`` and ``restrict`` keywords are copied from the cdef
+  to the generated C code; this fixes warnings by the C compiler.
+  It also fixes corner cases like ``typedef const int T; T a;``
+  which would previously not consider ``a`` as a constant.
+
 
 v1.2.1
 ======
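
Note: a short, hedged example of the documented behaviour (the module name _example_const is made up). With set_source(), the 'const' from the cdef is copied into the generated C source, so the C compiler no longer warns about discarded qualifiers:

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("const int answer;")                        # read-only global
    ffi.set_source("_example_const", "const int answer = 42;")

    if __name__ == "__main__":
        ffi.compile()    # afterwards lib.answer == 42, and assigning to it fails
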


[pypy-commit] cffi qualtypes: add a passing test

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: qualtypes
Changeset: r2289:5f8686315e12
Date: 2015-09-30 12:33 +0200
http://bitbucket.org/cffi/cffi/changeset/5f8686315e12/

Log: add a passing test

diff --git a/testing/cffi0/test_parsing.py b/testing/cffi0/test_parsing.py
--- a/testing/cffi0/test_parsing.py
+++ b/testing/cffi0/test_parsing.py
@@ -341,6 +341,19 @@
 tp, quals = ffi._parser._declarations['variable a']
 assert bool(quals & model.Q_RESTRICT) == expected_output
 
+def test_different_const_funcptr_types():
+lst = []
+for input in [
+"int(*)(int *a)",
+"int(*)(int const *a)",
+"int(*)(int * const a)",
+"int(*)(int const a[])"]:
+ffi = FFI(backend=FakeBackend())
+lst.append(ffi._parser.parse_type(input))
+assert lst[0] != lst[1]
+assert lst[0] == lst[2]
+assert lst[1] == lst[3]
+
 def test_enum():
 ffi = FFI()
 ffi.cdef("""


[pypy-commit] cffi qualtypes: A note

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: qualtypes
Changeset: r2290:db28e5f548ff
Date: 2015-09-30 12:37 +0200
http://bitbucket.org/cffi/cffi/changeset/db28e5f548ff/

Log: A note

diff --git a/cffi/model.py b/cffi/model.py
--- a/cffi/model.py
+++ b/cffi/model.py
@@ -12,6 +12,9 @@
 if quals & Q_CONST:
 replace_with = ' const ' + replace_with.lstrip()
 if quals & Q_RESTRICT:
+# It seems that __restrict is supported by gcc and msvc.
+# If you hit some different compiler, add a #define in
+# _cffi_include.h for it (and in its copies, documented there)
 replace_with = ' __restrict ' + replace_with.lstrip()
 return replace_with
 


[pypy-commit] cffi qualtypes: Seems that "__restrict" is recognized in all gcc modes I could try,

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: qualtypes
Changeset: r2287:f50320b732db
Date: 2015-09-30 12:08 +0200
http://bitbucket.org/cffi/cffi/changeset/f50320b732db/

Log: Seems that "__restrict" is recognized in all gcc modes I could try,
and it should also be recognized on MSVC

diff --git a/cffi/model.py b/cffi/model.py
--- a/cffi/model.py
+++ b/cffi/model.py
@@ -12,7 +12,7 @@
 if quals & Q_CONST:
 replace_with = ' const ' + replace_with.lstrip()
 if quals & Q_RESTRICT:
-replace_with = ' restrict ' + replace_with.lstrip()
+replace_with = ' __restrict ' + replace_with.lstrip()
 return replace_with
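
Note: putting the visible pieces together, the helper prepends qualifier keywords to the declarator text. A self-contained sketch (the flag values are assumptions; cffi defines them as small bit masks in model.py):

    Q_CONST, Q_RESTRICT = 0x01, 0x02

    def qualify(quals, replace_with):
        if quals & Q_CONST:
            replace_with = ' const ' + replace_with.lstrip()
        if quals & Q_RESTRICT:
            replace_with = ' __restrict ' + replace_with.lstrip()
        return replace_with

    assert qualify(Q_CONST, '*tmp') == ' const *tmp'
    assert qualify(Q_RESTRICT, 'x[4]') == ' __restrict x[4]'
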
 
 


[pypy-commit] cffi qualtypes: fixes

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: qualtypes
Changeset: r2286:4ef1d72e9b05
Date: 2015-09-30 12:00 +0200
http://bitbucket.org/cffi/cffi/changeset/4ef1d72e9b05/

Log: fixes

diff --git a/cffi/recompiler.py b/cffi/recompiler.py
--- a/cffi/recompiler.py
+++ b/cffi/recompiler.py
@@ -203,8 +203,7 @@
 
 def _generate(self, step_name):
 lst = self.ffi._parser._declarations.items()
-lst.sort()
-for name, (tp, quals) in lst:
+for name, (tp, quals) in sorted(lst):
 kind, realname = name.split(' ', 1)
 try:
 method = getattr(self, '_generate_cpy_%s_%s' % (kind,
diff --git a/testing/cffi1/test_recompiler.py b/testing/cffi1/test_recompiler.py
--- a/testing/cffi1/test_recompiler.py
+++ b/testing/cffi1/test_recompiler.py
@@ -1208,7 +1208,7 @@
 py.test.skip("'__restrict__' probably not recognized")
 ffi = FFI()
 ffi.cdef("""struct foo_s { void * restrict b; };""")
-lib = verify(ffi, 'test_const_fields', """
+lib = verify(ffi, 'test_restrict_fields', """
 struct foo_s { void * __restrict__ b; };""")
 foo_s = ffi.typeof("struct foo_s")
 assert foo_s.fields[0][0] == 'b'
@@ -1270,3 +1270,13 @@
 """)
 assert lib.myfoo.x == 40
 assert lib.myfoo.y == 2
+
+def test_const_via_typedef():
+ffi = FFI()
+ffi.cdef("""typedef const int const_t; const_t aaa;""")
+lib = verify(ffi, 'test_const_via_typedef', """
+typedef const int const_t;
+#define aaa 42
+""")
+assert lib.aaa == 42
+py.test.raises(AttributeError, "lib.aaa = 43")


[pypy-commit] pypy py3k: hg merge 5345333d8dcd (last changeset in default branch that was merged into PyPy2 release branch).

2015-09-30 Thread mjacob
Author: Manuel Jacob 
Branch: py3k
Changeset: r79914:865002a1287a
Date: 2015-10-01 00:51 +0200
http://bitbucket.org/pypy/pypy/changeset/865002a1287a/

Log: hg merge 5345333d8dcd (last changeset in default branch that was
merged into PyPy2 release branch).

diff --git a/LICENSE b/LICENSE
--- a/LICENSE
+++ b/LICENSE
@@ -352,8 +352,7 @@
 Except when otherwise stated (look for LICENSE files or copyright/license
 information at the beginning of each file) the files in the 'lib-python/2.7'
 directory are all copyrighted by the Python Software Foundation and licensed
-under the Python Software License of which you can find a copy here:
-http://www.python.org/doc/Copyright.html 
+under the terms that you can find here: https://docs.python.org/2/license.html
 
 License for 'pypy/module/unicodedata/'
 ======================================
@@ -435,4 +434,4 @@
 
 The code is based on gperftools. You may see a copy of the License for it at
 
-https://code.google.com/p/gperftools/source/browse/COPYING
+https://github.com/gperftools/gperftools/blob/master/COPYING
diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO
--- a/lib_pypy/cffi.egg-info/PKG-INFO
+++ b/lib_pypy/cffi.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: cffi
-Version: 1.2.1
+Version: 1.3.0
 Summary: Foreign Function Interface for Python calling C code.
 Home-page: http://cffi.readthedocs.org
 Author: Armin Rigo, Maciej Fijalkowski
diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py
--- a/lib_pypy/cffi/__init__.py
+++ b/lib_pypy/cffi/__init__.py
@@ -4,8 +4,8 @@
 from .api import FFI, CDefError, FFIError
 from .ffiplatform import VerificationError, VerificationMissing
 
-__version__ = "1.2.1"
-__version_info__ = (1, 2, 1)
+__version__ = "1.3.0"
+__version_info__ = (1, 3, 0)
 
 # The verifier module file names are based on the CRC32 of a string that
 # contains the following version number.  It may be older than __version__
diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h
--- a/lib_pypy/cffi/_cffi_include.h
+++ b/lib_pypy/cffi/_cffi_include.h
@@ -214,6 +214,12 @@
  (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) :\
  _CFFI__UNKNOWN_PRIM)
 
+#define _cffi_prim_float(size)  \
+((size) == sizeof(float) ? _CFFI_PRIM_FLOAT :   \
+ (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \
+ (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE :   \
+ _CFFI__UNKNOWN_FLOAT_PRIM)
+
 #define _cffi_check_int(got, got_nonpos, expected)  \
 ((got_nonpos) == (expected <= 0) && \
  (got) == (unsigned long long)expected)
diff --git a/lib_pypy/cffi/cffi_opcode.py b/lib_pypy/cffi/cffi_opcode.py
--- a/lib_pypy/cffi/cffi_opcode.py
+++ b/lib_pypy/cffi/cffi_opcode.py
@@ -106,7 +106,9 @@
 PRIM_UINTMAX   = 47
 
 _NUM_PRIM  = 48
-_UNKNOWN_PRIM  = -1
+_UNKNOWN_PRIM  = -1
+_UNKNOWN_FLOAT_PRIM= -2
+_UNKNOWN_LONG_DOUBLE   = -3
 
 PRIMITIVE_TO_INDEX = {
 'char':   PRIM_CHAR,
diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py
--- a/lib_pypy/cffi/cparser.py
+++ b/lib_pypy/cffi/cparser.py
@@ -648,10 +648,21 @@
 assert typenames[-1] == '__dotdotdot__'
 if len(typenames) == 1:
 return model.unknown_type(decl.name)
-for t in typenames[:-1]:
-if t not in ['int', 'short', 'long', 'signed', 'unsigned', 'char']:
-raise api.FFIError(':%d: bad usage of "..."' % decl.coord.line)
+
+if (typenames[:-1] == ['float'] or
+typenames[:-1] == ['double']):
+# not for 'long double' so far
+result = model.UnknownFloatType(decl.name)
+else:
+for t in typenames[:-1]:
+if t not in ['int', 'short', 'long', 'signed',
+ 'unsigned', 'char']:
+raise api.FFIError(':%d: bad usage of "..."' %
+   decl.coord.line)
+result = model.UnknownIntegerType(decl.name)
+
 if self._uses_new_feature is None:
 self._uses_new_feature = "'typedef %s... %s'" % (
 ' '.join(typenames[:-1]), decl.name)
-return model.UnknownIntegerType(decl.name)
+
+return result
diff --git a/lib_pypy/cffi/model.py b/lib_pypy/cffi/model.py
--- a/lib_pypy/cffi/model.py
+++ b/lib_pypy/cffi/model.py
@@ -158,12 +158,23 @@
 self.c_name_with_marker = name + '&'
 
 def is_integer_type(self):
-return True# for now
+return True
 
 def build_backend_type(self, ffi, finishlist):
 raise NotImplementedError("integer type '%s' can only be used after "
   "compilation" % self.name)
 
+class UnknownFloatType(BasePrimitiveType):
+_attrs_ = ('name', )
+
+def __init__(self, name):

[pypy-commit] pypy py3k: 2to3

2015-09-30 Thread mjacob
Author: Manuel Jacob 
Branch: py3k
Changeset: r79915:6029baccadac
Date: 2015-09-30 16:29 +0200
http://bitbucket.org/pypy/pypy/changeset/6029baccadac/

Log: 2to3

diff --git a/pypy/module/_vmprof/test/test__vmprof.py 
b/pypy/module/_vmprof/test/test__vmprof.py
--- a/pypy/module/_vmprof/test/test__vmprof.py
+++ b/pypy/module/_vmprof/test/test__vmprof.py
@@ -40,7 +40,7 @@
 count += 1
 i += 2 * WORD + size
 else:
-raise AssertionError(ord(s[i]))
+raise AssertionError(s[i])
 return count
 
 import _vmprof
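
Note: the reason for this 2to3 tweak is that indexing a bytes object already yields an int on Python 3, so the extra ord() would raise TypeError there:

    s = b"\x05"
    assert s[0] == 5        # Python 3: bytes indexing gives an int
    # ord(s[0])             # TypeError: ord() expects a string of length 1
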


[pypy-commit] pypy vecopt-merge: some minor reverts from defaults

2015-09-30 Thread plan_rich
Author: Richard Plangger 
Branch: vecopt-merge
Changeset: r79906:bec18388a99b
Date: 2015-09-30 15:42 +0200
http://bitbucket.org/pypy/pypy/changeset/bec18388a99b/

Log: some minor reverts from defaults

diff --git a/rpython/jit/backend/llsupport/test/test_descr.py 
b/rpython/jit/backend/llsupport/test/test_descr.py
--- a/rpython/jit/backend/llsupport/test/test_descr.py
+++ b/rpython/jit/backend/llsupport/test/test_descr.py
@@ -144,7 +144,6 @@
 descr4 = get_array_descr(c0, A4)
 descr5 = get_array_descr(c0, A5)
 descr6 = get_array_descr(c0, A6)
-import pdb; pdb.set_trace()
 assert isinstance(descr1, ArrayDescr)
 assert descr1 == get_array_descr(c0, lltype.GcArray(lltype.Char))
 assert descr1.flag == FLAG_UNSIGNED
diff --git a/rpython/jit/backend/x86/assembler.py 
b/rpython/jit/backend/x86/assembler.py
--- a/rpython/jit/backend/x86/assembler.py
+++ b/rpython/jit/backend/x86/assembler.py
@@ -575,7 +575,7 @@
 frame_depth = max(self.current_clt.frame_info.jfi_frame_depth,
   frame_depth_no_fixed_size + JITFRAME_FIXED_SIZE)
 if logger:
-logger.log_bridge(inputargs, operations, "rewritten",
+logger.log_bridge(inputargs, operations, "rewritten", faildescr,
   ops_offset=ops_offset)
 self.fixup_target_tokens(rawstart)
 self.update_frame_depth(frame_depth)
@@ -1020,7 +1020,7 @@
 faildescr, failargs,
 faillocs, frame_depth)
 genop_guard_list[guard_opnum](self, guard_op, guard_token,
- arglocs, resloc)
+  arglocs, resloc)
 if not we_are_translated():
 # must be added by the genop_guard_list[]()
 assert guard_token is self.pending_guard_tokens[-1]
diff --git a/rpython/jit/backend/x86/test/test_rx86.py 
b/rpython/jit/backend/x86/test/test_rx86.py
--- a/rpython/jit/backend/x86/test/test_rx86.py
+++ b/rpython/jit/backend/x86/test/test_rx86.py
@@ -14,9 +14,6 @@
 def getvalue(self):
 return ''.join(self.buffer)
 
-def clear(self):
-self.buffer = []
-
 def force_frame_size(self, frame_size):
 pass
 


[pypy-commit] pypy vecopt-merge: forgot to cache the descr when concrete type is set, some minor changes reverted

2015-09-30 Thread plan_rich
Author: Richard Plangger 
Branch: vecopt-merge
Changeset: r79905:190de8806c18
Date: 2015-09-30 15:31 +0200
http://bitbucket.org/pypy/pypy/changeset/190de8806c18/

Log: forgot to cache the descr when concrete type is set, some minor
changes reverted

diff --git a/rpython/jit/backend/arm/assembler.py 
b/rpython/jit/backend/arm/assembler.py
--- a/rpython/jit/backend/arm/assembler.py
+++ b/rpython/jit/backend/arm/assembler.py
@@ -932,7 +932,6 @@
 while regalloc.position() < len(operations) - 1:
 regalloc.next_instruction()
 i = regalloc.position()
-self.position = i
 op = operations[i]
 self.mc.mark_op(op)
 opnum = op.getopnum()
diff --git a/rpython/jit/backend/arm/opassembler.py 
b/rpython/jit/backend/arm/opassembler.py
--- a/rpython/jit/backend/arm/opassembler.py
+++ b/rpython/jit/backend/arm/opassembler.py
@@ -180,13 +180,13 @@
 
 gcmap = allocate_gcmap(self, frame_depth, JITFRAME_FIXED_SIZE)
 token = ArmGuardToken(self.cpu, gcmap,
-  descr,
-  failargs=op.getfailargs(),
-  fail_locs=arglocs,
-  offset=offset,
-  guard_opnum=op.getopnum(),
-  frame_depth=frame_depth,
-  fcond=fcond)
+descr,
+failargs=op.getfailargs(),
+fail_locs=arglocs,
+offset=offset,
+guard_opnum=op.getopnum(),
+frame_depth=frame_depth,
+fcond=fcond)
 return token
 
 def _emit_guard(self, op, arglocs, is_guard_not_invalidated=False):
@@ -199,6 +199,7 @@
 pos = self.mc.currpos()
 token = self.build_guard_token(op, arglocs[0].value, arglocs[1:], pos, 
fcond)
 self.pending_guards.append(token)
+assert token.guard_not_invalidated() == is_guard_not_invalidated
 # For all guards that are not GUARD_NOT_INVALIDATED we emit a
 # breakpoint to ensure the location is patched correctly. In the case
 # of GUARD_NOT_INVALIDATED we use just a NOP, because it is only
diff --git a/rpython/jit/backend/llgraph/runner.py 
b/rpython/jit/backend/llgraph/runner.py
--- a/rpython/jit/backend/llgraph/runner.py
+++ b/rpython/jit/backend/llgraph/runner.py
@@ -209,8 +209,7 @@
 def is_array_of_primitives(self):
 kind = getkind(self.A.OF)
 return kind == 'float' or \
-   kind == 'int' or \
-   kind == ''
+   kind == 'int'
 
 def is_always_pure(self):
 return self._is_pure
@@ -221,12 +220,6 @@
 def __repr__(self):
 return 'ArrayDescr(%r)' % (self.OUTERA,)
 
-def get_all_fielddescrs(self):
-return self.all_interiorfielddescrs
-
-def __repr__(self):
-return 'ArrayDescr(%r)' % (self.OUTERA,)
-
 def is_array_of_pointers(self):
 return getkind(self.A.OF) == 'ref'
 
@@ -1069,7 +1062,7 @@
 
 def fail_guard(self, descr, saved_data=None):
 values = []
-for i,box in enumerate(self.current_op.getfailargs()):
+for box in self.current_op.getfailargs():
 if box is not None:
 value = self.env[box]
 else:
@@ -1095,9 +1088,6 @@
 argboxes = self.current_op.getarglist()
 self.do_renaming(argboxes, args)
 
-def execute_guard_early_exit(self, descr):
-pass
-
 def _test_true(self, arg):
 if isinstance(arg, list):
 return all(arg)
diff --git a/rpython/jit/backend/llsupport/assembler.py 
b/rpython/jit/backend/llsupport/assembler.py
--- a/rpython/jit/backend/llsupport/assembler.py
+++ b/rpython/jit/backend/llsupport/assembler.py
@@ -2,8 +2,8 @@
 from rpython.jit.backend.llsupport.memcpy import memcpy_fn, memset_fn
 from rpython.jit.backend.llsupport.symbolic import WORD
 from rpython.jit.backend.llsupport.codemap import CodemapBuilder
-from rpython.jit.metainterp.history import (INT, REF, FLOAT, VECTOR,
-JitCellToken, ConstInt, AbstractFailDescr)
+from rpython.jit.metainterp.history import (INT, REF, FLOAT, JitCellToken,
+ConstInt, AbstractFailDescr, VECTOR)
 from rpython.jit.metainterp.resoperation import ResOperation, rop
 from rpython.rlib import rgc
 from rpython.rlib.debug import (debug_start, debug_stop, have_debug_prints_for,
diff --git a/rpython/jit/backend/llsupport/descr.py 
b/rpython/jit/backend/llsupport/descr.py
--- a/rpython/jit/backend/llsupport/descr.py
+++ b/rpython/jit/backend/llsupport/descr.py
@@ -285,9 +285,6 @@
 def get_item_size_in_bytes(self):
 return self.itemsize
 
-def get_flag(self):
-return self.flag
-
 def is_array_of_structs(self):
 return self.flag == 

[pypy-commit] pypy vecopt-merge: reverted some changes that are not necessary for the branch

2015-09-30 Thread plan_rich
Author: Richard Plangger 
Branch: vecopt-merge
Changeset: r79907:fe486d25512c
Date: 2015-09-30 15:53 +0200
http://bitbucket.org/pypy/pypy/changeset/fe486d25512c/

Log: reverted some changes that are not necessary for the branch

diff --git a/pypy/module/micronumpy/ufuncs.py b/pypy/module/micronumpy/ufuncs.py
--- a/pypy/module/micronumpy/ufuncs.py
+++ b/pypy/module/micronumpy/ufuncs.py
@@ -276,7 +276,7 @@
 loop.accumulate_flat(
 space, self.func, obj, dtype, out, self.identity)
 if call__array_wrap__:
-out = space.call_method(obj, '__array_wrap__', out, None)
+out = space.call_method(obj, '__array_wrap__', out, space.w_None)
 return out
 
 axis_flags = [False] * shapelen
diff --git a/pypy/module/pypyjit/test_pypy_c/test_00_model.py 
b/pypy/module/pypyjit/test_pypy_c/test_00_model.py
--- a/pypy/module/pypyjit/test_pypy_c/test_00_model.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_00_model.py
@@ -72,7 +72,7 @@
 stderr = ''
 assert not stderr
 #
-if discard_stdout_before_last_line or True:
+if discard_stdout_before_last_line:
 stdout = stdout.splitlines(True)[-1]
 #
 # parse the JIT log


[pypy-commit] pypy default: Fix for the case of a slice(5, sys.maxint) if 5 is greater than 'length'

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: 
Changeset: r79908:0d6164b07b67
Date: 2015-09-30 16:45 +0200
http://bitbucket.org/pypy/pypy/changeset/0d6164b07b67/

Log: Fix for the case of a slice(5, sys.maxint) if 5 is greater than
'length'

diff --git a/pypy/objspace/std/sliceobject.py b/pypy/objspace/std/sliceobject.py
--- a/pypy/objspace/std/sliceobject.py
+++ b/pypy/objspace/std/sliceobject.py
@@ -239,13 +239,16 @@
 # hack for the JIT, for slices with no end specified:
 # this avoids the two comparisons that follow
 if jit.isconstant(stop) and stop == sys.maxint:
-return start, length
-if stop < start:
-stop = start
-if stop > length:
-stop = length
-if jit.isconstant(start) and start == 0:
-pass# no need to do the following check here
-elif start > length:
-start = length
+pass
+else:
+if stop < start:
+stop = start
+if stop <= length:
+return start, stop
+# here is the case where 'stop' is larger than the list
+stop = length
+if jit.isconstant(start) and start == 0:
+pass# no need to do the following check here
+elif start > stop:
+start = stop
 return start, stop
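
Note: a worked example of the clamping this hunk implements, written as plain Python without the JIT hints (clamp_slice is a hypothetical helper name). With a list of length 3, slice(5, sys.maxint) must come out empty rather than keeping start=5:

    import sys

    def clamp_slice(start, stop, length):
        if stop < start:
            stop = start
        if stop > length:
            stop = length
        if start > stop:
            start = stop
        return start, stop

    assert clamp_slice(5, sys.maxint, 3) == (3, 3)   # empty slice, as expected
    assert clamp_slice(1, sys.maxint, 3) == (1, 3)
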


[pypy-commit] extradoc extradoc: start working on the talk

2015-09-30 Thread fijal
Author: fijal
Branch: extradoc
Changeset: r5558:e11054f8dc0a
Date: 2015-09-30 16:25 +0200
http://bitbucket.org/pypy/extradoc/changeset/e11054f8dc0a/

Log: start working on the talk

diff --git a/talk/pyconza2015/talk.rst b/talk/pyconza2015/talk.rst
new file mode 100644
--- /dev/null
+++ b/talk/pyconza2015/talk.rst
@@ -0,0 +1,130 @@
+
+How PyPy runs your program
+==========================
+
+About me
+--------
+
+* PyPy core developer for 8 years
+
+* running consulting business baroquesoftware.com
+
+This talk
+---------
+
+The idea is to learn:
+
+* how pypy runs your programs
+
+* how to assess the performance of your program
+
+* additionally, why a lot of common folklore is not true
+
+The basics of PyPy
+------------------
+
+* python interpreter, just that
+
+* uses magic to run code faster (most of the time)
+
+* different base, not written in C
+
+PyPy - the wider angle
+----------------------
+
+* download it, should come in your distribution
+
+* x86, x86_64, arm; windows, os x, linux
+
+* open source (MIT)
+
+PyPy - usage
+------------
+
+* mostly long running server programs
+
+* call C using cffi, a lot of libraries just work
+
+* use virtualenv (you should anyway)
+
+PyPy - magic
+------------
+
+* just in time compiler, replaces bytecode to assembler under your feet
+
+* takes a while to warm up, which defeats most short running programs
+
+* most of the time faster, sometimes slower
+
+* heavily optimizing, tons of heuristics for **typical** python programs
+
+PyPy - smallish example
+-----------------------
+
+* take python code
+
+* run python in interpreted mode (slow)
+
+* run python in meta-interpreter mode (VERY SLOW)
+
+* compile to optimized assembler
+
+* repeat if necessary
+
+Tracing JIT
+-----------
+
+* follow what the program is doing
+
+* enter special mode where all the operations are recorded
+
+* compile the recorded list of operations
+
+* add a bunch of "guards" that check that we're following the correct path
+  and correct optimizations
+
+* if guard fails, jump to interpreter
+
+* if guard fails enough jump to metainterpreter
+
+* repeat until all the paths are compiled to assembler
+
+Performance
+-----------
+
+* you need a metric (response time, number of requests)
+
+* the less you're trying to measure, the better
+
+* benchmarks are a vast improvement
+
+* repeatability is the key
+
+Optimization for dummies
+------------------------
+
+* Obligatory citation
+
+  - *premature optimization is the root of all evil* (D. Knuth)
+
+* Pareto principle, or 80-20 rule
+
+  - 80% of the time will be spent in 20% of the program
+
+  - 20% of 1 mln is 200 000
+
+* Two golden rules:
+
+  1. Identify the slow spots
+
+  2. Optimize them
+
+Guidos points about optimizing python
+-------------------------------------
+
+xxx find the link
+
+Why we're here?
+---------------
+
+y


[pypy-commit] pypy py3.3: hg merge d12dfd19fd86

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: py3.3
Changeset: r79909:4fa19970ddeb
Date: 2015-09-30 17:39 +0100
http://bitbucket.org/pypy/pypy/changeset/4fa19970ddeb/

Log: hg merge d12dfd19fd86

This includes a few more commits from "default" which fix two
problems:

 - an issue with pinned objects
 - linux asmgcc was broken

diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py
--- a/rpython/memory/gc/incminimark.py
+++ b/rpython/memory/gc/incminimark.py
@@ -166,7 +166,7 @@
 
 # The marking phase. We walk the list 'objects_to_trace' of all gray objects
 # and mark all of the things they point to gray. This step lasts until there
-# are no more gray objects.
+# are no more gray objects.  ('objects_to_trace' never contains pinned objs.)
 STATE_MARKING = 1
 
 # here we kill all the unvisited objects
@@ -1146,6 +1146,9 @@
   "raw_malloc_might_sweep must be empty outside SWEEPING")
 
 if self.gc_state == STATE_MARKING:
+self.objects_to_trace.foreach(self._check_not_in_nursery, None)
+self.more_objects_to_trace.foreach(self._check_not_in_nursery,
+   None)
 self._debug_objects_to_trace_dict1 = \
 self.objects_to_trace.stack2dict()
 self._debug_objects_to_trace_dict2 = \
@@ -1156,6 +1159,10 @@
 else:
 MovingGCBase.debug_check_consistency(self)
 
+def _check_not_in_nursery(self, obj, ignore):
+ll_assert(not self.is_in_nursery(obj),
+  "'objects_to_trace' contains a nursery object")
+
 def debug_check_object(self, obj):
 # We are after a minor collection, and possibly after a major
 # collection step.  No object should be in the nursery (except
@@ -1789,6 +1796,8 @@
 # If we're incrementally marking right now, sorry, we also
 # need to add the object to 'more_objects_to_trace' and have
 # it fully traced once at the end of the current marking phase.
+ll_assert(not self.is_in_nursery(obj),
+  "expected nursery obj in 
collect_cardrefs_to_nursery")
 if self.gc_state == STATE_MARKING:
 self.header(obj).tid &= ~GCFLAG_VISITED
 self.more_objects_to_trace.append(obj)
@@ -1845,8 +1854,11 @@
 # need to record the not-visited-yet (white) old objects.  So
 # as a conservative approximation, we need to add the object to
 # the list if and only if it doesn't have GCFLAG_VISITED yet.
+#
+# Additionally, ignore pinned objects.
+#
 obj = root.address[0]
-if not self.header(obj).tid & GCFLAG_VISITED:
+if (self.header(obj).tid & (GCFLAG_VISITED | GCFLAG_PINNED)) == 0:
 self.more_objects_to_trace.append(obj)
 
 def _trace_drag_out(self, root, parent):
@@ -1899,7 +1911,7 @@
 #
 self.old_objects_pointing_to_pinned.append(parent)
 self.updated_old_objects_pointing_to_pinned = True
-self.header(parent).tid |= GCFLAG_PINNED
+self.header(parent).tid |= GCFLAG_PINNED_OBJECT_PARENT_KNOWN
 #
 if hdr.tid & GCFLAG_VISITED:
 return
@@ -2033,6 +2045,7 @@
 new.delete()
 
 def _add_to_more_objects_to_trace(self, obj, ignored):
+ll_assert(not self.is_in_nursery(obj), "unexpected nursery obj here")
 self.header(obj).tid &= ~GCFLAG_VISITED
 self.more_objects_to_trace.append(obj)
 
@@ -2287,8 +2300,7 @@
 def collect_roots(self):
 # Collect all roots.  Starts from all the objects
 # from 'prebuilt_root_objects'.
-self.prebuilt_root_objects.foreach(self._collect_obj,
-   self.objects_to_trace)
+self.prebuilt_root_objects.foreach(self._collect_obj, None)
 #
 # Add the roots from the other sources.
 self.root_walker.walk_roots(
@@ -2298,43 +2310,48 @@
 #
 # If we are in an inner collection caused by a call to a finalizer,
 # the 'run_finalizers' objects also need to be kept alive.
-self.run_finalizers.foreach(self._collect_obj,
-self.objects_to_trace)
+self.run_finalizers.foreach(self._collect_obj, None)
 
 def enumerate_all_roots(self, callback, arg):
 self.prebuilt_root_objects.foreach(callback, arg)
 MovingGCBase.enumerate_all_roots(self, callback, arg)
 enumerate_all_roots._annspecialcase_ = 'specialize:arg(1)'
 
-@staticmethod
-def _collect_obj(obj, objects_to_trace):
-objects_to_trace.append(obj)
+def _collect_obj(self, obj, ignored):
+# Ignore pinned objects, which are the ones still in the nursery here.
+# 

[pypy-commit] pypy py3k: hg merge d12dfd19fd86

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: py3k
Changeset: r79910:bed78253f4c5
Date: 2015-09-30 18:41 +0200
http://bitbucket.org/pypy/pypy/changeset/bed78253f4c5/

Log: hg merge d12dfd19fd86

same as 4fa19970ddeb for the py3k branch

diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py
--- a/rpython/memory/gc/incminimark.py
+++ b/rpython/memory/gc/incminimark.py
@@ -166,7 +166,7 @@
 
 # The marking phase. We walk the list 'objects_to_trace' of all gray objects
 # and mark all of the things they point to gray. This step lasts until there
-# are no more gray objects.
+# are no more gray objects.  ('objects_to_trace' never contains pinned objs.)
 STATE_MARKING = 1
 
 # here we kill all the unvisited objects
@@ -1146,6 +1146,9 @@
   "raw_malloc_might_sweep must be empty outside SWEEPING")
 
 if self.gc_state == STATE_MARKING:
+self.objects_to_trace.foreach(self._check_not_in_nursery, None)
+self.more_objects_to_trace.foreach(self._check_not_in_nursery,
+   None)
 self._debug_objects_to_trace_dict1 = \
 self.objects_to_trace.stack2dict()
 self._debug_objects_to_trace_dict2 = \
@@ -1156,6 +1159,10 @@
 else:
 MovingGCBase.debug_check_consistency(self)
 
+def _check_not_in_nursery(self, obj, ignore):
+ll_assert(not self.is_in_nursery(obj),
+  "'objects_to_trace' contains a nursery object")
+
 def debug_check_object(self, obj):
 # We are after a minor collection, and possibly after a major
 # collection step.  No object should be in the nursery (except
@@ -1789,6 +1796,8 @@
 # If we're incrementally marking right now, sorry, we also
 # need to add the object to 'more_objects_to_trace' and have
 # it fully traced once at the end of the current marking phase.
+ll_assert(not self.is_in_nursery(obj),
+  "expected nursery obj in 
collect_cardrefs_to_nursery")
 if self.gc_state == STATE_MARKING:
 self.header(obj).tid &= ~GCFLAG_VISITED
 self.more_objects_to_trace.append(obj)
@@ -1845,8 +1854,11 @@
 # need to record the not-visited-yet (white) old objects.  So
 # as a conservative approximation, we need to add the object to
 # the list if and only if it doesn't have GCFLAG_VISITED yet.
+#
+# Additionally, ignore pinned objects.
+#
 obj = root.address[0]
-if not self.header(obj).tid & GCFLAG_VISITED:
+if (self.header(obj).tid & (GCFLAG_VISITED | GCFLAG_PINNED)) == 0:
 self.more_objects_to_trace.append(obj)
 
 def _trace_drag_out(self, root, parent):
@@ -1899,7 +1911,7 @@
 #
 self.old_objects_pointing_to_pinned.append(parent)
 self.updated_old_objects_pointing_to_pinned = True
-self.header(parent).tid |= GCFLAG_PINNED
+self.header(parent).tid |= GCFLAG_PINNED_OBJECT_PARENT_KNOWN
 #
 if hdr.tid & GCFLAG_VISITED:
 return
@@ -2033,6 +2045,7 @@
 new.delete()
 
 def _add_to_more_objects_to_trace(self, obj, ignored):
+ll_assert(not self.is_in_nursery(obj), "unexpected nursery obj here")
 self.header(obj).tid &= ~GCFLAG_VISITED
 self.more_objects_to_trace.append(obj)
 
@@ -2287,8 +2300,7 @@
 def collect_roots(self):
 # Collect all roots.  Starts from all the objects
 # from 'prebuilt_root_objects'.
-self.prebuilt_root_objects.foreach(self._collect_obj,
-   self.objects_to_trace)
+self.prebuilt_root_objects.foreach(self._collect_obj, None)
 #
 # Add the roots from the other sources.
 self.root_walker.walk_roots(
@@ -2298,43 +2310,48 @@
 #
 # If we are in an inner collection caused by a call to a finalizer,
 # the 'run_finalizers' objects also need to be kept alive.
-self.run_finalizers.foreach(self._collect_obj,
-self.objects_to_trace)
+self.run_finalizers.foreach(self._collect_obj, None)
 
 def enumerate_all_roots(self, callback, arg):
 self.prebuilt_root_objects.foreach(callback, arg)
 MovingGCBase.enumerate_all_roots(self, callback, arg)
 enumerate_all_roots._annspecialcase_ = 'specialize:arg(1)'
 
-@staticmethod
-def _collect_obj(obj, objects_to_trace):
-objects_to_trace.append(obj)
+def _collect_obj(self, obj, ignored):
+# Ignore pinned objects, which are the ones still in the nursery here.
+# Cache effects: don't read any flag out of 'obj' at this point.
+# But only checking if it is in the nursery or not 
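
To make the intent of the incminimark hunks above more concrete, here is a minimal Python sketch of the invariant they enforce (the flag values and helper names below are made up for illustration; the real code lives in rpython/memory/gc/incminimark.py). Pinned objects stay in the nursery, so they must never be pushed onto the gray stacks 'objects_to_trace' / 'more_objects_to_trace'; the widened flag mask and the new ll_assert calls both express that:

    GCFLAG_VISITED = 0x01   # hypothetical bit values, for illustration only
    GCFLAG_PINNED  = 0x02

    def maybe_add_to_gray_stack(tid, in_nursery, gray_stack, obj):
        # One combined mask tests "already visited" and "pinned" at once,
        # mirroring the change from 'tid & GCFLAG_VISITED' to
        # 'tid & (GCFLAG_VISITED | GCFLAG_PINNED)' in the hunks above.
        if (tid & (GCFLAG_VISITED | GCFLAG_PINNED)) == 0:
            # Same invariant as the new ll_assert calls: nothing on the
            # gray stacks may still live in the nursery.
            assert not in_nursery, "gray stack must not hold nursery objects"
            gray_stack.append(obj)

    stack = []
    maybe_add_to_gray_stack(0x00, False, stack, "old white object")  # appended
    maybe_add_to_gray_stack(GCFLAG_PINNED, False, stack, "pinned")   # skipped
    assert stack == ["old white object"]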

[pypy-commit] cffi default: Maybe a fix for multiple interpreters

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: 
Changeset: r2293:3789df86dd71
Date: 2015-09-30 16:57 +0200
http://bitbucket.org/cffi/cffi/changeset/3789df86dd71/

Log:Maybe a fix for multiple interpreters

diff --git a/c/cffi1_module.c b/c/cffi1_module.c
--- a/c/cffi1_module.c
+++ b/c/cffi1_module.c
@@ -21,33 +21,38 @@
 {
 PyObject *x;
 int i;
+static int init_done = 0;
 
 if (PyType_Ready(&FFI_Type) < 0)
 return -1;
 if (PyType_Ready(&Lib_Type) < 0)
 return -1;
-if (init_global_types_dict(FFI_Type.tp_dict) < 0)
-return -1;
 
-FFIError = PyErr_NewException("ffi.error", NULL, NULL);
-if (FFIError == NULL)
-return -1;
-if (PyDict_SetItemString(FFI_Type.tp_dict, "error", FFIError) < 0)
-return -1;
-if (PyDict_SetItemString(FFI_Type.tp_dict, "CType",
- (PyObject *)&CTypeDescr_Type) < 0)
-return -1;
-if (PyDict_SetItemString(FFI_Type.tp_dict, "CData",
- (PyObject *)&CData_Type) < 0)
-return -1;
+if (!init_done) {
+if (init_global_types_dict(FFI_Type.tp_dict) < 0)
+return -1;
 
-for (i = 0; all_dlopen_flags[i].name != NULL; i++) {
-x = PyInt_FromLong(all_dlopen_flags[i].value);
-if (x == NULL || PyDict_SetItemString(FFI_Type.tp_dict,
-  all_dlopen_flags[i].name,
-  x) < 0)
+FFIError = PyErr_NewException("ffi.error", NULL, NULL);
+if (FFIError == NULL)
 return -1;
-Py_DECREF(x);
+if (PyDict_SetItemString(FFI_Type.tp_dict, "error", FFIError) < 0)
+return -1;
+if (PyDict_SetItemString(FFI_Type.tp_dict, "CType",
+ (PyObject *)&CTypeDescr_Type) < 0)
+return -1;
+if (PyDict_SetItemString(FFI_Type.tp_dict, "CData",
+ (PyObject *)&CData_Type) < 0)
+return -1;
+
+for (i = 0; all_dlopen_flags[i].name != NULL; i++) {
+x = PyInt_FromLong(all_dlopen_flags[i].value);
+if (x == NULL || PyDict_SetItemString(FFI_Type.tp_dict,
+  all_dlopen_flags[i].name,
+  x) < 0)
+return -1;
+Py_DECREF(x);
+}
+init_done = 1;
 }
 
 x = (PyObject *)&FFI_Type;
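
The pattern behind this fix, in rough terms: one-time setup of process-global state (the global types dict, the ffi.error exception, the dlopen flag constants) is now guarded by a static init_done flag, so running the module-init code again, e.g. from a second interpreter or a repeated import, does not redo the setup or overwrite the objects created the first time. A small Python rendering of the same idea, with made-up names (this is a sketch, not the actual cffi code):

    _init_done = False   # plays the role of the C 'static int init_done'
    _ffi_error = None    # process-global object that must be created only once

    def init_ffi_lib():
        # Idempotent setup: a second call must reuse, not recreate, the
        # process-global state built by the first call.
        global _init_done, _ffi_error
        if not _init_done:
            _ffi_error = type("error", (Exception,), {})
            _init_done = True
        # per-interpreter work (module attributes, etc.) would follow here
        return _ffi_error

    assert init_ffi_lib() is init_ffi_lib()   # second call reuses the object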


[pypy-commit] cffi default: More attempts at fixes for multiple interpreters

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: 
Changeset: r2294:e312e28ae4b2
Date: 2015-09-30 17:06 +0200
http://bitbucket.org/cffi/cffi/changeset/e312e28ae4b2/

Log:More attempts at fixes for multiple interpreters

diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c
--- a/c/_cffi_backend.c
+++ b/c/_cffi_backend.c
@@ -6289,6 +6289,7 @@
 {
 PyObject *m, *v;
 int i;
+static char init_done = 0;
 
 v = PySys_GetObject("version");
 if (v == NULL || !PyText_Check(v) ||
@@ -6331,14 +6332,17 @@
 if (PyType_Ready(&MiniBuffer_Type) < 0)
 INITERROR;
 
-v = PyText_FromString("_cffi_backend");
-if (v == NULL || PyDict_SetItemString(CData_Type.tp_dict,
-  "__module__", v) < 0)
-INITERROR;
-v = PyText_FromString("");
-if (v == NULL || PyDict_SetItemString(CData_Type.tp_dict,
-  "__name__", v) < 0)
-INITERROR;
+if (!init_done) {
+v = PyText_FromString("_cffi_backend");
+if (v == NULL || PyDict_SetItemString(CData_Type.tp_dict,
+  "__module__", v) < 0)
+INITERROR;
+v = PyText_FromString("");
+if (v == NULL || PyDict_SetItemString(CData_Type.tp_dict,
+  "__name__", v) < 0)
+INITERROR;
+init_done = 1;
+}
 
 /* this is for backward compatibility only */
 v = PyCapsule_New((void *)cffi_exports, "cffi", NULL);
@@ -6377,6 +6381,8 @@
 }
 
 init_errno();
+if (PyErr_Occurred())
+INITERROR;
 
 if (init_ffi_lib(m) < 0)
 INITERROR;
diff --git a/c/cffi1_module.c b/c/cffi1_module.c
--- a/c/cffi1_module.c
+++ b/c/cffi1_module.c
@@ -21,7 +21,7 @@
 {
 PyObject *x;
 int i;
-static int init_done = 0;
+static char init_done = 0;
 
 if (PyType_Ready(_Type) < 0)
 return -1;
diff --git a/c/file_emulator.h b/c/file_emulator.h
--- a/c/file_emulator.h
+++ b/c/file_emulator.h
@@ -5,12 +5,14 @@
 
 static int init_file_emulator(void)
 {
-PyObject *io = PyImport_ImportModule("_io");
-if (io == NULL)
-return -1;
-PyIOBase_TypeObj = PyObject_GetAttrString(io, "_IOBase");
-if (PyIOBase_TypeObj == NULL)
-return -1;
+if (PyIOBase_TypeObj == NULL) {
+PyObject *io = PyImport_ImportModule("_io");
+if (io == NULL)
+return -1;
+PyIOBase_TypeObj = PyObject_GetAttrString(io, "_IOBase");
+if (PyIOBase_TypeObj == NULL)
+return -1;
+}
 return 0;
 }
 
diff --git a/c/misc_win32.h b/c/misc_win32.h
--- a/c/misc_win32.h
+++ b/c/misc_win32.h
@@ -8,13 +8,15 @@
 int saved_lasterror;
 };
 
-static DWORD cffi_tls_index;
+static DWORD cffi_tls_index = TLS_OUT_OF_INDEXES;
 
 static void init_errno(void)
 {
-cffi_tls_index = TlsAlloc();
-if (cffi_tls_index == TLS_OUT_OF_INDEXES)
-PyErr_SetString(PyExc_WindowsError, "TlsAlloc() failed");
+if (cffi_tls_index == TLS_OUT_OF_INDEXES) {
+cffi_tls_index = TlsAlloc();
+if (cffi_tls_index == TLS_OUT_OF_INDEXES)
+PyErr_SetString(PyExc_WindowsError, "TlsAlloc() failed");
+}
 }
 
 static struct cffi_errno_s *_geterrno_object(void)
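
These hunks apply the same run-once idea, but with a sentinel value instead of a separate flag: the "not initialized yet" state is encoded in the variable itself (a NULL PyIOBase_TypeObj, or TLS_OUT_OF_INDEXES for the Windows TLS slot), so the allocation only happens on the first call. A small Python sketch of the sentinel variant, again with made-up names and a fake allocator:

    TLS_OUT_OF_INDEXES = -1          # stand-in for the Win32 sentinel value
    _tls_index = TLS_OUT_OF_INDEXES  # "not allocated yet", like the C static

    _fake_slots = iter(range(64))    # fake slot allocator, for the sketch only

    def init_errno():
        # Allocate the process-wide TLS slot only on the first call; later
        # calls (e.g. from a second interpreter) reuse the existing slot.
        global _tls_index
        if _tls_index == TLS_OUT_OF_INDEXES:
            _tls_index = next(_fake_slots)
        return _tls_index

    assert init_errno() == init_errno() == 0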


[pypy-commit] cffi default: One more place

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: 
Changeset: r2295:d775ed0dc13c
Date: 2015-09-30 17:07 +0200
http://bitbucket.org/cffi/cffi/changeset/d775ed0dc13c/

Log:One more place

diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c
--- a/c/_cffi_backend.c
+++ b/c/_cffi_backend.c
@@ -6309,9 +6309,11 @@
 if (m == NULL)
 INITERROR;
 
-unique_cache = PyDict_New();
-if (unique_cache == NULL)
-INITERROR;
+if (unique_cache == NULL) {
+unique_cache = PyDict_New();
+if (unique_cache == NULL)
+INITERROR;
+}
 
 if (PyType_Ready(&dl_type) < 0)
 INITERROR;


[pypy-commit] cffi default: test fix

2015-09-30 Thread arigo
Author: Armin Rigo 
Branch: 
Changeset: r2297:059aca3cb3dc
Date: 2015-09-30 17:19 +0200
http://bitbucket.org/cffi/cffi/changeset/059aca3cb3dc/

Log:test fix

diff --git a/testing/cffi0/test_model.py b/testing/cffi0/test_model.py
--- a/testing/cffi0/test_model.py
+++ b/testing/cffi0/test_model.py
@@ -59,7 +59,7 @@
 
 def test_qual_pointer_type():
 ptr_type = PointerType(PrimitiveType("long long"), Q_RESTRICT)
-assert ptr_type.get_c_name("") == "long long restrict *"
+assert ptr_type.get_c_name("") == "long long __restrict *"
 assert const_voidp_type.get_c_name("") == "void const *"
 
 def test_unknown_pointer_type():
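
For context on this expectation change: the generated C now spells the qualifier as '__restrict', presumably because that spelling is accepted by both gcc and MSVC while the bare C99 keyword 'restrict' is not universally available. A hypothetical sketch of the kind of qualifier-to-keyword mapping involved (the constants and function below are made up, not the real cffi.model API):

    Q_CONST, Q_RESTRICT, Q_VOLATILE = 0x01, 0x02, 0x04   # hypothetical bits

    def spell_qualifiers(quals, declarator):
        # Prepend the spelled-out qualifiers to a C declarator string.
        if quals & Q_CONST:
            declarator = 'const ' + declarator
        if quals & Q_VOLATILE:
            declarator = 'volatile ' + declarator
        if quals & Q_RESTRICT:
            # '__restrict' instead of 'restrict': portable across compilers
            declarator = '__restrict ' + declarator
        return declarator

    assert spell_qualifiers(Q_RESTRICT, '*') == '__restrict *'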