[pypy-commit] pypy refactor-buffer-api: hg merge default
Manuel Jacob
noreply at buildbot.pypy.org
Tue Jan 28 04:20:30 CET 2014
Author: Manuel Jacob
Branch: refactor-buffer-api
Changeset: r68973:94dca846c67a
Date: 2014-01-28 04:14 +0100
http://bitbucket.org/pypy/pypy/changeset/94dca846c67a/
Log: hg merge default
diff too long, truncating to 2000 out of 30700 lines
diff --git a/LICENSE b/LICENSE
--- a/LICENSE
+++ b/LICENSE
@@ -28,7 +28,7 @@
DEALINGS IN THE SOFTWARE.
-PyPy Copyright holders 2003-2013
+PyPy Copyright holders 2003-2014
-----------------------------------
Except when otherwise stated (look for LICENSE files or information at
diff --git a/lib-python/2.7/ctypes/__init__.py b/lib-python/2.7/ctypes/__init__.py
--- a/lib-python/2.7/ctypes/__init__.py
+++ b/lib-python/2.7/ctypes/__init__.py
@@ -371,10 +371,9 @@
self._handle = handle
def __repr__(self):
- return "<%s '%s', handle %r at %x>" % \
- (self.__class__.__name__, self._name,
- (self._handle),
- id(self) & (_sys.maxint*2 + 1))
+ return "<%s '%s', handle %r at 0x%x>" % (
+ self.__class__.__name__, self._name, self._handle,
+ id(self) & (_sys.maxint * 2 + 1))
def __getattr__(self, name):
diff --git a/lib-python/2.7/ctypes/test/test_python_api.py b/lib-python/2.7/ctypes/test/test_python_api.py
--- a/lib-python/2.7/ctypes/test/test_python_api.py
+++ b/lib-python/2.7/ctypes/test/test_python_api.py
@@ -73,6 +73,7 @@
del pyobj
self.assertEqual(grc(s), ref)
+ @xfail
def test_PyOS_snprintf(self):
PyOS_snprintf = pythonapi.PyOS_snprintf
PyOS_snprintf.argtypes = POINTER(c_char), c_size_t, c_char_p
diff --git a/lib-python/2.7/socket.py b/lib-python/2.7/socket.py
--- a/lib-python/2.7/socket.py
+++ b/lib-python/2.7/socket.py
@@ -335,9 +335,10 @@
s = self._sock
self._sock = None
if s is not None:
- s._drop()
if self._close:
s.close()
+ else:
+ s._drop()
def __del__(self):
try:
diff --git a/lib-python/2.7/test/test_memoryview.py b/lib-python/2.7/test/test_memoryview.py
--- a/lib-python/2.7/test/test_memoryview.py
+++ b/lib-python/2.7/test/test_memoryview.py
@@ -166,11 +166,18 @@
self.assertTrue(m[0:6] == m[:])
self.assertFalse(m[0:5] == m)
- # Comparison with objects which don't support the buffer API
- self.assertFalse(m == u"abcdef")
- self.assertTrue(m != u"abcdef")
- self.assertFalse(u"abcdef" == m)
- self.assertTrue(u"abcdef" != m)
+ if test_support.check_impl_detail(cpython=True):
+ # what is supported and what is not supported by memoryview is
+ # very inconsistent on CPython. In PyPy, memoryview supports
+ # the buffer interface, and thus the following comparison
+ # succeeds. See also the comment in
+ # pypy.modules.__builtin__.interp_memoryview.W_MemoryView.descr_buffer
+ #
+ # Comparison with objects which don't support the buffer API
+ self.assertFalse(m == u"abcdef", "%s %s" % (self, tp))
+ self.assertTrue(m != u"abcdef")
+ self.assertFalse(u"abcdef" == m)
+ self.assertTrue(u"abcdef" != m)
# Unordered comparisons are unimplemented, and therefore give
# arbitrary results (they raise a TypeError in py3k)
diff --git a/lib-python/2.7/test/test_ssl.py b/lib-python/2.7/test/test_ssl.py
--- a/lib-python/2.7/test/test_ssl.py
+++ b/lib-python/2.7/test/test_ssl.py
@@ -993,7 +993,7 @@
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_REQUIRED)
- try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, True)
+ try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_TLSv1, False)
diff --git a/lib-python/conftest.py b/lib-python/conftest.py
--- a/lib-python/conftest.py
+++ b/lib-python/conftest.py
@@ -109,7 +109,7 @@
RegrTest('test_asynchat.py', usemodules='select fcntl'),
RegrTest('test_asyncore.py', usemodules='select fcntl'),
RegrTest('test_atexit.py', core=True),
- RegrTest('test_audioop.py', skip="unsupported extension module"),
+ RegrTest('test_audioop.py', skip="incomplete module"),
RegrTest('test_augassign.py', core=True),
RegrTest('test_base64.py', usemodules='struct'),
RegrTest('test_bastion.py'),
diff --git a/lib_pypy/_ctypes/array.py b/lib_pypy/_ctypes/array.py
--- a/lib_pypy/_ctypes/array.py
+++ b/lib_pypy/_ctypes/array.py
@@ -1,4 +1,4 @@
-import _ffi
+from _rawffi import alt as _ffi
import _rawffi
from _ctypes.basics import _CData, cdata_from_address, _CDataMeta, sizeof
@@ -20,10 +20,13 @@
# we don't want to have buffers here
if len(val) > self._length_:
raise ValueError("%r too long" % (val,))
- for i in range(len(val)):
- self[i] = val[i]
+ if isinstance(val, str):
+ _rawffi.rawstring2charp(self._buffer.buffer, val)
+ else:
+ for i in range(len(val)):
+ self[i] = val[i]
if len(val) < self._length_:
- self[len(val)] = '\x00'
+ self._buffer[len(val)] = '\x00'
res.value = property(getvalue, setvalue)
def getraw(self):
@@ -33,8 +36,7 @@
def setraw(self, buffer):
if len(buffer) > self._length_:
raise ValueError("%r too long" % (buffer,))
- for i in range(len(buffer)):
- self[i] = buffer[i]
+ _rawffi.rawstring2charp(self._buffer.buffer, buffer)
res.raw = property(getraw, setraw)
elif subletter == 'u':
def getvalue(self):
@@ -45,10 +47,14 @@
# we don't want to have buffers here
if len(val) > self._length_:
raise ValueError("%r too long" % (val,))
+ if isinstance(val, unicode):
+ target = self._buffer
+ else:
+ target = self
for i in range(len(val)):
- self[i] = val[i]
+ target[i] = val[i]
if len(val) < self._length_:
- self[len(val)] = '\x00'
+ target[len(val)] = u'\x00'
res.value = property(getvalue, setvalue)
if '_length_' in typedict:
diff --git a/lib_pypy/_ctypes/basics.py b/lib_pypy/_ctypes/basics.py
--- a/lib_pypy/_ctypes/basics.py
+++ b/lib_pypy/_ctypes/basics.py
@@ -1,6 +1,6 @@
import _rawffi
-import _ffi
+from _rawffi import alt as _ffi
import sys
try: from __pypy__ import builtinify
diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py
--- a/lib_pypy/_ctypes/function.py
+++ b/lib_pypy/_ctypes/function.py
@@ -5,7 +5,7 @@
from _ctypes.basics import is_struct_shape
from _ctypes.builtin import get_errno, set_errno, get_last_error, set_last_error
import _rawffi
-import _ffi
+from _rawffi import alt as _ffi
import sys
import traceback
@@ -328,21 +328,23 @@
raise ValueError(
"native COM method call without 'this' parameter"
)
- thisarg = cast(args[0], POINTER(POINTER(c_void_p)))
- keepalives, newargs, argtypes, outargs = self._convert_args(argtypes,
- args[1:], kwargs)
- newargs.insert(0, args[0].value)
+ thisvalue = args.pop(0)
+ thisarg = cast(thisvalue, POINTER(POINTER(c_void_p)))
+ keepalives, newargs, argtypes, outargs, errcheckargs = (
+ self._convert_args(argtypes, args, kwargs))
+ args.insert(0, thisvalue)
+ newargs.insert(0, thisvalue.value)
argtypes.insert(0, c_void_p)
else:
thisarg = None
- keepalives, newargs, argtypes, outargs = self._convert_args(argtypes,
- args, kwargs)
+ keepalives, newargs, argtypes, outargs, errcheckargs = (
+ self._convert_args(argtypes, args, kwargs))
funcptr = self._getfuncptr(argtypes, self._restype_, thisarg)
result = self._call_funcptr(funcptr, *newargs)
- result = self._do_errcheck(result, args)
+ result, forced = self._do_errcheck(result, errcheckargs)
- if not outargs:
+ if not outargs or forced:
return result
from ctypes import c_void_p
@@ -377,22 +379,22 @@
set_last_error(tmp)
#
try:
- return self._build_result(self._restype_, result, newargs)
+ return self._build_result(self._restype_, result)
finally:
funcptr.free_temp_buffers()
def _do_errcheck(self, result, args):
# The 'errcheck' protocol
if self._errcheck_:
- v = self._errcheck_(result, self, args)
+ v = self._errcheck_(result, self, tuple(args))
# If the errcheck function failed, let it throw
# If the errcheck function returned newargs unchanged,
# continue normal processing.
# If the errcheck function returned something else,
# use that as result.
if v is not args:
- return v
- return result
+ return v, True
+ return result, False
def _getfuncptr_fromaddress(self, argtypes, restype):
address = self._get_address()
@@ -495,16 +497,16 @@
newargtypes = []
total = len(args)
paramflags = self._paramflags
- inargs_idx = 0
if not paramflags and total < len(argtypes):
raise TypeError("not enough arguments")
- for i, argtype in enumerate(argtypes):
- flag = 0
- name = None
- defval = marker
- if paramflags:
+ if paramflags:
+ errcheckargs = []
+ inargs_idx = 0
+ for i, argtype in enumerate(argtypes):
+ flag = 0
+ defval = marker
paramflag = paramflags[i]
paramlen = len(paramflag)
name = None
@@ -519,6 +521,7 @@
val = defval
if val is marker:
val = 0
+ errcheckargs.append(val)
keepalive, newarg, newargtype = self._conv_param(argtype, val)
keepalives.append(keepalive)
newargs.append(newarg)
@@ -536,27 +539,31 @@
raise TypeError("required argument '%s' missing" % name)
else:
raise TypeError("not enough arguments")
+ errcheckargs.append(val)
keepalive, newarg, newargtype = self._conv_param(argtype, val)
keepalives.append(keepalive)
newargs.append(newarg)
newargtypes.append(newargtype)
elif flag == PARAMFLAG_FOUT:
if defval is not marker:
- outargs.append(defval)
+ val = defval
keepalive, newarg, newargtype = self._conv_param(argtype, defval)
else:
import ctypes
val = argtype._type_()
- outargs.append(val)
keepalive = None
newarg = ctypes.byref(val)
newargtype = type(newarg)
+ errcheckargs.append(val)
+ outargs.append(val)
keepalives.append(keepalive)
newargs.append(newarg)
newargtypes.append(newargtype)
else:
raise ValueError("paramflag %d not yet implemented" % flag)
- else:
+ else:
+ errcheckargs = args
+ for i, argtype in enumerate(argtypes):
try:
keepalive, newarg, newargtype = self._conv_param(argtype, args[i])
except (UnicodeError, TypeError, ValueError), e:
@@ -564,7 +571,6 @@
keepalives.append(keepalive)
newargs.append(newarg)
newargtypes.append(newargtype)
- inargs_idx += 1
if len(newargs) < len(args):
extra = args[len(newargs):]
@@ -576,7 +582,7 @@
keepalives.append(keepalive)
newargs.append(newarg)
newargtypes.append(newargtype)
- return keepalives, newargs, newargtypes, outargs
+ return keepalives, newargs, newargtypes, outargs, errcheckargs
@staticmethod
def _is_primitive(argtype):
@@ -601,7 +607,7 @@
retval = restype._CData_retval(buf)
return retval
- def _build_result(self, restype, result, argsandobjs):
+ def _build_result(self, restype, result):
"""Build the function result:
If there is no OUT parameter, return the actual function result
If there is one OUT parameter, return it
@@ -611,11 +617,6 @@
# i.e. an array of ints. Now it takes a result, which is already a
# python object. All places that do "resbuffer[0]" should check that
# result is actually an int and just use it.
- #
- # Also, argsandobjs used to be "args" in __call__, now it's "newargs"
- # (i.e., the already unwrapped objects). It's used only when we have a
- # PARAMFLAG_FOUT and it's probably wrong, I'll fix it when I find a
- # failing test
retval = None
@@ -704,7 +705,7 @@
funcptr = self._getfuncptr(argtypes, restype, thisarg)
try:
result = self._call_funcptr(funcptr, *args)
- result = self._do_errcheck(result, args)
+ result, _ = self._do_errcheck(result, args)
except (TypeError, ArgumentError, UnicodeDecodeError):
assert self._slowpath_allowed
return CFuncPtr.__call__(self, *args)
diff --git a/lib_pypy/_ctypes/pointer.py b/lib_pypy/_ctypes/pointer.py
--- a/lib_pypy/_ctypes/pointer.py
+++ b/lib_pypy/_ctypes/pointer.py
@@ -1,6 +1,6 @@
import _rawffi
-import _ffi
+from _rawffi import alt as _ffi
from _ctypes.basics import _CData, _CDataMeta, cdata_from_address, ArgumentError
from _ctypes.basics import keepalive_key, store_reference, ensure_objects
from _ctypes.basics import sizeof, byref, as_ffi_pointer
diff --git a/lib_pypy/_ctypes/primitive.py b/lib_pypy/_ctypes/primitive.py
--- a/lib_pypy/_ctypes/primitive.py
+++ b/lib_pypy/_ctypes/primitive.py
@@ -1,4 +1,4 @@
-import _ffi
+from _rawffi import alt as _ffi
import _rawffi
import weakref
import sys
diff --git a/lib_pypy/_ctypes/structure.py b/lib_pypy/_ctypes/structure.py
--- a/lib_pypy/_ctypes/structure.py
+++ b/lib_pypy/_ctypes/structure.py
@@ -2,6 +2,8 @@
import _rawffi
from _ctypes.basics import _CData, _CDataMeta, keepalive_key,\
store_reference, ensure_objects, CArgObject
+from _ctypes.array import Array
+from _ctypes.pointer import _Pointer
import inspect
def names_and_fields(self, _fields_, superclass, anonymous_fields=None):
@@ -104,8 +106,11 @@
def __set__(self, obj, value):
fieldtype = self.ctype
cobj = fieldtype.from_param(value)
- if ensure_objects(cobj) is not None:
- key = keepalive_key(self.num)
+ key = keepalive_key(self.num)
+ if issubclass(fieldtype, _Pointer) and isinstance(cobj, Array):
+ # if our value is an Array we need the whole thing alive
+ store_reference(obj, key, cobj)
+ elif ensure_objects(cobj) is not None:
store_reference(obj, key, cobj._objects)
arg = cobj._get_buffer_value()
if fieldtype._fficompositesize is not None:
diff --git a/lib_pypy/_ffi.py b/lib_pypy/_ffi.py
new file mode 100644
--- /dev/null
+++ b/lib_pypy/_ffi.py
@@ -0,0 +1,2 @@
+# Backward compatibility hack
+from _rawffi.alt import *
diff --git a/lib_pypy/_pypy_testcapi.py b/lib_pypy/_pypy_testcapi.py
--- a/lib_pypy/_pypy_testcapi.py
+++ b/lib_pypy/_pypy_testcapi.py
@@ -33,14 +33,13 @@
# set link options
output_filename = modulename + _get_c_extension_suffix()
if sys.platform == 'win32':
- # XXX libpypy-c.lib is currently not installed automatically
- library = os.path.join(thisdir, '..', 'include', 'libpypy-c')
+ # XXX pyconfig.h uses a pragma to link to the import library,
+ # which is currently python27.lib
+ library = os.path.join(thisdir, '..', 'include', 'python27')
if not os.path.exists(library + '.lib'):
- #For a nightly build
- library = os.path.join(thisdir, '..', 'include', 'python27')
- if not os.path.exists(library + '.lib'):
- # For a local translation
- library = os.path.join(thisdir, '..', 'pypy', 'goal', 'libpypy-c')
+ # For a local translation or nightly build
+ library = os.path.join(thisdir, '..', 'pypy', 'goal', 'python27')
+ assert os.path.exists(library + '.lib'),'Could not find import library "%s"' % library
libraries = [library, 'oleaut32']
extra_ldargs = ['/MANIFEST', # needed for VC10
'/EXPORT:init' + modulename]
diff --git a/lib_pypy/_sha.py b/lib_pypy/_sha.py
--- a/lib_pypy/_sha.py
+++ b/lib_pypy/_sha.py
@@ -115,14 +115,14 @@
]
class sha:
- "An implementation of the MD5 hash function in pure Python."
+ "An implementation of the SHA hash function in pure Python."
digest_size = digestsize = 20
- block_size = 1
+ block_size = 512 // 8
def __init__(self):
"Initialisation."
-
+
# Initial message length in bits(!).
self.length = 0
self.count = [0, 0]
@@ -209,7 +209,7 @@
self.H2 = (self.H2 + C) & 0xffffffff
self.H3 = (self.H3 + D) & 0xffffffff
self.H4 = (self.H4 + E) & 0xffffffff
-
+
# Down from here all methods follow the Python Standard Library
# API of the sha module.
@@ -295,13 +295,13 @@
_long2bytesBigEndian(self.H3, 4) + \
_long2bytesBigEndian(self.H4, 4)
- self.H0 = H0
- self.H1 = H1
+ self.H0 = H0
+ self.H1 = H1
self.H2 = H2
self.H3 = H3
self.H4 = H4
- self.input = input
- self.count = count
+ self.input = input
+ self.count = count
return digest
diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py
--- a/lib_pypy/_sqlite3.py
+++ b/lib_pypy/_sqlite3.py
@@ -330,6 +330,14 @@
# SQLite version information
sqlite_version = str(_ffi.string(_lib.sqlite3_libversion()).decode('ascii'))
+_STMT_TYPE_UPDATE = 0
+_STMT_TYPE_DELETE = 1
+_STMT_TYPE_INSERT = 2
+_STMT_TYPE_REPLACE = 3
+_STMT_TYPE_OTHER = 4
+_STMT_TYPE_SELECT = 5
+_STMT_TYPE_INVALID = 6
+
class Error(StandardError):
pass
@@ -992,13 +1000,18 @@
self.__statement = self.__connection._statement_cache.get(sql)
if self.__connection._isolation_level is not None:
- if self.__statement._type in ("UPDATE", "DELETE", "INSERT", "REPLACE"):
+ if self.__statement._type in (
+ _STMT_TYPE_UPDATE,
+ _STMT_TYPE_DELETE,
+ _STMT_TYPE_INSERT,
+ _STMT_TYPE_REPLACE
+ ):
if not self.__connection._in_transaction:
self.__connection._begin()
- elif self.__statement._type == "OTHER":
+ elif self.__statement._type == _STMT_TYPE_OTHER:
if self.__connection._in_transaction:
self.__connection.commit()
- elif self.__statement._type == "SELECT":
+ elif self.__statement._type == _STMT_TYPE_SELECT:
if multiple:
raise ProgrammingError("You cannot execute SELECT "
"statements in executemany().")
@@ -1021,12 +1034,17 @@
self.__statement._reset()
raise self.__connection._get_exception(ret)
- if self.__statement._type in ("UPDATE", "DELETE", "INSERT", "REPLACE"):
+ if self.__statement._type in (
+ _STMT_TYPE_UPDATE,
+ _STMT_TYPE_DELETE,
+ _STMT_TYPE_INSERT,
+ _STMT_TYPE_REPLACE
+ ):
if self.__rowcount == -1:
self.__rowcount = 0
self.__rowcount += _lib.sqlite3_changes(self.__connection._db)
- if not multiple and self.__statement._type == "INSERT":
+ if not multiple and self.__statement._type == _STMT_TYPE_INSERT:
self.__lastrowid = _lib.sqlite3_last_insert_rowid(self.__connection._db)
else:
self.__lastrowid = None
@@ -1176,11 +1194,19 @@
first_word = sql.lstrip().split(" ")[0].upper()
if first_word == "":
- self._type = "INVALID"
- elif first_word in ("SELECT", "INSERT", "UPDATE", "DELETE", "REPLACE"):
- self._type = first_word
+ self._type = _STMT_TYPE_INVALID
+ elif first_word == "SELECT":
+ self._type = _STMT_TYPE_SELECT
+ elif first_word == "INSERT":
+ self._type = _STMT_TYPE_INSERT
+ elif first_word == "UPDATE":
+ self._type = _STMT_TYPE_UPDATE
+ elif first_word == "DELETE":
+ self._type = _STMT_TYPE_DELETE
+ elif first_word == "REPLACE":
+ self._type = _STMT_TYPE_REPLACE
else:
- self._type = "OTHER"
+ self._type = _STMT_TYPE_OTHER
if isinstance(sql, unicode):
sql = sql.encode('utf-8')
@@ -1193,7 +1219,7 @@
if ret == _lib.SQLITE_OK and not self._statement:
# an empty statement, work around that, as it's the least trouble
- self._type = "SELECT"
+ self._type = _STMT_TYPE_SELECT
c_sql = _ffi.new("char[]", b"select 42")
ret = _lib.sqlite3_prepare_v2(self.__con._db, c_sql, -1,
statement_star, next_char)
@@ -1312,7 +1338,12 @@
raise ValueError("parameters are of unsupported type")
def _get_description(self):
- if self._type in ("INSERT", "UPDATE", "DELETE", "REPLACE"):
+ if self._type in (
+ _STMT_TYPE_INSERT,
+ _STMT_TYPE_UPDATE,
+ _STMT_TYPE_DELETE,
+ _STMT_TYPE_REPLACE
+ ):
return None
desc = []
for i in xrange(_lib.sqlite3_column_count(self._statement)):
diff --git a/lib_pypy/audioop.py b/lib_pypy/audioop.py
new file mode 100644
--- /dev/null
+++ b/lib_pypy/audioop.py
@@ -0,0 +1,29 @@
+
+import struct
+
+
+class error(Exception):
+ pass
+
+
+def _check_size(size):
+ if size != 1 and size != 2 and size != 4:
+ raise error("Size should be 1, 2 or 4")
+
+
+def _check_params(length, size):
+ _check_size(size)
+ if length % size != 0:
+ raise error("not a whole number of frames")
+
+
+def getsample(cp, size, i):
+ _check_params(len(cp), size)
+ if not (0 <= i < len(cp) / size):
+ raise error("Index out of range")
+ if size == 1:
+ return struct.unpack_from("B", buffer(cp)[i:])[0]
+ elif size == 2:
+ return struct.unpack_from("H", buffer(cp)[i * 2:])[0]
+ elif size == 4:
+ return struct.unpack_from("I", buffer(cp)[i * 4:])[0]
diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py
--- a/lib_pypy/cffi/__init__.py
+++ b/lib_pypy/cffi/__init__.py
@@ -4,5 +4,5 @@
from .api import FFI, CDefError, FFIError
from .ffiplatform import VerificationError, VerificationMissing
-__version__ = "0.8"
-__version_info__ = (0, 8)
+__version__ = "0.8.1"
+__version_info__ = (0, 8, 1)
diff --git a/lib_pypy/datetime.py b/lib_pypy/datetime.py
--- a/lib_pypy/datetime.py
+++ b/lib_pypy/datetime.py
@@ -878,7 +878,6 @@
month = self._month
if day is None:
day = self._day
- year, month, day = _check_date_fields(year, month, day)
return date(year, month, day)
# Comparisons of date objects with other.
@@ -1389,8 +1388,6 @@
microsecond = self.microsecond
if tzinfo is True:
tzinfo = self.tzinfo
- hour, minute, second, microsecond = _check_time_fields(hour, minute, second, microsecond)
- _check_tzinfo_arg(tzinfo)
return time(hour, minute, second, microsecond, tzinfo)
def __nonzero__(self):
@@ -1608,9 +1605,6 @@
microsecond = self.microsecond
if tzinfo is True:
tzinfo = self.tzinfo
- year, month, day = _check_date_fields(year, month, day)
- hour, minute, second, microsecond = _check_time_fields(hour, minute, second, microsecond)
- _check_tzinfo_arg(tzinfo)
return datetime(year, month, day, hour, minute, second, microsecond,
tzinfo)
diff --git a/lib_pypy/pyrepl/simple_interact.py b/lib_pypy/pyrepl/simple_interact.py
--- a/lib_pypy/pyrepl/simple_interact.py
+++ b/lib_pypy/pyrepl/simple_interact.py
@@ -63,3 +63,6 @@
except KeyboardInterrupt:
console.write("\nKeyboardInterrupt\n")
console.resetbuffer()
+ except MemoryError:
+ console.write("\nMemoryError\n")
+ console.resetbuffer()
diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
--- a/pypy/config/pypyoption.py
+++ b/pypy/config/pypyoption.py
@@ -34,14 +34,14 @@
"struct", "_hashlib", "_md5", "_sha", "_minimal_curses", "cStringIO",
"thread", "itertools", "pyexpat", "_ssl", "cpyext", "array",
"binascii", "_multiprocessing", '_warnings',
- "_collections", "_multibytecodec", "micronumpy", "_ffi",
+ "_collections", "_multibytecodec", "micronumpy",
"_continuation", "_cffi_backend", "_csv", "cppyy", "_pypyjson"]
))
translation_modules = default_modules.copy()
translation_modules.update(dict.fromkeys(
["fcntl", "rctime", "select", "signal", "_rawffi", "zlib",
- "struct", "_md5", "cStringIO", "array", "_ffi",
+ "struct", "_md5", "cStringIO", "array",
"binascii",
# the following are needed for pyrepl (and hence for the
# interactive prompt/pdb)
@@ -96,7 +96,6 @@
# no _rawffi if importing rpython.rlib.clibffi raises ImportError
# or CompilationError or py.test.skip.Exception
"_rawffi" : ["rpython.rlib.clibffi"],
- "_ffi" : ["rpython.rlib.clibffi"],
"zlib" : ["rpython.rlib.rzlib"],
"bz2" : ["pypy.module.bz2.interp_bz2"],
diff --git a/pypy/doc/_ref.txt b/pypy/doc/_ref.txt
--- a/pypy/doc/_ref.txt
+++ b/pypy/doc/_ref.txt
@@ -109,6 +109,4 @@
.. _`rpython/translator/c/`: https://bitbucket.org/pypy/pypy/src/default/rpython/translator/c/
.. _`rpython/translator/c/src/stacklet/`: https://bitbucket.org/pypy/pypy/src/default/rpython/translator/c/src/stacklet/
.. _`rpython/translator/c/src/stacklet/stacklet.h`: https://bitbucket.org/pypy/pypy/src/default/rpython/translator/c/src/stacklet/stacklet.h
-.. _`rpython/translator/cli/`: https://bitbucket.org/pypy/pypy/src/default/rpython/translator/cli/
-.. _`rpython/translator/jvm/`: https://bitbucket.org/pypy/pypy/src/default/rpython/translator/jvm/
.. _`rpython/translator/tool/`: https://bitbucket.org/pypy/pypy/src/default/rpython/translator/tool/
diff --git a/pypy/doc/conf.py b/pypy/doc/conf.py
--- a/pypy/doc/conf.py
+++ b/pypy/doc/conf.py
@@ -38,7 +38,7 @@
# General information about the project.
project = u'PyPy'
-copyright = u'2013, The PyPy Project'
+copyright = u'2014, The PyPy Project'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -47,7 +47,7 @@
# The short X.Y version.
version = '2.2'
# The full version, including alpha/beta/rc tags.
-release = '2.2.0'
+release = '2.2.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/pypy/doc/config/translation.lldebug0.txt b/pypy/doc/config/translation.lldebug0.txt
new file mode 100644
--- /dev/null
+++ b/pypy/doc/config/translation.lldebug0.txt
@@ -0,0 +1,1 @@
+Like lldebug, but in addition compile C files with -O0
diff --git a/pypy/doc/cpython_differences.rst b/pypy/doc/cpython_differences.rst
--- a/pypy/doc/cpython_differences.rst
+++ b/pypy/doc/cpython_differences.rst
@@ -83,7 +83,7 @@
_winreg
-* Supported by being rewritten in pure Python (possibly using ``ctypes``):
+* Supported by being rewritten in pure Python (possibly using ``cffi``):
see the `lib_pypy/`_ directory. Examples of modules that we
support this way: ``ctypes``, ``cPickle``, ``cmath``, ``dbm``, ``datetime``...
Note that some modules are both in there and in the list above;
@@ -316,5 +316,4 @@
type and vice versa. For builtin types, a dictionary will be returned that
cannot be changed (but still looks and behaves like a normal dictionary).
-
.. include:: _ref.txt
diff --git a/pypy/doc/ctypes-implementation.rst b/pypy/doc/ctypes-implementation.rst
--- a/pypy/doc/ctypes-implementation.rst
+++ b/pypy/doc/ctypes-implementation.rst
@@ -72,7 +72,13 @@
Here is a list of the limitations and missing features of the
current implementation:
-* No support for ``PyXxx`` functions from ``libpython``, for obvious reasons.
+* ``ctypes.pythonapi`` lets you access the CPython C API emulation layer
+ of PyPy, at your own risks and without doing anything sensible about
+ the GIL. Since PyPy 2.3, these functions are also named with an extra
+ "Py", for example ``PyPyInt_FromLong()``. Basically, don't use this,
+ but it might more or less work in simple cases if you do. (Obviously,
+ assuming the PyObject pointers you get have any particular fields in
+ any particular order is just going to crash.)
* We copy Python strings instead of having pointers to raw buffers
diff --git a/pypy/doc/extending.rst b/pypy/doc/extending.rst
--- a/pypy/doc/extending.rst
+++ b/pypy/doc/extending.rst
@@ -5,119 +5,68 @@
This document tries to explain how to interface the PyPy python interpreter
with any external library.
-Note: We try to describe state-of-the art, but it
-might fade out of date as this is the front on which things are changing
-in pypy rapidly.
+Right now, there are the following possibilities of providing
+third-party modules for the PyPy python interpreter (in order of
+usefulness):
-Possibilities
-=============
+* Write them in pure Python and use CFFI_.
-Right now, there are three possibilities of providing third-party modules
-for the PyPy python interpreter (in order of usefulness):
+* Write them in pure Python and use ctypes_.
-* Write them in pure python and use ctypes, see ctypes_
- section
+* Write them in C++ and bind them through Reflex_.
-* Write them in pure python and use direct libffi low-level bindings, See
- \_ffi_ module description.
+* Write them as `RPython mixed modules`_.
-* Write them in RPython as mixedmodule_, using *rffi* as bindings.
-* Write them in C++ and bind them through Reflex_
+CFFI
+====
-.. _ctypes: #CTypes
-.. _\_ffi: #LibFFI
-.. _mixedmodule: #Mixed Modules
+CFFI__ is the recommended way. It is a way to write pure Python code
+that accesses C libraries. The idea is to support either ABI- or
+API-level access to C --- so that you can sanely access C libraries
+without depending on details like the exact field order in the C
+structures or the numerical value of all the constants. It works on
+both CPython (as a separate ``pip install cffi``) and on PyPy, where it
+is included by default.
+
+PyPy's JIT does a quite reasonable job on the Python code that calls C
+functions or manipulate C pointers with CFFI. (As of PyPy 2.2.1, it
+could still be improved, but is already good.)
+
+See the documentation here__.
+
+.. __: http://cffi.readthedocs.org/
+.. __: http://cffi.readthedocs.org/
+
CTypes
======
-The ctypes module in PyPy is ready to use.
-It's goal is to be as-compatible-as-possible with the
-`CPython ctypes`_ version. Right now it's able to support large examples,
-such as pyglet. PyPy is planning to have a 100% compatible ctypes
-implementation, without the CPython C-level API bindings (so it is very
-unlikely that direct object-manipulation trickery through this API will work).
+The goal of the ctypes module of PyPy is to be as compatible as possible
+with the `CPython ctypes`_ version. It works for large examples, such
+as pyglet. PyPy's implementation is not strictly 100% compatible with
+CPython, but close enough for most cases.
-We also provide a `ctypes-configure`_ for overcoming the platform dependencies,
-not relying on the ctypes codegen. This tool works by querying gcc about
-platform-dependent details (compiling small snippets of C code and running
-them), so it'll benefit not pypy-related ctypes-based modules as well.
+We also used to provide ``ctypes-configure`` for some API-level access.
+This is now viewed as a precursor of CFFI, which you should use instead.
+More (but older) information is available here__.
+Also, ctypes' performance is not as good as CFFI's.
-ctypes call are optimized by the JIT and the resulting machine code contains a
-direct call to the target C function. However, due to the very dynamic nature
-of ctypes, some overhead over a bare C call is still present, in particular to
-check/convert the types of the parameters. Moreover, even if most calls are
-optimized, some cannot and thus need to follow the slow path, not optimized by
-the JIT.
+.. _`CPython ctypes`: http://docs.python.org/library/ctypes.html
+.. __: ctypes-implementation.html
-.. _`ctypes-configure`: ctypes-implementation.html#ctypes-configure
-.. _`CPython ctypes`: http://docs.python.org/library/ctypes.html
+PyPy implements ctypes as pure Python code around two built-in modules
+called ``_ffi`` and ``_rawffi``, which give a very low-level binding to
+the C library libffi_. Nowadays it is not recommended to use directly
+these two modules.
-Pros
-----
+.. _libffi: http://sourceware.org/libffi/
-Stable, CPython-compatible API. Most calls are fast, optimized by JIT.
-
-Cons
-----
-
-Problems with platform-dependency (although we partially solve
-those). Although the JIT optimizes ctypes calls, some overhead is still
-present. The slow-path is very slow.
-
-
-LibFFI
-======
-
-Mostly in order to be able to write a ctypes module, we developed a very
-low-level libffi bindings called ``_ffi``. (libffi is a C-level library for dynamic calling,
-which is used by CPython ctypes). This library provides stable and usable API,
-although it's API is a very low-level one. It does not contain any
-magic. It is also optimized by the JIT, but has much less overhead than ctypes.
-
-Pros
-----
-
-It Works. Probably more suitable for a delicate code where ctypes magic goes
-in a way. All calls are optimized by the JIT, there is no slow path as in
-ctypes.
-
-Cons
-----
-
-It combines disadvantages of using ctypes with disadvantages of using mixed
-modules. CPython-incompatible API, very rough and low-level.
-
-Mixed Modules
-=============
-
-This is the most advanced and powerful way of writing extension modules.
-It has some serious disadvantages:
-
-* a mixed module needs to be written in RPython, which is far more
- complicated than Python (XXX link)
-
-* due to lack of separate compilation (as of July 2011), each
- compilation-check requires to recompile whole PyPy python interpreter,
- which takes 0.5-1h. We plan to solve this at some point in near future.
-
-* although rpython is a garbage-collected language, the border between
- C and RPython needs to be managed by hand (each object that goes into the
- C level must be explicitly freed).
-
-Some documentation is available `here`_
-
-.. _`here`: rffi.html
-
-XXX we should provide detailed docs about lltype and rffi, especially if we
- want people to follow that way.
Reflex
======
-This method is still experimental and is being exercised on a branch,
-`reflex-support`_, which adds the `cppyy`_ module.
+This method is still experimental. It adds the `cppyy`_ module.
The method works by using the `Reflex package`_ to provide reflection
information of the C++ code, which is then used to automatically generate
bindings at runtime.
@@ -168,3 +117,15 @@
to work around it in python or with a C++ helper function.
Although Reflex works on various platforms, the bindings with PyPy have only
been tested on Linux.
+
+
+RPython Mixed Modules
+=====================
+
+This is the internal way to write built-in extension modules in PyPy.
+It cannot be used by any 3rd-party module: the extension modules are
+*built-in*, not independently loadable DLLs.
+
+This is reserved for special cases: it gives direct access to e.g. the
+details of the JIT, allowing us to tweak its interaction with user code.
+This is how the numpy module is being developed.
diff --git a/pypy/doc/extradoc.rst b/pypy/doc/extradoc.rst
--- a/pypy/doc/extradoc.rst
+++ b/pypy/doc/extradoc.rst
@@ -72,13 +72,13 @@
.. _bibtex: https://bitbucket.org/pypy/extradoc/raw/tip/talk/bibtex.bib
.. _`Runtime Feedback in a Meta-Tracing JIT for Efficient Dynamic Languages`: https://bitbucket.org/pypy/extradoc/raw/extradoc/talk/icooolps2011/jit-hints.pdf
-.. _`Allocation Removal by Partial Evaluation in a Tracing JIT`: http://codespeak.net/svn/pypy/extradoc/talk/pepm2011/bolz-allocation-removal.pdf
-.. _`Towards a Jitting VM for Prolog Execution`: http://www.stups.uni-duesseldorf.de/publications/bolz-prolog-jit.pdf
+.. _`Allocation Removal by Partial Evaluation in a Tracing JIT`: https://bitbucket.org/pypy/extradoc/raw/extradoc/talk/pepm2011/bolz-allocation-removal.pdf
+.. _`Towards a Jitting VM for Prolog Execution`: http://www.stups.uni-duesseldorf.de/mediawiki/images/a/a7/Pub-BoLeSch2010.pdf
.. _`High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`: http://buildbot.pypy.org/misc/antocuni-thesis.pdf
.. _`How to *not* write Virtual Machines for Dynamic Languages`: https://bitbucket.org/pypy/extradoc/raw/tip/talk/dyla2007/dyla.pdf
.. _`Tracing the Meta-Level: PyPy's Tracing JIT Compiler`: https://bitbucket.org/pypy/extradoc/raw/tip/talk/icooolps2009/bolz-tracing-jit.pdf
.. _`Faster than C#: Efficient Implementation of Dynamic Languages on .NET`: https://bitbucket.org/pypy/extradoc/raw/tip/talk/icooolps2009-dotnet/cli-jit.pdf
-.. _`Automatic JIT Compiler Generation with Runtime Partial Evaluation`: http://www.stups.uni-duesseldorf.de/thesis/final-master.pdf
+.. _`Automatic JIT Compiler Generation with Runtime Partial Evaluation`: http://wwwold.cobra.cs.uni-duesseldorf.de/thesis/final-master.pdf
.. _`RPython: A Step towards Reconciling Dynamically and Statically Typed OO Languages`: http://www.disi.unige.it/person/AnconaD/papers/Recent_abstracts.html#AACM-DLS07
.. _`EU Reports`: index-report.html
.. _`Hardware Transactional Memory Support for Lightweight Dynamic Language Evolution`: http://sabi.net/nriley/pubs/dls6-riley.pdf
diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst
--- a/pypy/doc/faq.rst
+++ b/pypy/doc/faq.rst
@@ -426,25 +426,12 @@
Could we use LLVM?
------------------
-In theory yes. But we tried to use it 5 or 6 times already, as a
-translation backend or as a JIT backend --- and failed each time.
+There is a (static) translation backend using LLVM in the branch
+``llvm-translation-backend``. It can translate PyPy with or without the JIT on
+Linux.
-In more details: using LLVM as a (static) translation backend is
-pointless nowadays because you can generate C code and compile it with
-clang. (Note that compiling PyPy with clang gives a result that is not
-faster than compiling it with gcc.) We might in theory get extra
-benefits from LLVM's GC integration, but this requires more work on the
-LLVM side before it would be remotely useful. Anyway, it could be
-interfaced via a custom primitive in the C code.
-
-On the other hand, using LLVM as our JIT backend looks interesting as
-well --- but again we made an attempt, and it failed: LLVM has no way to
-patch the generated machine code.
-
-So the position of the core PyPy developers is that if anyone wants to
-make an N+1'th attempt with LLVM, they are welcome, and will be happy to
-provide help in the IRC channel, but they are left with the burden of proof
-that it works.
+Using LLVM as our JIT backend looks interesting as well -- we made an attempt,
+but it failed: LLVM has no way to patch the generated machine code.
----------------------
How do I compile PyPy?
diff --git a/pypy/doc/garbage_collection.rst b/pypy/doc/garbage_collection.rst
--- a/pypy/doc/garbage_collection.rst
+++ b/pypy/doc/garbage_collection.rst
@@ -210,4 +210,12 @@
are preserved. If the object dies then the pre-reserved location
becomes free garbage, to be collected at the next major collection.
+The exact name of this GC is either `minimark` or `incminimark`. The
+latter is a version that does major collections incrementally (i.e. one
+major collection is split along some number of minor collections, rather
+than being done all at once after a specific minor collection). The
+default is `incminimark`, as it seems to have a very minimal impact on
+performance and memory usage at the benefit of avoiding the long pauses
+of `minimark`.
+
.. include:: _ref.txt
diff --git a/pypy/doc/gc_info.rst b/pypy/doc/gc_info.rst
--- a/pypy/doc/gc_info.rst
+++ b/pypy/doc/gc_info.rst
@@ -6,7 +6,7 @@
Minimark
--------
-PyPy's default ``minimark`` garbage collector is configurable through
+PyPy's default ``incminimark`` garbage collector is configurable through
several environment variables:
``PYPY_GC_NURSERY``
@@ -14,6 +14,17 @@
Defaults to 1/2 of your cache or ``4M``.
Small values (like 1 or 1KB) are useful for debugging.
+``PYPY_GC_NURSERY_CLEANUP``
+ The interval at which nursery is cleaned up. Must
+ be smaller than the nursery size and bigger than the
+ biggest object we can allocate in the nursery.
+
+``PYPY_GC_INCREMENT_STEP``
+ The size of memory marked during the marking step. Default is size of
+ nursery times 2. If you set it too high, your GC is not incremental at
+ all. The minimum is 1.5 times the size that survives a minor
+ collection, so that some memory is reclaimed on every step.
+
``PYPY_GC_MAJOR_COLLECT``
Major collection memory factor.
Default is ``1.82``, which means trigger a major collection when the
diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst
--- a/pypy/doc/index.rst
+++ b/pypy/doc/index.rst
@@ -40,7 +40,7 @@
* `FAQ`_: some frequently asked questions.
-* `Release 2.2.0`_: the latest official release
+* `Release 2.2.1`_: the latest official release
* `PyPy Blog`_: news and status info about PyPy
@@ -110,7 +110,7 @@
.. _`Getting Started`: getting-started.html
.. _`Papers`: extradoc.html
.. _`Videos`: video-index.html
-.. _`Release 2.2.0`: http://pypy.org/download.html
+.. _`Release 2.2.1`: http://pypy.org/download.html
.. _`speed.pypy.org`: http://speed.pypy.org
.. _`RPython toolchain`: translation.html
.. _`potential project ideas`: project-ideas.html
diff --git a/pypy/doc/project-ideas.rst b/pypy/doc/project-ideas.rst
--- a/pypy/doc/project-ideas.rst
+++ b/pypy/doc/project-ideas.rst
@@ -74,6 +74,10 @@
The actual details would be rather differen in PyPy, but we would like to have
the same optimization implemented.
+Or maybe not. We can also play around with the idea of using a single
+representation: as a byte string in utf-8. (This idea needs some extra logic
+for efficient indexing, like a cache.)
+
.. _`optimized unicode representation`: http://www.python.org/dev/peps/pep-0393/
Translation Toolchain
diff --git a/pypy/doc/release-2.2.1.rst b/pypy/doc/release-2.2.1.rst
new file mode 100644
--- /dev/null
+++ b/pypy/doc/release-2.2.1.rst
@@ -0,0 +1,47 @@
+=======================================
+PyPy 2.2.1 - Incrementalism.1
+=======================================
+
+We're pleased to announce PyPy 2.2.1, which targets version 2.7.3 of the Python
+language. This is a bugfix release over 2.2.
+
+You can download the PyPy 2.2.1 release here:
+
+ http://pypy.org/download.html
+
+What is PyPy?
+=============
+
+PyPy is a very compliant Python interpreter, almost a drop-in replacement for
+CPython 2.7. It's fast (`pypy 2.2 and cpython 2.7.2`_ performance comparison)
+due to its integrated tracing JIT compiler.
+
+This release supports x86 machines running Linux 32/64, Mac OS X 64, Windows
+32, or ARM (ARMv6 or ARMv7, with VFPv3).
+
+Work on the native Windows 64 is still stalling, we would welcome a volunteer
+to handle that.
+
+.. _`pypy 2.2 and cpython 2.7.2`: http://speed.pypy.org
+
+Highlights
+==========
+
+This is a bugfix release. The most important bugs fixed are:
+
+* an issue in sockets' reference counting emulation, showing up
+ notably when using the ssl module and calling ``makefile()``.
+
+* Tkinter support on Windows.
+
+* If sys.maxunicode==65535 (on Windows and maybe OS/X), the json
+ decoder incorrectly decoded surrogate pairs.
+
+* some FreeBSD fixes.
+
+Note that CFFI 0.8.1 was released. Both versions 0.8 and 0.8.1 are
+compatible with both PyPy 2.2 and 2.2.1.
+
+
+Cheers,
+Armin Rigo & everybody
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -15,3 +15,45 @@
.. branch: armhf-singlefloat
JIT support for singlefloats on ARM using the hardfloat ABI
+
+.. branch: voidtype_strformat
+Better support for record numpy arrays
+
+.. branch: osx-eci-frameworks-makefile
+OSX: Ensure frameworks end up in Makefile when specified in External compilation info
+
+.. branch: less-stringly-ops
+Use subclasses of SpaceOperation instead of SpaceOperator objects.
+Random cleanups in flowspace and annotator.
+
+.. branch: ndarray-buffer
+adds support for the buffer= argument to the ndarray ctor
+
+.. branch: better_ftime_detect2
+On OpenBSD do not pull in libcompat.a as it is about to be removed.
+And more generally, if you have gettimeofday(2) you will not need ftime(3).
+
+.. branch: timeb_h
+Remove dependency upon <sys/timeb.h> on OpenBSD. This will be disappearing
+along with libcompat.a.
+
+.. branch: OlivierBlanvillain/fix-3-broken-links-on-pypy-published-pap-1386250839215
+Fix 3 broken links on PyPy published papers in docs.
+
+.. branch: jit-ordereddict
+
+.. branch: refactor-str-types
+Remove multimethods on str/unicode/bytearray and make the implementations share code.
+
+.. branch: remove-del-from-generatoriterator
+Speed up generators that don't yield inside try or with blocks by skipping
+unnecessary cleanup.
+
+.. branch: annotator
+Remove FlowObjSpace.
+Improve cohesion between rpython.flowspace and rpython.annotator.
+
+.. branch: detect-immutable-fields
+mapdicts keep track of whether or not an attribute is ever assigned to
+multiple times. If it's only assigned once then an elidable lookup is used when
+possible.
diff --git a/pypy/goal/getnightly.py b/pypy/goal/getnightly.py
--- a/pypy/goal/getnightly.py
+++ b/pypy/goal/getnightly.py
@@ -26,7 +26,12 @@
if branch == 'default':
branch = 'trunk'
-filename = 'pypy-c-jit-latest-%s.tar.bz2' % arch
+if '--nojit' in sys.argv:
+ kind = 'nojit'
+else:
+ kind = 'jit'
+
+filename = 'pypy-c-%s-latest-%s.tar.bz2' % (kind, arch)
url = 'http://buildbot.pypy.org/nightly/%s/%s' % (branch, filename)
tmp = py.path.local.mkdtemp()
mydir = tmp.chdir()
diff --git a/pypy/interpreter/astcompiler/codegen.py b/pypy/interpreter/astcompiler/codegen.py
--- a/pypy/interpreter/astcompiler/codegen.py
+++ b/pypy/interpreter/astcompiler/codegen.py
@@ -1234,6 +1234,8 @@
flags |= consts.CO_NESTED
if scope.is_generator:
flags |= consts.CO_GENERATOR
+ if scope.has_yield_inside_try:
+ flags |= consts.CO_YIELD_INSIDE_TRY
if scope.has_variable_arg:
flags |= consts.CO_VARARGS
if scope.has_keywords_arg:
diff --git a/pypy/interpreter/astcompiler/consts.py b/pypy/interpreter/astcompiler/consts.py
--- a/pypy/interpreter/astcompiler/consts.py
+++ b/pypy/interpreter/astcompiler/consts.py
@@ -17,6 +17,7 @@
CO_FUTURE_UNICODE_LITERALS = 0x20000
#pypy specific:
CO_KILL_DOCSTRING = 0x100000
+CO_YIELD_INSIDE_TRY = 0x200000
PyCF_SOURCE_IS_UTF8 = 0x0100
PyCF_DONT_IMPLY_DEDENT = 0x0200
diff --git a/pypy/interpreter/astcompiler/symtable.py b/pypy/interpreter/astcompiler/symtable.py
--- a/pypy/interpreter/astcompiler/symtable.py
+++ b/pypy/interpreter/astcompiler/symtable.py
@@ -43,6 +43,7 @@
self.child_has_free = False
self.nested = False
self.doc_removable = False
+ self._in_try_body_depth = 0
def lookup(self, name):
"""Find the scope of identifier 'name'."""
@@ -75,6 +76,14 @@
self.varnames.append(mangled)
return mangled
+ def note_try_start(self, try_node):
+ """Called when a try is found, before visiting the body."""
+ self._in_try_body_depth += 1
+
+ def note_try_end(self, try_node):
+ """Called after visiting a try body."""
+ self._in_try_body_depth -= 1
+
def note_yield(self, yield_node):
"""Called when a yield is found."""
raise SyntaxError("'yield' outside function", yield_node.lineno,
@@ -210,6 +219,7 @@
self.has_variable_arg = False
self.has_keywords_arg = False
self.is_generator = False
+ self.has_yield_inside_try = False
self.optimized = True
self.return_with_value = False
self.import_star = None
@@ -220,6 +230,8 @@
raise SyntaxError("'return' with argument inside generator",
self.ret.lineno, self.ret.col_offset)
self.is_generator = True
+ if self._in_try_body_depth > 0:
+ self.has_yield_inside_try = True
def note_return(self, ret):
if ret.value:
@@ -463,7 +475,12 @@
self.scope.new_temporary_name()
if wih.optional_vars:
self.scope.new_temporary_name()
- ast.GenericASTVisitor.visit_With(self, wih)
+ wih.context_expr.walkabout(self)
+ if wih.optional_vars:
+ wih.optional_vars.walkabout(self)
+ self.scope.note_try_start(wih)
+ self.visit_sequence(wih.body)
+ self.scope.note_try_end(wih)
def visit_arguments(self, arguments):
scope = self.scope
@@ -505,3 +522,16 @@
else:
role = SYM_ASSIGNED
self.note_symbol(name.id, role)
+
+ def visit_TryExcept(self, node):
+ self.scope.note_try_start(node)
+ self.visit_sequence(node.body)
+ self.scope.note_try_end(node)
+ self.visit_sequence(node.handlers)
+ self.visit_sequence(node.orelse)
+
+ def visit_TryFinally(self, node):
+ self.scope.note_try_start(node)
+ self.visit_sequence(node.body)
+ self.scope.note_try_end(node)
+ self.visit_sequence(node.finalbody)
diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py b/pypy/interpreter/astcompiler/test/test_compiler.py
--- a/pypy/interpreter/astcompiler/test/test_compiler.py
+++ b/pypy/interpreter/astcompiler/test/test_compiler.py
@@ -1,4 +1,4 @@
-import py
+import py, sys
from pypy.interpreter.astcompiler import codegen, astbuilder, symtable, optimize
from pypy.interpreter.pyparser import pyparse
from pypy.interpreter.pyparser.test import expressions
@@ -867,6 +867,9 @@
class AppTestCompiler:
+ def setup_class(cls):
+ cls.w_maxunicode = cls.space.wrap(sys.maxunicode)
+
def test_docstring_not_loaded(self):
import StringIO, dis, sys
ns = {}
@@ -911,7 +914,17 @@
l = [a for a in Foo()]
assert hint_called[0]
assert l == list(range(5))
-
+
+ def test_unicode_in_source(self):
+ import sys
+ d = {}
+ exec '# -*- coding: utf-8 -*-\n\nu = u"\xf0\x9f\x92\x8b"' in d
+ if sys.maxunicode > 65535 and self.maxunicode > 65535:
+ expected_length = 1
+ else:
+ expected_length = 2
+ assert len(d['u']) == expected_length
+
class TestOptimizations:
def count_instructions(self, source):
diff --git a/pypy/interpreter/astcompiler/test/test_symtable.py b/pypy/interpreter/astcompiler/test/test_symtable.py
--- a/pypy/interpreter/astcompiler/test/test_symtable.py
+++ b/pypy/interpreter/astcompiler/test/test_symtable.py
@@ -346,6 +346,25 @@
assert exc.msg == "'return' with argument inside generator"
scp = self.func_scope("def f():\n return\n yield x")
+ def test_yield_inside_try(self):
+ scp = self.func_scope("def f(): yield x")
+ assert not scp.has_yield_inside_try
+ scp = self.func_scope("def f():\n try:\n yield x\n except: pass")
+ assert scp.has_yield_inside_try
+ scp = self.func_scope("def f():\n try:\n yield x\n finally: pass")
+ assert scp.has_yield_inside_try
+ scp = self.func_scope("def f():\n with x: yield y")
+ assert scp.has_yield_inside_try
+
+ def test_yield_outside_try(self):
+ for input in ("try: pass\n except: pass",
+ "try: pass\n except: yield y",
+ "try: pass\n finally: pass",
+ "try: pass\n finally: yield y",
+ "with x: pass"):
+ input = "def f():\n yield y\n %s\n yield y" % (input,)
+ assert not self.func_scope(input).has_yield_inside_try
+
def test_return(self):
for input in ("class x: return", "return"):
exc = py.test.raises(SyntaxError, self.func_scope, input).value
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -240,6 +240,10 @@
msg = "__int__ returned non-int (type '%T')"
raise operationerrfmt(space.w_TypeError, msg, w_result)
+ def ord(self, space):
+ msg = "ord() expected string of length 1, but %T found"
+ raise operationerrfmt(space.w_TypeError, msg, self)
+
def __spacebind__(self, space):
return self
@@ -914,7 +918,7 @@
"""
return self.unpackiterable(w_iterable, expected_length)
- def listview_str(self, w_list):
+ def listview_bytes(self, w_list):
""" Return a list of unwrapped strings out of a list of strings. If the
argument is not a list or does not contain only strings, return None.
May return None anyway.
@@ -948,7 +952,7 @@
"""
return (None, None)
- def newlist_str(self, list_s):
+ def newlist_bytes(self, list_s):
return self.newlist([self.wrap(s) for s in list_s])
def newlist_unicode(self, list_u):
@@ -1402,6 +1406,9 @@
# This is here mostly just for gateway.int_unwrapping_space_method().
return bool(self.int_w(w_obj))
+ def ord(self, w_obj):
+ return w_obj.ord(self)
+
# This is all interface for gateway.py.
def gateway_int_w(self, w_obj):
if self.isinstance_w(w_obj, self.w_float):
diff --git a/pypy/interpreter/buffer.py b/pypy/interpreter/buffer.py
--- a/pypy/interpreter/buffer.py
+++ b/pypy/interpreter/buffer.py
@@ -30,11 +30,17 @@
raise ValueError("no raw buffer")
+ def is_writable(self):
+ return False
+
class RWBuffer(Buffer):
"""Abstract base class for read-write buffers."""
__slots__ = () # no extra slot here
+ def is_writable(self):
+ return True
+
def setitem(self, index, char):
"Write a character into the buffer."
raise NotImplementedError # Must be overriden. No bounds checks.
diff --git a/pypy/interpreter/error.py b/pypy/interpreter/error.py
--- a/pypy/interpreter/error.py
+++ b/pypy/interpreter/error.py
@@ -6,7 +6,7 @@
from errno import EINTR
from rpython.rlib import jit
-from rpython.rlib.objectmodel import we_are_translated
+from rpython.rlib.objectmodel import we_are_translated, specialize
from pypy.interpreter import debug
@@ -40,12 +40,11 @@
self.debug_excs = []
def clear(self, space):
- # for sys.exc_clear()
- self.w_type = space.w_None
- self._w_value = space.w_None
- self._application_traceback = None
- if not we_are_translated():
- del self.debug_excs[:]
+ # XXX remove this method. The point is that we cannot always
+ # hack at 'self' to clear w_type and _w_value, because in some
+ # corner cases the OperationError will be used again: see
+ # test_interpreter.py:test_with_statement_and_sys_clear.
+ pass
def match(self, space, w_check_class):
"Check if this application-level exception matches 'w_check_class'."
@@ -300,6 +299,10 @@
"""
self._application_traceback = traceback
+ at specialize.memo()
+def get_cleared_operation_error(space):
+ return OperationError(space.w_None, space.w_None)
+
# ____________________________________________________________
# optimization only: avoid the slowest operation -- the string
# formatting with '%' -- in the common case were we don't
diff --git a/pypy/interpreter/executioncontext.py b/pypy/interpreter/executioncontext.py
--- a/pypy/interpreter/executioncontext.py
+++ b/pypy/interpreter/executioncontext.py
@@ -1,5 +1,5 @@
import sys
-from pypy.interpreter.error import OperationError
+from pypy.interpreter.error import OperationError, get_cleared_operation_error
from rpython.rlib.unroll import unrolling_iterable
from rpython.rlib import jit
@@ -217,6 +217,17 @@
if frame: # else, the exception goes nowhere and is lost
frame.last_exception = operror
+ def clear_sys_exc_info(self):
+ # Find the frame out of which sys_exc_info() would return its result,
+ # and hack this frame's last_exception to become the cleared
+ # OperationError (which is different from None!).
+ frame = self.gettopframe_nohidden()
+ while frame:
+ if frame.last_exception is not None:
+ frame.last_exception = get_cleared_operation_error(self.space)
+ break
+ frame = self.getnextframe_nohidden(frame)
+
@jit.dont_look_inside
def settrace(self, w_func):
"""Set the global trace function."""
diff --git a/pypy/interpreter/gateway.py b/pypy/interpreter/gateway.py
--- a/pypy/interpreter/gateway.py
+++ b/pypy/interpreter/gateway.py
@@ -520,12 +520,13 @@
# When a BuiltinCode is stored in a Function object,
# you get the functionality of CPython's built-in function type.
- def __init__(self, func, unwrap_spec=None, self_type=None, descrmismatch=None):
+ def __init__(self, func, unwrap_spec=None, self_type=None,
+ descrmismatch=None, doc=None):
"NOT_RPYTHON"
# 'implfunc' is the interpreter-level function.
# Note that this uses a lot of (construction-time) introspection.
Code.__init__(self, func.__name__)
- self.docstring = func.__doc__
+ self.docstring = doc or func.__doc__
self.identifier = "%s-%s-%s" % (func.__module__, func.__name__,
getattr(self_type, '__name__', '*'))
@@ -805,8 +806,8 @@
raise TypeError("Varargs and keywords not supported in unwrap_spec")
argspec = ', '.join([arg for arg in args.args[1:]])
func_code = py.code.Source("""
- def f(w_obj, %(args)s):
- return w_obj.%(func_name)s(%(args)s)
+ def f(self, %(args)s):
+ return self.%(func_name)s(%(args)s)
""" % {'args': argspec, 'func_name': func.func_name})
d = {}
exec func_code.compile() in d
@@ -821,7 +822,7 @@
else:
assert isinstance(unwrap_spec, dict)
unwrap_spec = unwrap_spec.copy()
- unwrap_spec['w_obj'] = base_cls
+ unwrap_spec['self'] = base_cls
return interp2app(globals()['unwrap_spec'](**unwrap_spec)(f))
class interp2app(W_Root):
@@ -832,7 +833,7 @@
instancecache = {}
def __new__(cls, f, app_name=None, unwrap_spec=None, descrmismatch=None,
- as_classmethod=False):
+ as_classmethod=False, doc=None):
"NOT_RPYTHON"
# f must be a function whose name does NOT start with 'app_'
@@ -861,7 +862,8 @@
cls.instancecache[key] = self
self._code = BuiltinCode(f, unwrap_spec=unwrap_spec,
self_type=self_type,
- descrmismatch=descrmismatch)
+ descrmismatch=descrmismatch,
+ doc=doc)
self.__name__ = f.func_name
self.name = app_name
self.as_classmethod = as_classmethod
diff --git a/pypy/interpreter/generator.py b/pypy/interpreter/generator.py
--- a/pypy/interpreter/generator.py
+++ b/pypy/interpreter/generator.py
@@ -155,20 +155,6 @@
code_name = self.pycode.co_name
return space.wrap(code_name)
- def __del__(self):
- # Only bother enqueuing self to raise an exception if the frame is
- # still not finished and finally or except blocks are present.
- self.clear_all_weakrefs()
- if self.frame is not None:
- block = self.frame.lastblock
- while block is not None:
- if not isinstance(block, LoopBlock):
- self.enqueue_for_destruction(self.space,
- GeneratorIterator.descr_close,
- "interrupting generator of ")
- break
- block = block.previous
-
# Results can be either an RPython list of W_Root, or it can be an
# app-level W_ListObject, which also has an append() method, that's why we
# generate 2 versions of the function and 2 jit drivers.
@@ -211,3 +197,20 @@
return unpack_into
unpack_into = _create_unpack_into()
unpack_into_w = _create_unpack_into()
+
+
+class GeneratorIteratorWithDel(GeneratorIterator):
+
+ def __del__(self):
+ # Only bother enqueuing self to raise an exception if the frame is
+ # still not finished and finally or except blocks are present.
+ self.clear_all_weakrefs()
+ if self.frame is not None:
+ block = self.frame.lastblock
+ while block is not None:
+ if not isinstance(block, LoopBlock):
+ self.enqueue_for_destruction(self.space,
+ GeneratorIterator.descr_close,
+ "interrupting generator of ")
+ break
+ block = block.previous
diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py
--- a/pypy/interpreter/pycode.py
+++ b/pypy/interpreter/pycode.py
@@ -12,7 +12,7 @@
from pypy.interpreter.gateway import unwrap_spec
from pypy.interpreter.astcompiler.consts import (
CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS, CO_NESTED,
- CO_GENERATOR, CO_KILL_DOCSTRING)
+ CO_GENERATOR, CO_KILL_DOCSTRING, CO_YIELD_INSIDE_TRY)
from pypy.tool.stdlib_opcode import opcodedesc, HAVE_ARGUMENT
from rpython.rlib.rarithmetic import intmask
from rpython.rlib.objectmodel import compute_hash
@@ -31,7 +31,7 @@
# Magic numbers for the bytecode version in code objects.
# See comments in pypy/module/imp/importing.
cpython_magic, = struct.unpack("<i", imp.get_magic()) # host magic number
-default_magic = (0xf303 + 6) | 0x0a0d0000 # this PyPy's magic
+default_magic = (0xf303 + 7) | 0x0a0d0000 # this PyPy's magic
# (from CPython 2.7.0)
# cpython_code_signature helper
diff --git a/pypy/interpreter/pyframe.py b/pypy/interpreter/pyframe.py
--- a/pypy/interpreter/pyframe.py
+++ b/pypy/interpreter/pyframe.py
@@ -167,8 +167,12 @@
def run(self):
"""Start this frame's execution."""
if self.getcode().co_flags & pycode.CO_GENERATOR:
- from pypy.interpreter.generator import GeneratorIterator
- return self.space.wrap(GeneratorIterator(self))
+ if self.getcode().co_flags & pycode.CO_YIELD_INSIDE_TRY:
+ from pypy.interpreter.generator import GeneratorIteratorWithDel
+ return self.space.wrap(GeneratorIteratorWithDel(self))
+ else:
+ from pypy.interpreter.generator import GeneratorIterator
+ return self.space.wrap(GeneratorIterator(self))
else:
return self.execute_frame()
diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py
--- a/pypy/interpreter/pyopcode.py
+++ b/pypy/interpreter/pyopcode.py
@@ -744,6 +744,9 @@
else:
raise OperationError(space.w_TypeError,
space.wrap("raise: no active exception to re-raise"))
+ if operror.w_type is space.w_None:
+ raise OperationError(space.w_TypeError,
+ space.wrap("raise: the exception to re-raise was cleared"))
# re-raise, no new traceback obj will be attached
self.last_exception = operror
raise RaiseWithExplicitTraceback(operror)
diff --git a/pypy/interpreter/pyparser/parsestring.py b/pypy/interpreter/pyparser/parsestring.py
--- a/pypy/interpreter/pyparser/parsestring.py
+++ b/pypy/interpreter/pyparser/parsestring.py
@@ -15,7 +15,6 @@
Yes, it's very inefficient.
Yes, CPython has very similar code.
"""
-
# we use ps as "pointer to s"
# q is the virtual last char index of the string
ps = 0
@@ -54,42 +53,10 @@
if unicode_literal: # XXX Py_UnicodeFlag is ignored for now
if encoding is None or encoding == "iso-8859-1":
# 'unicode_escape' expects latin-1 bytes, string is ready.
- buf = s
- bufp = ps
- bufq = q
- u = None
+ assert 0 <= ps <= q
+ substr = s[ps:q]
else:
- # String is utf8-encoded, but 'unicode_escape' expects
- # latin-1; So multibyte sequences must be escaped.
- lis = [] # using a list to assemble the value
- end = q
- # Worst case: "\XX" may become "\u005c\uHHLL" (12 bytes)
- while ps < end:
- if s[ps] == '\\':
- lis.append(s[ps])
- ps += 1
- if ord(s[ps]) & 0x80:
- # A multibyte sequence will follow, it will be
- # escaped like \u1234. To avoid confusion with
- # the backslash we just wrote, we emit "\u005c"
- # instead.
- lis.append("u005c")
- if ord(s[ps]) & 0x80: # XXX inefficient
- w, ps = decode_utf8(space, s, ps, end, "utf-16-be")
- rn = len(w)
- assert rn % 2 == 0
- for i in range(0, rn, 2):
- lis.append('\\u')
- lis.append(hexbyte(ord(w[i])))
- lis.append(hexbyte(ord(w[i+1])))
- else:
- lis.append(s[ps])
- ps += 1
- buf = ''.join(lis)
- bufp = 0
- bufq = len(buf)
- assert 0 <= bufp <= bufq
- substr = buf[bufp:bufq]
+ substr = decode_unicode_utf8(space, s, ps, q)
if rawmode:
v = unicodehelper.decode_raw_unicode_escape(space, substr)
else:
@@ -121,6 +88,39 @@
result = "0" + result
return result
+def decode_unicode_utf8(space, s, ps, q):
+ # ****The Python 2.7 version, producing UTF-32 escapes****
+ # String is utf8-encoded, but 'unicode_escape' expects
+ # latin-1; So multibyte sequences must be escaped.
+ lis = [] # using a list to assemble the value
+ end = q
+ # Worst case:
+ # "<92><195><164>" may become "\u005c\U000000E4" (16 bytes)
+ while ps < end:
+ if s[ps] == '\\':
+ lis.append(s[ps])
+ ps += 1
+ if ord(s[ps]) & 0x80:
+ # A multibyte sequence will follow, it will be
+ # escaped like \u1234. To avoid confusion with
+ # the backslash we just wrote, we emit "\u005c"
+ # instead.
+ lis.append("u005c")
+ if ord(s[ps]) & 0x80: # XXX inefficient
+ w, ps = decode_utf8(space, s, ps, end, "utf-32-be")
+ rn = len(w)
+ assert rn % 4 == 0
+ for i in range(0, rn, 4):
+ lis.append('\\U')
+ lis.append(hexbyte(ord(w[i])))
+ lis.append(hexbyte(ord(w[i+1])))
+ lis.append(hexbyte(ord(w[i+2])))
+ lis.append(hexbyte(ord(w[i+3])))
+ else:
+ lis.append(s[ps])
+ ps += 1
+ return ''.join(lis)
+
def PyString_DecodeEscape(space, s, recode_encoding):
"""
Unescape a backslash-escaped string. If recode_encoding is non-zero,
diff --git a/pypy/interpreter/pyparser/test/test_parsestring.py b/pypy/interpreter/pyparser/test/test_parsestring.py
--- a/pypy/interpreter/pyparser/test/test_parsestring.py
+++ b/pypy/interpreter/pyparser/test/test_parsestring.py
@@ -1,10 +1,10 @@
from pypy.interpreter.pyparser import parsestring
-import py
+import py, sys
class TestParsetring:
- def parse_and_compare(self, literal, value):
+ def parse_and_compare(self, literal, value, encoding=None):
space = self.space
- w_ret = parsestring.parsestr(space, None, literal)
+ w_ret = parsestring.parsestr(space, encoding, literal)
if isinstance(value, str):
assert space.type(w_ret) == space.w_str
assert space.str_w(w_ret) == value
@@ -91,3 +91,18 @@
input = ["'", 'x', ' ', chr(0xc3), chr(0xa9), ' ', chr(92), 'n', "'"]
w_ret = parsestring.parsestr(space, 'utf8', ''.join(input))
assert space.str_w(w_ret) == ''.join(expected)
+
+ def test_wide_unicode_in_source(self):
+ if sys.maxunicode == 65535:
+ py.test.skip("requires a wide-unicode host")
+ self.parse_and_compare('u"\xf0\x9f\x92\x8b"',
+ unichr(0x1f48b),
+ encoding='utf-8')
+
+ def test_decode_unicode_utf8(self):
+ buf = parsestring.decode_unicode_utf8(self.space,
+ 'u"\xf0\x9f\x92\x8b"', 2, 6)
+ if sys.maxunicode == 65535:
+ assert buf == r"\U0000d83d\U0000dc8b"
+ else:
+ assert buf == r"\U0001f48b"
diff --git a/pypy/interpreter/test/test_gateway.py b/pypy/interpreter/test/test_gateway.py
--- a/pypy/interpreter/test/test_gateway.py
+++ b/pypy/interpreter/test/test_gateway.py
@@ -708,6 +708,18 @@
never_called
py.test.raises(AssertionError, space.wrap, gateway.interp2app_temp(g))
+ def test_interp2app_doc(self):
+ space = self.space
+ def f(space, w_x):
+ """foo"""
+ w_f = space.wrap(gateway.interp2app_temp(f))
+ assert space.unwrap(space.getattr(w_f, space.wrap('__doc__'))) == 'foo'
+ #
+ def g(space, w_x):
+ never_called
+ w_g = space.wrap(gateway.interp2app_temp(g, doc='bar'))
+ assert space.unwrap(space.getattr(w_g, space.wrap('__doc__'))) == 'bar'
+
class AppTestPyTestMark:
@py.test.mark.unlikely_to_exist
diff --git a/pypy/interpreter/test/test_interpreter.py b/pypy/interpreter/test/test_interpreter.py
--- a/pypy/interpreter/test/test_interpreter.py
+++ b/pypy/interpreter/test/test_interpreter.py
@@ -311,3 +311,73 @@
assert str(e) == "maximum recursion depth exceeded"
else:
assert 0, "should have raised!"
+
+ def test_with_statement_and_sys_clear(self):
+ import sys
+ class CM(object):
+ def __enter__(self):
+ return self
+ def __exit__(self, exc_type, exc_value, tb):
+ sys.exc_clear()
+ try:
+ with CM():
+ 1 / 0
+ raise AssertionError("should not be reached")
+ except ZeroDivisionError:
+ pass
+
+ def test_sys_clear_while_handling_exception(self):
+ import sys
+ def f():
+ try:
+ some_missing_name
+ except NameError:
+ g()
+ assert sys.exc_info()[0] is NameError
+ def g():
+ assert sys.exc_info()[0] is NameError
+ try:
+ 1 / 0
+ except ZeroDivisionError:
+ assert sys.exc_info()[0] is ZeroDivisionError
+ sys.exc_clear()
+ assert sys.exc_info()[0] is None
+ h()
+ assert sys.exc_info()[0] is None
+ def h():
+ assert sys.exc_info()[0] is None
+ f()
+
+ def test_sys_clear_while_handling_exception_nested(self):
+ import sys
+ def f():
+ try:
+ some_missing_name
+ except NameError:
+ g()
+ assert sys.exc_info()[0] is NameError
+ def g():
+ assert sys.exc_info()[0] is NameError
+ try:
+ 1 / 0
+ except ZeroDivisionError:
+ assert sys.exc_info()[0] is ZeroDivisionError
+ h1()
+ assert sys.exc_info()[0] is None
+ h()
+ assert sys.exc_info()[0] is None
+ def h():
+ assert sys.exc_info()[0] is None
+ def h1():
+ sys.exc_clear()
+ f()
+
+ def test_sys_clear_reraise(self):
+ import sys
+ def f():
+ try:
+ 1 / 0
+ except ZeroDivisionError:
+ sys.exc_clear()
+ raise
+ raises(TypeError, f)
diff --git a/pypy/interpreter/unicodehelper.py b/pypy/interpreter/unicodehelper.py
--- a/pypy/interpreter/unicodehelper.py
+++ b/pypy/interpreter/unicodehelper.py
@@ -30,7 +30,7 @@
# ____________________________________________________________
def encode(space, w_data, encoding=None, errors='strict'):
- from pypy.objspace.std.unicodetype import encode_object
+ from pypy.objspace.std.unicodeobject import encode_object
return encode_object(space, w_data, encoding, errors)
# These functions take and return unwrapped rpython strings and unicodes
diff --git a/pypy/module/__builtin__/app_operation.py b/pypy/module/__builtin__/app_operation.py
--- a/pypy/module/__builtin__/app_operation.py
+++ b/pypy/module/__builtin__/app_operation.py
@@ -1,4 +1,5 @@
+import operator
+
def bin(x):
- if not isinstance(x, (int, long)):
- raise TypeError("must be int or long")
- return x.__format__("#b")
+ value = operator.index(x)
+ return value.__format__("#b")
diff --git a/pypy/module/__builtin__/interp_memoryview.py b/pypy/module/__builtin__/interp_memoryview.py
--- a/pypy/module/__builtin__/interp_memoryview.py
+++ b/pypy/module/__builtin__/interp_memoryview.py
@@ -229,10 +229,14 @@
return W_MemoryView(buf)
def descr_buffer(self, space):
- """Note that memoryview() objects in PyPy support buffer(), whereas
- not in CPython; but CPython supports passing memoryview() to most
- built-in functions that accept buffers, with the notable exception
- of the buffer() built-in."""
+ """
+ Note that memoryview() is very inconsistent in CPython: it does not
+ support the buffer interface but does support the new buffer
+ interface: as a result, it is possible to pass memoryview to
+ e.g. socket.send() but not to file.write(). For simplicity and
+ consistency, in PyPy memoryview DOES support buffer(), which means
+ that it is accepted in more places than CPython.
+ """
return space.wrap(self.buf)
def descr_tobytes(self, space):
diff --git a/pypy/module/__builtin__/test/test_builtin.py b/pypy/module/__builtin__/test/test_builtin.py
--- a/pypy/module/__builtin__/test/test_builtin.py
+++ b/pypy/module/__builtin__/test/test_builtin.py
@@ -46,6 +46,15 @@
assert bin(2L) == "0b10"
assert bin(-2L) == "-0b10"
raises(TypeError, bin, 0.)
+ class C(object):
+ def __index__(self):
+ return 42
+ assert bin(C()) == bin(42)
+ class D(object):
+ def __int__(self):
+ return 42
+ exc = raises(TypeError, bin, D())
+ assert "index" in exc.value.message
def test_unichr(self):
import sys
diff --git a/pypy/module/__builtin__/test/test_classobj.py b/pypy/module/__builtin__/test/test_classobj.py
--- a/pypy/module/__builtin__/test/test_classobj.py
+++ b/pypy/module/__builtin__/test/test_classobj.py
@@ -1061,14 +1061,14 @@
assert (D() >= A()) == 'D:A.ge'
-class AppTestOldStyleClassStrDict(object):
+class AppTestOldStyleClassBytesDict(object):
def setup_class(cls):
if cls.runappdirect:
py.test.skip("can only be run on py.py")
def is_strdict(space, w_class):
- from pypy.objspace.std.dictmultiobject import StringDictStrategy
+ from pypy.objspace.std.dictmultiobject import BytesDictStrategy
w_d = w_class.getdict(space)
- return space.wrap(isinstance(w_d.strategy, StringDictStrategy))
+ return space.wrap(isinstance(w_d.strategy, BytesDictStrategy))
cls.w_is_strdict = cls.space.wrap(gateway.interp2app(is_strdict))
diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py
--- a/pypy/module/__pypy__/interp_magic.py
+++ b/pypy/module/__pypy__/interp_magic.py
@@ -3,7 +3,7 @@
from rpython.rlib.objectmodel import we_are_translated
from pypy.objspace.std.listobject import W_ListObject
from pypy.objspace.std.typeobject import MethodCache
-from pypy.objspace.std.mapdict import IndexCache
+from pypy.objspace.std.mapdict import MapAttrCache
from rpython.rlib import rposix, rgc
@@ -35,7 +35,7 @@
cache.misses = {}
cache.hits = {}
if space.config.objspace.std.withmapdict:
- cache = space.fromcache(IndexCache)
+ cache = space.fromcache(MapAttrCache)
cache.misses = {}
cache.hits = {}
@@ -45,7 +45,7 @@
in the mapdict cache with the given attribute name."""
assert space.config.objspace.std.withmethodcachecounter
assert space.config.objspace.std.withmapdict
More information about the pypy-commit
mailing list