From pypy.commits at gmail.com Thu Aug 1 11:08:32 2019
From: pypy.commits at gmail.com (arigo)
Date: Thu, 01 Aug 2019 08:08:32 -0700 (PDT)
Subject: [pypy-commit] pypy py3.6: Issue #3034
Message-ID: <5d430070.1c69fb81.39205.5229@mx.google.com>

Author: Armin Rigo
Branch: py3.6
Changeset: r97041:ad5f870b5e2e
Date: 2019-08-01 17:07 +0200
http://bitbucket.org/pypy/pypy/changeset/ad5f870b5e2e/

Log: Issue #3034

    Fix inside this code path of the import logic, which is not used
    any more except of course when it is (by zipimport only)

diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py
--- a/pypy/module/imp/importing.py
+++ b/pypy/module/imp/importing.py
@@ -266,6 +266,20 @@
         w_cpathname = space.w_None
     space.setitem(w_dict, space.newtext("__file__"), w_pathname)
     space.setitem(w_dict, space.newtext("__cached__"), w_cpathname)
+    #
+    # like PyImport_ExecCodeModuleObject(), we invoke
+    # _bootstrap_external._fix_up_module() here, which should try to
+    # fix a few more attributes (also __file__ and __cached__, but
+    # let's keep the logic that also sets them explicitly above, just
+    # in case)
+    space.appexec([w_dict, w_pathname, w_cpathname],
+        """(d, pathname, cpathname):
+            from importlib._bootstrap_external import _fix_up_module
+            name = d.get('__name__')
+            if name is not None:
+                _fix_up_module(d, name, pathname, cpathname)
+        """)
+    #
     code_w.exec_code(space, w_dict, w_dict)

 def rightmost_sep(filename):
diff --git a/pypy/module/zipimport/test/test_zipimport.py b/pypy/module/zipimport/test/test_zipimport.py
--- a/pypy/module/zipimport/test/test_zipimport.py
+++ b/pypy/module/zipimport/test/test_zipimport.py
@@ -462,6 +462,11 @@
         foo = __import__('foo.bar.one', None, None, [])
         assert foo.bar.one.attr == 'portion1 foo one'

+    def test___spec__(self):
+        self.writefile('uvwv.py', 'spec = __spec__')
+        mod = __import__('uvwv', globals(), locals(), [])
+        assert mod.spec is not None
+

 if os.sep != '/':
     class AppTestNativePathSep(AppTestZipimport):

From pypy.commits at gmail.com Thu Aug 1 11:44:41 2019
From: pypy.commits at gmail.com (rlamy)
Date: Thu, 01 Aug 2019 08:44:41 -0700 (PDT)
Subject: [pypy-commit] pypy py3.6: hg merge default
Message-ID: <5d4308e9.1c69fb81.80c96.3f22@mx.google.com>

Author: Ronan Lamy
Branch: py3.6
Changeset: r97042:c1cbd6d85ee4
Date: 2019-08-01 16:43 +0100
http://bitbucket.org/pypy/pypy/changeset/c1cbd6d85ee4/

Log: hg merge default

diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py
--- a/pypy/module/cpyext/api.py
+++ b/pypy/module/cpyext/api.py
@@ -1021,6 +1021,12 @@
             deadlock_error(pname)
         rgil.acquire()
         assert cpyext_glob_tid_ptr[0] == 0
+        if gil_auto_workaround:
+            # while we're in workaround-land, detect when a regular PyXxx()
+            # function is invoked at .so load-time, e.g. by a C++ global
+            # variable with an initializer, and in this case make sure we
+            # initialize things.
+            space.fromcache(State).make_sure_cpyext_is_imported()
     elif pygilstate_ensure:
         if cpyext_glob_tid_ptr[0] == tid:
             cpyext_glob_tid_ptr[0] = 0
@@ -1749,8 +1755,8 @@
     from rpython.rlib import rdynload
     from pypy.module.cpyext.pyobject import get_w_obj_and_decref

-    space.getbuiltinmodule("cpyext")    # mandatory to init cpyext
     state = space.fromcache(State)
+    state.make_sure_cpyext_is_imported()
     w_mod = state.find_extension(name, path)
     if w_mod is not None:
         rdynload.dlclose(dll)
diff --git a/pypy/module/cpyext/state.py b/pypy/module/cpyext/state.py
--- a/pypy/module/cpyext/state.py
+++ b/pypy/module/cpyext/state.py
@@ -45,6 +45,13 @@
         # XXX will leak if _PyDateTime_Import already called
         self.datetimeAPI = []

+        self.cpyext_is_imported = False
+
+    def make_sure_cpyext_is_imported(self):
+        if not self.cpyext_is_imported:
+            self.space.getbuiltinmodule("cpyext")    # mandatory to init cpyext
+            self.cpyext_is_imported = True
+
     def set_exception(self, operror):
         self.clear_exception()
         ec = self.space.getexecutioncontext()
diff --git a/rpython/rlib/rfile.py b/rpython/rlib/rfile.py
--- a/rpython/rlib/rfile.py
+++ b/rpython/rlib/rfile.py
@@ -312,6 +312,10 @@
         if not self._ll_file:
             raise ValueError("I/O operation on closed file")

+    @property
+    def closed(self):
+        return not self._ll_file
+
     def _fread(self, buf, n, stream):
         if not self._univ_newline:
             return c_fread(buf, 1, n, stream)
diff --git a/rpython/rlib/test/test_rfile.py b/rpython/rlib/test/test_rfile.py
--- a/rpython/rlib/test/test_rfile.py
+++ b/rpython/rlib/test/test_rfile.py
@@ -411,7 +411,10 @@
     def f():
         with open(fname, "w") as f:
             f.write("dupa")
+            assert not f.closed
+
         try:
+            assert f.closed
             f.write("dupb")
         except ValueError:
             pass

From pypy.commits at gmail.com Thu Aug 1 12:33:38 2019
From: pypy.commits at gmail.com (rlamy)
Date: Thu, 01 Aug 2019 09:33:38 -0700 (PDT)
Subject: [pypy-commit] pypy apptest-file: Close branch apptest-file
Message-ID: <5d431462.1c69fb81.02bd.6cbd@mx.google.com>

Author: Ronan Lamy
Branch: apptest-file
Changeset: r97043:1d528e9a0502
Date: 2019-08-01 16:33 +0000
http://bitbucket.org/pypy/pypy/changeset/1d528e9a0502/

Log: Close branch apptest-file

From pypy.commits at gmail.com Thu Aug 1 12:34:05 2019
From: pypy.commits at gmail.com (rlamy)
Date: Thu, 01 Aug 2019 09:34:05 -0700 (PDT)
Subject: [pypy-commit] pypy default: Merged in apptest-file (pull request #659)
Message-ID: <5d43147d.1c69fb81.3caa6.ebd4@mx.google.com>

Author: Ronan Lamy
Branch:
Changeset: r97044:c5d1731e67ad
Date: 2019-08-01 16:33 +0000
http://bitbucket.org/pypy/pypy/changeset/c5d1731e67ad/

Log: Merged in apptest-file (pull request #659)

    New mechanism for app-level testing

diff too long, truncating to 2000 out of 7533 lines

diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
--- a/pypy/config/pypyoption.py
+++ b/pypy/config/pypyoption.py
@@ -213,6 +213,10 @@
              BoolOption("newshortcut",
                         "cache and shortcut calling __new__ from builtin types",
                         default=False),
+             BoolOption("reinterpretasserts",
+                        "Perform reinterpretation when an assert fails "
+                        "(only relevant for tests)",
+                        default=False),
            ]),
 ])

diff --git a/pypy/conftest.py b/pypy/conftest.py
--- a/pypy/conftest.py
+++ b/pypy/conftest.py
@@ -1,6 +1,8 @@
 import py, pytest, sys, textwrap
 from inspect import isclass

+APPLEVEL_FN = 'apptest_*.py'
+
 # pytest settings
 rsyncdirs = ['.', '../lib-python', '../lib_pypy', '../demo']
 rsyncignore = ['_cache']
@@ -36,28 +38,39 @@
 def pytest_report_header():
     return "pytest-%s from %s" % (pytest.__version__, pytest.__file__)

-def pytest_addhooks(pluginmanager):
-
from rpython.conftest import LeakFinder - pluginmanager.register(LeakFinder()) + at pytest.hookimpl(tryfirst=True) +def pytest_cmdline_preparse(config, args): + if not (set(args) & {'-D', '--direct-apptest'}): + args.append('--assert=reinterp') def pytest_configure(config): global option option = config.option + mode_A = config.getoption('runappdirect') + mode_D = config.getoption('direct_apptest') + if mode_D or not mode_A: + config.addinivalue_line('python_files', APPLEVEL_FN) + if not mode_A and not mode_D: # 'own' tests + from rpython.conftest import LeakFinder + config.pluginmanager.register(LeakFinder()) def pytest_addoption(parser): - from rpython.conftest import pytest_addoption - pytest_addoption(parser) - group = parser.getgroup("pypy options") group.addoption('-A', '--runappdirect', action="store_true", default=False, dest="runappdirect", - help="run applevel tests directly on python interpreter (not through PyPy)") + help="run legacy applevel tests directly on python interpreter (not through PyPy)") + group.addoption('-D', '--direct-apptest', action="store_true", + default=False, dest="direct_apptest", + help="run applevel_XXX.py tests directly on host interpreter") group.addoption('--direct', action="store_true", default=False, dest="rundirect", help="run pexpect tests directly") group.addoption('--raise-operr', action="store_true", default=False, dest="raise_operr", help="Show the interp-level OperationError in app-level tests") + group.addoption('--applevel-rewrite', action="store_true", + default=False, dest="applevel_rewrite", + help="Use assert rewriting in app-level test files (slow)") @pytest.fixture(scope='function') def space(request): @@ -88,14 +101,21 @@ ensure_pytest_builtin_helpers() def pytest_pycollect_makemodule(path, parent): - return PyPyModule(path, parent) + if path.fnmatch(APPLEVEL_FN): + if parent.config.getoption('direct_apptest'): + return + from pypy.tool.pytest.apptest2 import AppTestModule + rewrite = parent.config.getoption('applevel_rewrite') + return AppTestModule(path, parent, rewrite_asserts=rewrite) + else: + return PyPyModule(path, parent) def is_applevel(item): from pypy.tool.pytest.apptest import AppTestFunction return isinstance(item, AppTestFunction) def pytest_collection_modifyitems(config, items): - if config.option.runappdirect: + if config.getoption('runappdirect') or config.getoption('direct_apptest'): return for item in items: if isinstance(item, py.test.Function): @@ -104,17 +124,17 @@ else: item.add_marker('interplevel') -class PyPyModule(py.test.collect.Module): + +class PyPyModule(pytest.Module): """ we take care of collecting classes both at app level and at interp-level (because we need to stick a space at the class) ourselves. 
""" def accept_regular_test(self): if self.config.option.runappdirect: - # only collect regular tests if we are in an 'app_test' directory, - # or in test_lib_pypy + # only collect regular tests if we are in test_lib_pypy for name in self.listnames(): - if "app_test" in name or "test_lib_pypy" in name: + if "test_lib_pypy" in name: return True return False return True @@ -186,6 +206,8 @@ appclass.obj.space = LazyObjSpaceGetter() appclass.obj.runappdirect = option.runappdirect - -def pytest_ignore_collect(path): +def pytest_ignore_collect(path, config): + if (config.getoption('direct_apptest') and not path.isdir() + and not path.fnmatch(APPLEVEL_FN)): + return True return path.check(link=1) diff --git a/pypy/doc/coding-guide.rst b/pypy/doc/coding-guide.rst --- a/pypy/doc/coding-guide.rst +++ b/pypy/doc/coding-guide.rst @@ -456,13 +456,10 @@ Testing modules in ``lib_pypy/`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You can go to the :source:`pypy/module/test_lib_pypy/` directory and invoke the testing tool -("py.test" or "python ../../pypy/test_all.py") to run tests against the -lib_pypy hierarchy. Note, that tests in :source:`pypy/module/test_lib_pypy/` are allowed -and encouraged to let their tests run at interpreter level although -:source:`lib_pypy/` modules eventually live at PyPy's application level. -This allows us to quickly test our python-coded reimplementations -against CPython. +You can go to the :source:`pypy/module/test_lib_pypy/` directory and invoke the +testing tool ("py.test" or "python ../../pypy/test_all.py") to run tests +against the lib_pypy hierarchy. This allows us to quickly test our +python-coded reimplementations against CPython. Testing modules in ``pypy/module`` @@ -585,25 +582,42 @@ module global level and use plain 'assert' statements thanks to the usage of the `py.test`_ tool. - -Application Level tests +Application level tests ~~~~~~~~~~~~~~~~~~~~~~~ For testing the conformance and well-behavedness of PyPy it is often sufficient to write "normal" application-level Python code that doesn't need to be aware of any particular -coding style or restrictions. If we have a choice we often -use application level tests which usually look like this:: +coding style or restrictions. If we have a choice we often +use application level tests which are in files whose name starts with the +`apptest_` prefix and look like this:: - def app_test_something(): + def test_this(): # application level test code +These application level test functions will run on top +of PyPy, i.e. they have no access to interpreter details. + +By default, they run on top of an untranslated PyPy which runs on top of the +host interpreter. When passing the `-D` option, they run directly on top of the +host interpreter, which is usually a translated pypy executable in this case:: + + pypy3 -m pytest -D pypy/ + +Note that in interpreted mode, only a small subset of pytest's functionality is +available. + +Mixed-level tests (deprecated) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Mixed-level tests are similar to application-level tests, the difference being +that they're just snippets of app-level code embedded in an interp-level test +file, like this:: + class AppTestSomething(object): def test_this(self): # application level test code -These application level test functions will run on top -of PyPy, i.e. they have no access to interpreter details. You cannot use imported modules from global level because they are imported at interpreter-level while you test code runs at application level. 
If you need to use modules diff --git a/pypy/doc/contributing.rst b/pypy/doc/contributing.rst --- a/pypy/doc/contributing.rst +++ b/pypy/doc/contributing.rst @@ -329,11 +329,18 @@ Testing After Translation ^^^^^^^^^^^^^^^^^^^^^^^^^ -While the usual invocation of `pytest` translates a piece of RPython code and -runs it, we have a test extension to run tests without translation, directly -on the host python. This is very convenient for modules such as `cpyext`, to -compare and contrast test results between CPython and PyPy. Untranslated tests -are invoked by using the `-A` or `--runappdirect` option to `pytest`:: +While the usual invocation of `pytest` runs app-level tests on an untranslated +PyPy that runs on top of CPython, we have a test extension to run tests +directly on the host python. This is very convenient for modules such as +`cpyext`, to compare and contrast test results between CPython and PyPy. + +App-level tests run directly on the host interpreter when passing `-D` or +`--direct-apptest` to `pytest`:: + + pypy3 -m pytest -D pypy/interpreter/test/apptest_pyframe.py + +Mixed-level tests are invoked by using the `-A` or `--runappdirect` option to +`pytest`:: python2 pytest.py -A pypy/module/cpyext/test diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -328,10 +328,10 @@ translate.log_config(config.objspace, "PyPy config object") # obscure hack to stuff the translation options into the translated PyPy - import pypy.module.sys + from pypy.module.sys.moduledef import Module as SysModule options = make_dict(config) - wrapstr = 'space.wrap(%r)' % (options) # import time - pypy.module.sys.Module.interpleveldefs['pypy_translation_info'] = wrapstr + wrapstr = 'space.wrap(%r)' % (options) # import time + SysModule.interpleveldefs['pypy_translation_info'] = wrapstr if config.objspace.usemodules._cffi_backend: self.hack_for_cffi_modules(driver) diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -520,13 +520,14 @@ return self.__class__.__name__ @not_rpython - def setbuiltinmodule(self, importname): + def setbuiltinmodule(self, pkgname): """load a lazy pypy/module and put it into sys.modules""" - if '.' in importname: - fullname = importname - importname = fullname.rsplit('.', 1)[1] + if '.' in pkgname: + fullname = "%s.moduledef" % (pkgname,) + importname = pkgname.rsplit('.', 1)[1] else: - fullname = "pypy.module.%s" % importname + fullname = "pypy.module.%s.moduledef" % pkgname + importname = pkgname Module = __import__(fullname, None, None, ["Module"]).Module @@ -623,22 +624,22 @@ def make_builtins(self): "only for initializing the space." 
- from pypy.module.exceptions import Module + from pypy.module.exceptions.moduledef import Module w_name = self.newtext('exceptions') self.exceptions_module = Module(self, w_name) self.exceptions_module.install() - from pypy.module.sys import Module + from pypy.module.imp.moduledef import Module + w_name = self.newtext('imp') + mod = Module(self, w_name) + mod.install() + + from pypy.module.sys.moduledef import Module w_name = self.newtext('sys') self.sys = Module(self, w_name) self.sys.install() - from pypy.module.imp import Module - w_name = self.newtext('imp') - mod = Module(self, w_name) - mod.install() - - from pypy.module.__builtin__ import Module + from pypy.module.__builtin__.moduledef import Module w_name = self.newtext('__builtin__') self.builtin = Module(self, w_name) w_builtin = self.wrap(self.builtin) diff --git a/pypy/interpreter/main.py b/pypy/interpreter/main.py --- a/pypy/interpreter/main.py +++ b/pypy/interpreter/main.py @@ -32,7 +32,7 @@ try: if space is None: - from pypy.objspace.std import StdObjSpace + from pypy.objspace.std.objspace import StdObjSpace space = StdObjSpace() pycode = compilecode(space, source, filename or '', cmd) @@ -79,7 +79,7 @@ implementation. """ if space is None: - from pypy.objspace.std import StdObjSpace + from pypy.objspace.std.objspace import StdObjSpace space = StdObjSpace() argv = [module_name] if args is not None: diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -28,17 +28,21 @@ @not_rpython def install(self): - """install this module, and it's submodules into + """Install this module, and its submodules into space.builtin_modules""" Module.install(self) if hasattr(self, "submodules"): space = self.space - name = space.unwrap(self.w_name) + pkgname = space.text_w(self.w_name) for sub_name, module_cls in self.submodules.iteritems(): if module_cls.submodule_name is None: module_cls.submodule_name = sub_name - module_name = space.newtext("%s.%s" % (name, sub_name)) - m = module_cls(space, module_name) + else: + assert module_cls.submodule_name == sub_name + name = "%s.%s" % (pkgname, sub_name) + module_cls.applevel_name = name + w_name = space.newtext(name) + m = module_cls(space, w_name) m.install() self.submodules_w.append(m) @@ -70,8 +74,7 @@ if cls.applevel_name is not None: return cls.applevel_name else: - pkgroot = cls.__module__ - return pkgroot.split('.')[-1] + return cls.__module__.split('.')[-2] def get(self, name): space = self.space @@ -140,10 +143,8 @@ # build a constant dictionary out of # applevel/interplevel definitions cls.loaders = loaders = {} - pkgroot = cls.__module__ + pkgroot = cls.__module__.rsplit('.', 1)[0] appname = cls.get_applevel_name() - if cls.submodule_name is not None: - appname += '.%s' % (cls.submodule_name,) for name, spec in cls.interpleveldefs.items(): loaders[name] = getinterpevalloader(pkgroot, spec) for name, spec in cls.appleveldefs.items(): @@ -154,7 +155,7 @@ def extra_interpdef(self, name, spec): cls = self.__class__ - pkgroot = cls.__module__ + pkgroot = cls.__module__.rsplit('.', 1)[0] loader = getinterpevalloader(pkgroot, spec) space = self.space w_obj = loader(space) diff --git a/pypy/interpreter/test/apptest_pyframe.py b/pypy/interpreter/test/apptest_pyframe.py new file mode 100644 --- /dev/null +++ b/pypy/interpreter/test/apptest_pyframe.py @@ -0,0 +1,575 @@ +import pytest + + at pytest.fixture +def tempfile(tmpdir): + return str(tmpdir / 'tempfile1') + +def test_f_locals(): + import 
sys + f = sys._getframe() + assert f.f_locals is locals() + +def test_f_globals(): + import sys + f = sys._getframe() + assert f.f_globals is globals() + pytest.raises(TypeError, "f.f_globals = globals()") + +def test_f_builtins(): + import sys, __builtin__ + f = sys._getframe() + assert f.f_builtins is __builtin__.__dict__ + +def test_f_code(): + def g(): + import sys + f = sys._getframe() + return f.f_code + assert g() is g.func_code + +def test_f_trace_del(): + import sys + f = sys._getframe() + del f.f_trace + assert f.f_trace is None + +def test_f_lineno(): + def g(): + import sys + f = sys._getframe() + x = f.f_lineno + y = f.f_lineno + z = f.f_lineno + return [x, y, z] + origin = g.func_code.co_firstlineno + assert g() == [origin+3, origin+4, origin+5] + +def test_f_lineno_set(tempfile): + def tracer(f, *args): + def y(f, *args): + return y + def x(f, *args): + f.f_lineno += 1 + return y # "return None" should have the same effect, but see + # test_local_trace_function_returning_None_ignored + return x + + open # force fetching of this name now + + def function(): + xyz + with open(tempfile, 'w') as f: + pass + return 3 + + import sys + sys.settrace(tracer) + function() + sys.settrace(None) + # assert did not crash + +def test_f_lineno_set_firstline(): + seen = [] + def tracer(f, event, *args): + seen.append((event, f.f_lineno)) + if len(seen) == 5: + f.f_lineno = 1 # bug shown only when setting lineno to 1 + return tracer + + def g(): + import sys + sys.settrace(tracer) + exec "x=1\ny=x+1\nz=y+1\nt=z+1\ns=t+1\n" in {} + sys.settrace(None) + + g() + assert seen == [('call', 1), + ('line', 1), + ('line', 2), + ('line', 3), + ('line', 4), + ('line', 2), + ('line', 3), + ('line', 4), + ('line', 5), + ('return', 5)] + +def test_f_back(): + import sys + def f(): + assert sys._getframe().f_code.co_name == g() + def g(): + return sys._getframe().f_back.f_code.co_name + f() + +def test_f_back_virtualref(): + import sys + def f(): + return g() + def g(): + return sys._getframe() + frame = f() + assert frame.f_back.f_code.co_name == 'f' + +def test_f_exc_xxx(): + import sys + + class OuterException(Exception): + pass + class InnerException(Exception): + pass + + def g(exc_info): + f = sys._getframe() + assert f.f_exc_type is None + assert f.f_exc_value is None + assert f.f_exc_traceback is None + try: + raise InnerException + except: + assert f.f_exc_type is exc_info[0] + assert f.f_exc_value is exc_info[1] + assert f.f_exc_traceback is exc_info[2] + try: + raise OuterException + except: + g(sys.exc_info()) + +def test_virtualref_through_traceback(): + import sys + def g(): + try: + raise ValueError + except: + _, _, tb = sys.exc_info() + return tb + def f(): + return g() + # + tb = f() + assert tb.tb_frame.f_code.co_name == 'g' + assert tb.tb_frame.f_back.f_code.co_name == 'f' + +def test_trace_basic(): + import sys + l = [] + class Tracer: + def __init__(self, i): + self.i = i + def trace(self, frame, event, arg): + l.append((self.i, frame.f_code.co_name, event, arg)) + if frame.f_code.co_name == 'g2': + return None # don't trace g2 + return Tracer(self.i+1).trace + def g3(n): + n -= 5 + return n + def g2(n): + n += g3(2) + n += g3(7) + return n + def g(n): + n += g2(3) + return n + def f(n): + n = g(n) + return n * 7 + sys.settrace(Tracer(0).trace) + x = f(4) + sys.settrace(None) + assert x == 42 + print l + assert l == [(0, 'f', 'call', None), + (1, 'f', 'line', None), + (0, 'g', 'call', None), + (1, 'g', 'line', None), + (0, 'g2', 'call', None), + (0, 'g3', 'call', None), + (1, 'g3', 
'line', None), + (2, 'g3', 'line', None), + (3, 'g3', 'return', -3), + (0, 'g3', 'call', None), + (1, 'g3', 'line', None), + (2, 'g3', 'line', None), + (3, 'g3', 'return', 2), + (2, 'g', 'line', None), + (3, 'g', 'return', 6), + (2, 'f', 'line', None), + (3, 'f', 'return', 42)] + +def test_trace_exc(): + import sys + l = [] + def ltrace(a,b,c): + if b == 'exception': + l.append(c) + return ltrace + def trace(a,b,c): return ltrace + def f(): + try: + raise Exception + except: + pass + sys.settrace(trace) + f() + sys.settrace(None) + assert len(l) == 1 + assert isinstance(l[0][1], Exception) + +def test_trace_ignore_hidden(): + import sys + import _testing + + l = [] + def trace(a,b,c): + l.append((a,b,c)) + + def f(): + h = _testing.Hidden() + r = h.meth() + return r + + sys.settrace(trace) + res = f() + sys.settrace(None) + assert len(l) == 1 + assert l[0][1] == 'call' + assert res == 'hidden' # sanity + +def test_trace_hidden_prints(tempfile): + import sys + + l = [] + def trace(a,b,c): + l.append((a,b,c)) + return trace + + outputf = open(tempfile, 'w') + def f(): + print >> outputf, 1 + print >> outputf, 2 + print >> outputf, 3 + return "that's the return value" + + sys.settrace(trace) + f() + sys.settrace(None) + outputf.close() + # should get 1 "call", 3 "line" and 1 "return" events, and no call + # or return for the internal app-level implementation of 'print' + assert len(l) == 6 + assert [what for (frame, what, arg) in l] == [ + 'call', 'line', 'line', 'line', 'line', 'return'] + assert l[-1][2] == "that's the return value" + +def test_trace_return_exc(): + import sys + l = [] + def trace(a,b,c): + if b in ('exception', 'return'): + l.append((b, c)) + return trace + + def g(): + raise Exception + def f(): + try: + g() + except: + pass + sys.settrace(trace) + f() + sys.settrace(None) + assert len(l) == 4 + assert l[0][0] == 'exception' + assert isinstance(l[0][1][1], Exception) + assert l[1] == ('return', None) + assert l[2][0] == 'exception' + assert isinstance(l[2][1][1], Exception) + assert l[3] == ('return', None) + +def test_trace_raises_on_return(): + import sys + def trace(frame, event, arg): + if event == 'return': + raise ValueError + else: + return trace + + def f(): return 1 + + for i in xrange(sys.getrecursionlimit() + 1): + sys.settrace(trace) + try: + f() + except ValueError: + pass + +def test_trace_try_finally(): + import sys + l = [] + def trace(frame, event, arg): + if event == 'exception': + l.append(arg) + return trace + + def g(): + try: + raise Exception + finally: + pass + + def f(): + try: + g() + except: + pass + + sys.settrace(trace) + f() + sys.settrace(None) + assert len(l) == 2 + assert issubclass(l[0][0], Exception) + assert issubclass(l[1][0], Exception) + +def test_trace_raise_three_arg(): + import sys + l = [] + def trace(frame, event, arg): + if event == 'exception': + l.append(arg) + return trace + + def g(): + try: + raise Exception + except Exception as e: + import sys + raise Exception, e, sys.exc_info()[2] + + def f(): + try: + g() + except: + pass + + sys.settrace(trace) + f() + sys.settrace(None) + assert len(l) == 2 + assert issubclass(l[0][0], Exception) + assert issubclass(l[1][0], Exception) + +def test_trace_generator_finalisation(): + import sys + l = [] + got_exc = [] + def trace(frame, event, arg): + l.append((frame.f_lineno, event)) + if event == 'exception': + got_exc.append(arg) + return trace + + d = {} + exec """if 1: + def g(): + try: + yield True + finally: + pass + + def f(): + try: + gen = g() + gen.next() + gen.close() + 
except: + pass + """ in d + f = d['f'] + + sys.settrace(trace) + f() + sys.settrace(None) + assert len(got_exc) == 1 + assert issubclass(got_exc[0][0], GeneratorExit) + assert l == [(8, 'call'), + (9, 'line'), + (10, 'line'), + (11, 'line'), + (2, 'call'), + (3, 'line'), + (4, 'line'), + (4, 'return'), + (12, 'line'), + (4, 'call'), + (4, 'exception'), + (6, 'line'), + (6, 'return'), + (12, 'return')] + +def test_dont_trace_on_reraise(): + import sys + l = [] + def ltrace(a,b,c): + if b == 'exception': + l.append(c) + return ltrace + def trace(a,b,c): return ltrace + def f(): + try: + 1/0 + except: + try: + raise + except: + pass + sys.settrace(trace) + f() + sys.settrace(None) + assert len(l) == 1 + assert issubclass(l[0][0], Exception) + +def test_dont_trace_on_raise_with_tb(): + import sys + l = [] + def ltrace(a,b,c): + if b == 'exception': + l.append(c) + return ltrace + def trace(a,b,c): return ltrace + def f(): + try: + raise Exception + except: + return sys.exc_info() + def g(): + exc, val, tb = f() + try: + raise exc, val, tb + except: + pass + sys.settrace(trace) + g() + sys.settrace(None) + assert len(l) == 1 + assert isinstance(l[0][1], Exception) + +def test_trace_changes_locals(): + import sys + def trace(frame, what, arg): + frame.f_locals['x'] = 42 + return trace + def f(x): + return x + sys.settrace(trace) + res = f(1) + sys.settrace(None) + assert res == 42 + +def test_set_unset_f_trace(): + import sys + seen = [] + def trace1(frame, what, arg): + seen.append((1, frame, frame.f_lineno, what, arg)) + return trace1 + def trace2(frame, what, arg): + seen.append((2, frame, frame.f_lineno, what, arg)) + return trace2 + def set_the_trace(f): + f.f_trace = trace1 + sys.settrace(trace2) + len(seen) # take one line: should not be traced + f = sys._getframe() + set_the_trace(f) + len(seen) # take one line: should not be traced + len(seen) # take one line: should not be traced + sys.settrace(None) # and this line should be the last line traced + len(seen) # take one line + del f.f_trace + len(seen) # take one line + firstline = set_the_trace.func_code.co_firstlineno + assert seen == [(1, f, firstline + 6, 'line', None), + (1, f, firstline + 7, 'line', None), + (1, f, firstline + 8, 'line', None)] + +def test_locals2fast_freevar_bug(): + import sys + def f(n): + class A(object): + def g(self): + return n + n = 42 + return A() + res = f(10).g() + assert res == 10 + # + def trace(*args): + return trace + sys.settrace(trace) + res = f(10).g() + sys.settrace(None) + assert res == 10 + +def test_throw_trace_bug(): + import sys + def f(): + yield 5 + gen = f() + assert next(gen) == 5 + seen = [] + def trace_func(frame, event, *args): + seen.append(event) + return trace_func + sys.settrace(trace_func) + try: + gen.throw(ValueError) + except ValueError: + pass + sys.settrace(None) + assert seen == ['call', 'exception', 'return'] + +def test_generator_trace_stopiteration(): + import sys + def f(): + yield 5 + gen = f() + assert next(gen) == 5 + seen = [] + def trace_func(frame, event, *args): + print('TRACE:', frame, event, args) + seen.append(event) + return trace_func + def g(): + for x in gen: + never_entered + sys.settrace(trace_func) + g() + sys.settrace(None) + print 'seen:', seen + # on Python 3 we get an extra 'exception' when 'for' catches + # StopIteration + assert seen == ['call', 'line', 'call', 'return', 'return'] + +def test_local_trace_function_returning_None_ignored(): + # behave the same as CPython does, and in contradiction with + # the documentation. 
+ def tracer(f, event, arg): + assert event == 'call' + return local_tracer + + seen = [] + def local_tracer(f, event, arg): + seen.append(event) + return None # but 'local_tracer' will be called again + + def function(): + a = 1 + a = 2 + a = 3 + + import sys + sys.settrace(tracer) + function() + sys.settrace(None) + assert seen == ["line", "line", "line", "return"] diff --git a/pypy/interpreter/test/demomixedmod/__init__.py b/pypy/interpreter/test/demomixedmod/__init__.py --- a/pypy/interpreter/test/demomixedmod/__init__.py +++ b/pypy/interpreter/test/demomixedmod/__init__.py @@ -1,15 +0,0 @@ -from pypy.interpreter.mixedmodule import MixedModule - -class Module(MixedModule): - interpleveldefs = { - '__name__' : '(space.wrap("mixedmodule"))', - '__doc__' : '(space.wrap("mixedmodule doc"))', - 'somefunc' : 'file1.somefunc', - 'value' : '(space.w_None)', - 'path' : 'file1.initpath(space)', - 'cpypath' : 'space.wrap(sys.path)' - } - - appleveldefs = { - 'someappfunc' : 'file2_app.someappfunc', - } diff --git a/pypy/interpreter/test/demomixedmod/__init__.py b/pypy/interpreter/test/demomixedmod/moduledef.py copy from pypy/interpreter/test/demomixedmod/__init__.py copy to pypy/interpreter/test/demomixedmod/moduledef.py diff --git a/pypy/interpreter/test/fixtures.py b/pypy/interpreter/test/fixtures.py new file mode 100644 --- /dev/null +++ b/pypy/interpreter/test/fixtures.py @@ -0,0 +1,5 @@ +from _pytest.tmpdir import TempdirFactory + +def tempfile(space, config): + tmpdir = TempdirFactory(config).getbasetemp() + return space.newtext(str(tmpdir / 'tempfile1')) diff --git a/pypy/interpreter/test/test_appinterp.py b/pypy/interpreter/test/test_appinterp.py --- a/pypy/interpreter/test/test_appinterp.py +++ b/pypy/interpreter/test/test_appinterp.py @@ -3,32 +3,32 @@ from pypy.interpreter.gateway import appdef, ApplevelClass, applevel_temp from pypy.interpreter.error import OperationError -def test_execwith_novars(space): - val = space.appexec([], """ - (): - return 42 - """) +def test_execwith_novars(space): + val = space.appexec([], """ + (): + return 42 + """) assert space.eq_w(val, space.wrap(42)) -def test_execwith_withvars(space): +def test_execwith_withvars(space): val = space.appexec([space.wrap(7)], """ - (x): - y = 6 * x - return y - """) + (x): + y = 6 * x + return y + """) assert space.eq_w(val, space.wrap(42)) -def test_execwith_compile_error(space): +def test_execwith_compile_error(space): excinfo = py.test.raises(OperationError, space.appexec, [], """ - (): - y y + (): + y y """) # NOTE: the following test only works because excinfo.value is not # normalized so far - assert str(excinfo.value.get_w_value(space)).find('y y') != -1 + assert str(excinfo.value.get_w_value(space)).find('y y') != -1 def test_simple_applevel(space): - app = appdef("""app(x,y): + app = appdef("""app(x,y): return x + y """) assert app.func_name == 'app' @@ -36,15 +36,15 @@ assert space.eq_w(w_result, space.wrap(42)) def test_applevel_with_one_default(space): - app = appdef("""app(x,y=1): + app = appdef("""app(x,y=1): return x + y """) assert app.func_name == 'app' - w_result = app(space, space.wrap(41)) + w_result = app(space, space.wrap(41)) assert space.eq_w(w_result, space.wrap(42)) def test_applevel_with_two_defaults(space): - app = appdef("""app(x=1,y=2): + app = appdef("""app(x=1,y=2): return x + y """) w_result = app(space, space.wrap(41), space.wrap(1)) @@ -58,19 +58,19 @@ def test_applevel_noargs(space): - app = appdef("""app(): - return 42 + app = appdef("""app(): + return 42 """) assert app.func_name 
== 'app' - w_result = app(space) + w_result = app(space) assert space.eq_w(w_result, space.wrap(42)) -def somefunc(arg2=42): - return arg2 +def somefunc(arg2=42): + return arg2 -def test_app2interp_somefunc(space): - app = appdef(somefunc) - w_result = app(space) +def test_app2interp_somefunc(space): + app = appdef(somefunc) + w_result = app(space) assert space.eq_w(w_result, space.wrap(42)) def test_applevel_functions(space, applevel_temp = applevel_temp): @@ -87,48 +87,49 @@ def test_applevel_class(space, applevel_temp = applevel_temp): app = applevel_temp(''' class C(object): - clsattr = 42 - def __init__(self, x=13): - self.attr = x + clsattr = 42 + def __init__(self, x=13): + self.attr = x ''') C = app.interphook('C') - c = C(space, space.wrap(17)) + c = C(space, space.wrap(17)) w_attr = space.getattr(c, space.wrap('clsattr')) assert space.eq_w(w_attr, space.wrap(42)) w_clsattr = space.getattr(c, space.wrap('attr')) assert space.eq_w(w_clsattr, space.wrap(17)) -def app_test_something_at_app_level(): +def app_test_something_at_app_level(): x = 2 assert x/2 == 1 -class AppTestMethods: - def test_some_app_test_method(self): +class AppTestMethods: + def test_some_app_test_method(self): assert 2 == 2 -class TestMixedModule: - def test_accesses(self): +class TestMixedModule: + def test_accesses(self): space = self.space - import demomixedmod - w_module = demomixedmod.Module(space, space.wrap('mixedmodule')) + from .demomixedmod.moduledef import Module + w_module = Module(space, space.wrap('mixedmodule')) space.appexec([w_module], """ - (module): - assert module.value is None + (module): + assert module.value is None assert module.__doc__ == 'mixedmodule doc' - assert module.somefunc is module.somefunc - result = module.somefunc() - assert result == True + assert module.somefunc is module.somefunc + result = module.somefunc() + assert result == True - assert module.someappfunc is module.someappfunc - appresult = module.someappfunc(41) - assert appresult == 42 + assert module.someappfunc is module.someappfunc + appresult = module.someappfunc(41) + assert appresult == 42 assert module.__dict__ is module.__dict__ - for name in ('somefunc', 'someappfunc', '__doc__', '__name__'): + for name in ('somefunc', 'someappfunc', '__doc__', '__name__'): assert name in module.__dict__ """) assert space.is_true(w_module.call('somefunc')) + assert Module.get_applevel_name() == 'demomixedmod' def test_whacking_at_loaders(self): """Some MixedModules change 'self.loaders' in __init__(), but doing diff --git a/pypy/interpreter/test/test_extmodules.py b/pypy/interpreter/test/test_extmodules.py --- a/pypy/interpreter/test/test_extmodules.py +++ b/pypy/interpreter/test/test_extmodules.py @@ -2,10 +2,10 @@ import pytest from pypy.config.pypyoption import get_pypy_config -from pypy.objspace.std import StdObjSpace +from pypy.objspace.std.objspace import StdObjSpace from rpython.tool.udir import udir -mod_init = """ +mod_def = """ from pypy.interpreter.mixedmodule import MixedModule import time @@ -45,8 +45,9 @@ pkg.join("__init__.py").write("# package") mod = pkg.join("extmod") mod.ensure(dir=True) - mod.join("__init__.py").write(mod_init) + mod.join("__init__.py").write("#") mod.join("interp_time.py").write(mod_interp) + mod.join("moduledef.py").write(mod_def) class AppTestExtModules(object): def setup_class(cls): diff --git a/pypy/interpreter/test/test_mixedmodule.py b/pypy/interpreter/test/test_mixedmodule.py --- a/pypy/interpreter/test/test_mixedmodule.py +++ b/pypy/interpreter/test/test_mixedmodule.py @@ -29,7 
+29,10 @@ m.install() assert self.space.builtin_modules["test_module"] is m - assert isinstance(self.space.builtin_modules["test_module.sub"], SubModule) + submod = self.space.builtin_modules["test_module.sub"] + assert isinstance(submod, SubModule) + assert submod.get_applevel_name() == "test_module.sub" + class AppTestMixedModule(object): pytestmark = py.test.mark.skipif("config.option.runappdirect") diff --git a/pypy/interpreter/test/test_pyframe.py b/pypy/interpreter/test/test_pyframe.py --- a/pypy/interpreter/test/test_pyframe.py +++ b/pypy/interpreter/test/test_pyframe.py @@ -1,4 +1,3 @@ -from rpython.tool import udir from pypy.conftest import option from pypy.interpreter.gateway import interp2app @@ -9,8 +8,6 @@ def setup_class(cls): space = cls.space - cls.w_udir = cls.space.wrap(str(udir.udir)) - cls.w_tempfile1 = cls.space.wrap(str(udir.udir.join('tempfile1'))) if not option.runappdirect: w_call_further = cls.space.appexec([], """(): def call_further(f): @@ -25,113 +22,6 @@ # test for the presence of the attributes, not functionality - def test_f_locals(self): - import sys - f = sys._getframe() - assert f.f_locals is locals() - - def test_f_globals(self): - import sys - f = sys._getframe() - assert f.f_globals is globals() - raises(TypeError, "f.f_globals = globals()") - - def test_f_builtins(self): - import sys, __builtin__ - f = sys._getframe() - assert f.f_builtins is __builtin__.__dict__ - - def test_f_code(self): - def g(): - import sys - f = sys._getframe() - return f.f_code - assert g() is g.func_code - - def test_f_trace_del(self): - import sys - f = sys._getframe() - del f.f_trace - assert f.f_trace is None - - def test_f_lineno(self): - def g(): - import sys - f = sys._getframe() - x = f.f_lineno - y = f.f_lineno - z = f.f_lineno - return [x, y, z] - origin = g.func_code.co_firstlineno - assert g() == [origin+3, origin+4, origin+5] - - def test_f_lineno_set(self): - def tracer(f, *args): - def y(f, *args): - return y - def x(f, *args): - f.f_lineno += 1 - return y # "return None" should have the same effect, but see - # test_local_trace_function_returning_None_ignored - return x - - open # force fetching of this name now - - def function(): - xyz - with open(self.tempfile1, 'w') as f: - pass - return 3 - - import sys - sys.settrace(tracer) - function() - sys.settrace(None) - # assert did not crash - - def test_f_lineno_set_firstline(self): - seen = [] - def tracer(f, event, *args): - seen.append((event, f.f_lineno)) - if len(seen) == 5: - f.f_lineno = 1 # bug shown only when setting lineno to 1 - return tracer - - def g(): - import sys - sys.settrace(tracer) - exec "x=1\ny=x+1\nz=y+1\nt=z+1\ns=t+1\n" in {} - sys.settrace(None) - - g() - assert seen == [('call', 1), - ('line', 1), - ('line', 2), - ('line', 3), - ('line', 4), - ('line', 2), - ('line', 3), - ('line', 4), - ('line', 5), - ('return', 5)] - - def test_f_back(self): - import sys - def f(): - assert sys._getframe().f_code.co_name == g() - def g(): - return sys._getframe().f_back.f_code.co_name - f() - - def test_f_back_virtualref(self): - import sys - def f(): - return g() - def g(): - return sys._getframe() - frame = f() - assert frame.f_back.f_code.co_name == 'f' - def test_f_back_hidden(self): if not hasattr(self, 'call_further'): skip("not for runappdirect testing") @@ -148,362 +38,6 @@ assert f1bis is f1 assert f0.f_back is f1 - def test_f_exc_xxx(self): - import sys - - class OuterException(Exception): - pass - class InnerException(Exception): - pass - - def g(exc_info): - f = sys._getframe() - assert 
f.f_exc_type is None - assert f.f_exc_value is None - assert f.f_exc_traceback is None - try: - raise InnerException - except: - assert f.f_exc_type is exc_info[0] - assert f.f_exc_value is exc_info[1] - assert f.f_exc_traceback is exc_info[2] - try: - raise OuterException - except: - g(sys.exc_info()) - - def test_virtualref_through_traceback(self): - import sys - def g(): - try: - raise ValueError - except: - _, _, tb = sys.exc_info() - return tb - def f(): - return g() - # - tb = f() - assert tb.tb_frame.f_code.co_name == 'g' - assert tb.tb_frame.f_back.f_code.co_name == 'f' - - def test_trace_basic(self): - import sys - l = [] - class Tracer: - def __init__(self, i): - self.i = i - def trace(self, frame, event, arg): - l.append((self.i, frame.f_code.co_name, event, arg)) - if frame.f_code.co_name == 'g2': - return None # don't trace g2 - return Tracer(self.i+1).trace - def g3(n): - n -= 5 - return n - def g2(n): - n += g3(2) - n += g3(7) - return n - def g(n): - n += g2(3) - return n - def f(n): - n = g(n) - return n * 7 - sys.settrace(Tracer(0).trace) - x = f(4) - sys.settrace(None) - assert x == 42 - print l - assert l == [(0, 'f', 'call', None), - (1, 'f', 'line', None), - (0, 'g', 'call', None), - (1, 'g', 'line', None), - (0, 'g2', 'call', None), - (0, 'g3', 'call', None), - (1, 'g3', 'line', None), - (2, 'g3', 'line', None), - (3, 'g3', 'return', -3), - (0, 'g3', 'call', None), - (1, 'g3', 'line', None), - (2, 'g3', 'line', None), - (3, 'g3', 'return', 2), - (2, 'g', 'line', None), - (3, 'g', 'return', 6), - (2, 'f', 'line', None), - (3, 'f', 'return', 42)] - - def test_trace_exc(self): - import sys - l = [] - def ltrace(a,b,c): - if b == 'exception': - l.append(c) - return ltrace - def trace(a,b,c): return ltrace - def f(): - try: - raise Exception - except: - pass - sys.settrace(trace) - f() - sys.settrace(None) - assert len(l) == 1 - assert isinstance(l[0][1], Exception) - - def test_trace_ignore_hidden(self): - import sys - import _testing - - l = [] - def trace(a,b,c): - l.append((a,b,c)) - - def f(): - h = _testing.Hidden() - r = h.meth() - return r - - sys.settrace(trace) - res = f() - sys.settrace(None) - assert len(l) == 1 - assert l[0][1] == 'call' - assert res == 'hidden' # sanity - - def test_trace_hidden_prints(self): - import sys - - l = [] - def trace(a,b,c): - l.append((a,b,c)) - return trace - - outputf = open(self.tempfile1, 'w') - def f(): - print >> outputf, 1 - print >> outputf, 2 - print >> outputf, 3 - return "that's the return value" - - sys.settrace(trace) - f() - sys.settrace(None) - outputf.close() - # should get 1 "call", 3 "line" and 1 "return" events, and no call - # or return for the internal app-level implementation of 'print' - assert len(l) == 6 - assert [what for (frame, what, arg) in l] == [ - 'call', 'line', 'line', 'line', 'line', 'return'] - assert l[-1][2] == "that's the return value" - - def test_trace_return_exc(self): - import sys - l = [] - def trace(a,b,c): - if b in ('exception', 'return'): - l.append((b, c)) - return trace - - def g(): - raise Exception - def f(): - try: - g() - except: - pass - sys.settrace(trace) - f() - sys.settrace(None) - assert len(l) == 4 - assert l[0][0] == 'exception' - assert isinstance(l[0][1][1], Exception) - assert l[1] == ('return', None) - assert l[2][0] == 'exception' - assert isinstance(l[2][1][1], Exception) - assert l[3] == ('return', None) - - def test_trace_raises_on_return(self): - import sys - def trace(frame, event, arg): - if event == 'return': - raise ValueError - else: - return trace - - 
def f(): return 1 - - for i in xrange(sys.getrecursionlimit() + 1): - sys.settrace(trace) - try: - f() - except ValueError: - pass - - def test_trace_try_finally(self): - import sys - l = [] - def trace(frame, event, arg): - if event == 'exception': - l.append(arg) - return trace - - def g(): - try: - raise Exception - finally: - pass - - def f(): - try: - g() - except: - pass - - sys.settrace(trace) - f() - sys.settrace(None) - assert len(l) == 2 - assert issubclass(l[0][0], Exception) - assert issubclass(l[1][0], Exception) - - def test_trace_raise_three_arg(self): - import sys - l = [] - def trace(frame, event, arg): - if event == 'exception': - l.append(arg) - return trace - - def g(): - try: - raise Exception - except Exception as e: - import sys - raise Exception, e, sys.exc_info()[2] - - def f(): - try: - g() - except: - pass - - sys.settrace(trace) - f() - sys.settrace(None) - assert len(l) == 2 - assert issubclass(l[0][0], Exception) - assert issubclass(l[1][0], Exception) - - def test_trace_generator_finalisation(self): - import sys - l = [] - got_exc = [] - def trace(frame, event, arg): - l.append((frame.f_lineno, event)) - if event == 'exception': - got_exc.append(arg) - return trace - - d = {} - exec """if 1: - def g(): - try: - yield True - finally: - pass - - def f(): - try: - gen = g() - gen.next() - gen.close() - except: - pass - """ in d - f = d['f'] - - sys.settrace(trace) - f() - sys.settrace(None) - assert len(got_exc) == 1 - assert issubclass(got_exc[0][0], GeneratorExit) - assert l == [(8, 'call'), - (9, 'line'), - (10, 'line'), - (11, 'line'), - (2, 'call'), - (3, 'line'), - (4, 'line'), - (4, 'return'), - (12, 'line'), - (4, 'call'), - (4, 'exception'), - (6, 'line'), - (6, 'return'), - (12, 'return')] - - def test_dont_trace_on_reraise(self): - import sys - l = [] - def ltrace(a,b,c): - if b == 'exception': - l.append(c) - return ltrace - def trace(a,b,c): return ltrace - def f(): - try: - 1/0 - except: - try: - raise - except: - pass - sys.settrace(trace) - f() - sys.settrace(None) - assert len(l) == 1 - assert issubclass(l[0][0], Exception) - - def test_dont_trace_on_raise_with_tb(self): - import sys - l = [] - def ltrace(a,b,c): - if b == 'exception': - l.append(c) - return ltrace - def trace(a,b,c): return ltrace - def f(): - try: - raise Exception - except: - return sys.exc_info() - def g(): - exc, val, tb = f() - try: - raise exc, val, tb - except: - pass - sys.settrace(trace) - g() - sys.settrace(None) - assert len(l) == 1 - assert isinstance(l[0][1], Exception) - - def test_trace_changes_locals(self): - import sys - def trace(frame, what, arg): - frame.f_locals['x'] = 42 - return trace - def f(x): - return x - sys.settrace(trace) - res = f(1) - sys.settrace(None) - assert res == 42 - def test_fast2locals_called_lazily(self): import sys class FrameHolder: @@ -522,110 +56,3 @@ assert res == 2 if hasattr(self, "check_no_w_locals"): # not appdirect assert self.check_no_w_locals(fh.frame) - - def test_set_unset_f_trace(self): - import sys - seen = [] - def trace1(frame, what, arg): - seen.append((1, frame, frame.f_lineno, what, arg)) - return trace1 - def trace2(frame, what, arg): - seen.append((2, frame, frame.f_lineno, what, arg)) - return trace2 - def set_the_trace(f): - f.f_trace = trace1 - sys.settrace(trace2) - len(seen) # take one line: should not be traced - f = sys._getframe() - set_the_trace(f) - len(seen) # take one line: should not be traced - len(seen) # take one line: should not be traced - sys.settrace(None) # and this line should be the last 
line traced - len(seen) # take one line - del f.f_trace - len(seen) # take one line - firstline = set_the_trace.func_code.co_firstlineno - assert seen == [(1, f, firstline + 6, 'line', None), - (1, f, firstline + 7, 'line', None), - (1, f, firstline + 8, 'line', None)] - - def test_locals2fast_freevar_bug(self): - import sys - def f(n): - class A(object): - def g(self): - return n - n = 42 - return A() - res = f(10).g() - assert res == 10 - # - def trace(*args): - return trace - sys.settrace(trace) - res = f(10).g() - sys.settrace(None) - assert res == 10 - - def test_throw_trace_bug(self): - import sys - def f(): - yield 5 - gen = f() - assert next(gen) == 5 - seen = [] - def trace_func(frame, event, *args): - seen.append(event) - return trace_func - sys.settrace(trace_func) - try: - gen.throw(ValueError) - except ValueError: - pass - sys.settrace(None) - assert seen == ['call', 'exception', 'return'] - - def test_generator_trace_stopiteration(self): - import sys - def f(): - yield 5 - gen = f() - assert next(gen) == 5 - seen = [] - def trace_func(frame, event, *args): - print('TRACE:', frame, event, args) - seen.append(event) - return trace_func - def g(): - for x in gen: - never_entered - sys.settrace(trace_func) - g() - sys.settrace(None) - print 'seen:', seen - # on Python 3 we get an extra 'exception' when 'for' catches - # StopIteration - assert seen == ['call', 'line', 'call', 'return', 'return'] - - def test_local_trace_function_returning_None_ignored(self): - # behave the same as CPython does, and in contradiction with - # the documentation. - def tracer(f, event, arg): - assert event == 'call' - return local_tracer - - seen = [] - def local_tracer(f, event, arg): - seen.append(event) - return None # but 'local_tracer' will be called again - - def function(): - a = 1 - a = 2 - a = 3 - - import sys - sys.settrace(tracer) - function() - sys.settrace(None) - assert seen == ["line", "line", "line", "return"] diff --git a/pypy/interpreter/unicodehelper.py b/pypy/interpreter/unicodehelper.py --- a/pypy/interpreter/unicodehelper.py +++ b/pypy/interpreter/unicodehelper.py @@ -6,7 +6,7 @@ from rpython.rlib import rutf8 from rpython.rlib.rarithmetic import r_uint, intmask from rpython.rtyper.lltypesystem import rffi -from pypy.module.unicodedata import unicodedb +from pypy.module.unicodedata.interp_ucd import unicodedb @specialize.memo() def decode_error_handler(space): @@ -233,7 +233,7 @@ slen = len(s) res = runicode.unicode_encode_mbcs(s, slen, errors, errorhandler) return res - + def str_decode_mbcs(s, errors, final, errorhandler): from rpython.rlib import runicode slen = len(s) diff --git a/pypy/module/__builtin__/__init__.py b/pypy/module/__builtin__/__init__.py --- a/pypy/module/__builtin__/__init__.py +++ b/pypy/module/__builtin__/__init__.py @@ -1,132 +0,0 @@ -from pypy.interpreter.error import OperationError -from pypy.interpreter import module -from pypy.interpreter.mixedmodule import MixedModule -import pypy.module.imp.importing - -# put builtins here that should be optimized somehow - -class Module(MixedModule): - """Built-in functions, exceptions, and other objects.""" - - appleveldefs = { - 'execfile' : 'app_io.execfile', - 'raw_input' : 'app_io.raw_input', - 'input' : 'app_io.input', - 'print' : 'app_io.print_', - - 'apply' : 'app_functional.apply', - 'sorted' : 'app_functional.sorted', - 'any' : 'app_functional.any', - 'all' : 'app_functional.all', - 'sum' : 'app_functional.sum', - 'map' : 'app_functional.map', - 'reduce' : 'app_functional.reduce', - 'filter' : 
'app_functional.filter', - 'zip' : 'app_functional.zip', - 'vars' : 'app_inspect.vars', - 'dir' : 'app_inspect.dir', - - 'bin' : 'app_operation.bin', - - } - - interpleveldefs = { - # constants - '__debug__' : '(space.w_True)', - 'None' : '(space.w_None)', - 'False' : '(space.w_False)', - 'True' : '(space.w_True)', - 'bytes' : '(space.w_bytes)', - - 'file' : 'state.get(space).w_file', - 'open' : 'state.get(space).w_file', - - # default __metaclass__: old-style class - '__metaclass__' : 'interp_classobj.W_ClassObject', - - # interp-level function definitions - 'abs' : 'operation.abs', - 'chr' : 'operation.chr', - 'unichr' : 'operation.unichr', - 'len' : 'operation.len', - 'ord' : 'operation.ord', - 'pow' : 'operation.pow', - 'repr' : 'operation.repr', - 'hash' : 'operation.hash', - 'oct' : 'operation.oct', - 'hex' : 'operation.hex', - 'round' : 'operation.round', - 'cmp' : 'operation.cmp', - 'coerce' : 'operation.coerce', - 'divmod' : 'operation.divmod', - 'format' : 'operation.format', - '_issubtype' : 'operation._issubtype', - 'issubclass' : 'abstractinst.app_issubclass', - 'isinstance' : 'abstractinst.app_isinstance', - 'getattr' : 'operation.getattr', - 'setattr' : 'operation.setattr', - 'delattr' : 'operation.delattr', - 'hasattr' : 'operation.hasattr', - 'iter' : 'operation.iter', - 'next' : 'operation.next', - 'id' : 'operation.id', - 'intern' : 'operation.intern', - 'callable' : 'operation.callable', - - 'compile' : 'compiling.compile', - 'eval' : 'compiling.eval', - - '__import__' : 'pypy.module.imp.importing.importhook', - 'reload' : 'pypy.module.imp.importing.reload', - - 'range' : 'functional.range_int', - 'xrange' : 'functional.W_XRange', - 'enumerate' : 'functional.W_Enumerate', - 'min' : 'functional.min', - 'max' : 'functional.max', - 'reversed' : 'functional.reversed', - 'super' : 'descriptor.W_Super', - 'staticmethod' : 'pypy.interpreter.function.StaticMethod', - 'classmethod' : 'pypy.interpreter.function.ClassMethod', - 'property' : 'descriptor.W_Property', - - 'globals' : 'interp_inspect.globals', - 'locals' : 'interp_inspect.locals', - - } - - def pick_builtin(self, w_globals): - "Look up the builtin module to use from the __builtins__ global" - # pick the __builtins__ roughly in the same way CPython does it - # this is obscure and slow - space = self.space - try: - w_builtin = space.getitem(w_globals, space.newtext('__builtins__')) - except OperationError as e: - if not e.match(space, space.w_KeyError): - raise - else: - if w_builtin is space.builtin: # common case - return space.builtin - if space.isinstance_w(w_builtin, space.w_dict): - return module.Module(space, None, w_builtin) - if isinstance(w_builtin, module.Module): - return w_builtin - # no builtin! make a default one. Give them None, at least. - builtin = module.Module(space, None) - space.setitem(builtin.w_dict, space.newtext('None'), space.w_None) - return builtin - - def setup_after_space_initialization(self): - """NOT_RPYTHON""" - space = self.space - # install the more general version of isinstance() & co. 
in the space - from pypy.module.__builtin__ import abstractinst as ab - space.abstract_isinstance_w = ab.abstract_isinstance_w.__get__(space) - space.abstract_issubclass_w = ab.abstract_issubclass_w.__get__(space) - space.abstract_isclass_w = ab.abstract_isclass_w.__get__(space) - space.abstract_getclass = ab.abstract_getclass.__get__(space) - space.exception_is_valid_class_w = ab.exception_is_valid_class_w.__get__(space) - space.exception_is_valid_obj_as_class_w = ab.exception_is_valid_obj_as_class_w.__get__(space) - space.exception_getclass = ab.exception_getclass.__get__(space) - space.exception_issubclass_w = ab.exception_issubclass_w.__get__(space) diff --git a/pypy/module/__builtin__/__init__.py b/pypy/module/__builtin__/moduledef.py copy from pypy/module/__builtin__/__init__.py copy to pypy/module/__builtin__/moduledef.py diff --git a/pypy/module/__pypy__/__init__.py b/pypy/module/__pypy__/__init__.py --- a/pypy/module/__pypy__/__init__.py +++ b/pypy/module/__pypy__/__init__.py @@ -1,163 +0,0 @@ -import sys - -from pypy.interpreter.mixedmodule import MixedModule -from pypy.module.imp.importing import get_pyc_magic -from rpython.rlib import rtime - - -class BuildersModule(MixedModule): - """ Module containing string and unicode builders """ - - appleveldefs = {} - - interpleveldefs = { - "StringBuilder": "interp_builders.W_StringBuilder", - "UnicodeBuilder": "interp_builders.W_UnicodeBuilder", - } - -class TimeModule(MixedModule): - appleveldefs = {} - interpleveldefs = {} - if rtime.HAS_CLOCK_GETTIME: - interpleveldefs["clock_gettime"] = "interp_time.clock_gettime" - interpleveldefs["clock_getres"] = "interp_time.clock_getres" - for name in rtime.ALL_DEFINED_CLOCKS: - interpleveldefs[name] = "space.wrap(%d)" % getattr(rtime, name) - - -class ThreadModule(MixedModule): - appleveldefs = { From pypy.commits at gmail.com Thu Aug 1 12:37:49 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 01 Aug 2019 09:37:49 -0700 (PDT) Subject: [pypy-commit] pypy py3tests: Close branch py3tests Message-ID: <5d43155d.1c69fb81.8d330.9cff@mx.google.com> Author: Ronan Lamy Branch: py3tests Changeset: r97045:ad37e2494ad9 Date: 2019-08-01 16:37 +0000 http://bitbucket.org/pypy/pypy/changeset/ad37e2494ad9/ Log: Close branch py3tests From pypy.commits at gmail.com Thu Aug 1 12:38:15 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 01 Aug 2019 09:38:15 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Merged in py3tests (pull request #660) Message-ID: <5d431577.1c69fb81.2471f.ff06@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97046:62f51af92fd2 Date: 2019-08-01 16:37 +0000 http://bitbucket.org/pypy/pypy/changeset/62f51af92fd2/ Log: Merged in py3tests (pull request #660) New mechanism for app-level testing (py3 edition) diff too long, truncating to 2000 out of 8781 lines diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -223,6 +223,10 @@ BoolOption("newshortcut", "cache and shortcut calling __new__ from builtin types", default=False), + BoolOption("reinterpretasserts", + "Perform reinterpretation when an assert fails " + "(only relevant for tests)", + default=False), ]), ]) diff --git a/pypy/conftest.py b/pypy/conftest.py --- a/pypy/conftest.py +++ b/pypy/conftest.py @@ -9,6 +9,8 @@ PYTHON3 = os.getenv('PYTHON3') or py.path.local.sysfind(LOOK_FOR_PYTHON3) if PYTHON3 is not None: PYTHON3 = str(PYTHON3) +HOST_IS_PY3 = sys.version_info[0] > 2 +APPLEVEL_FN = 'apptest_*.py' # pytest settings rsyncdirs = ['.', 
'../lib-python', '../lib_pypy', '../demo'] @@ -45,35 +47,48 @@ def pytest_report_header(): return "pytest-%s from %s" % (pytest.__version__, pytest.__file__) -def pytest_addhooks(pluginmanager): - if sys.version_info < (3,): - from rpython.conftest import LeakFinder - pluginmanager.register(LeakFinder()) + at pytest.hookimpl(tryfirst=True) +def pytest_cmdline_preparse(config, args): + if not (set(args) & {'-D', '--direct-apptest'}): + args.append('--assert=reinterp') def pytest_configure(config): + if HOST_IS_PY3 and not config.getoption('direct_apptest'): + raise ValueError( + "On top of a Python 3 interpreter, the -D flag is mandatory") global option option = config.option + mode_A = config.getoption('runappdirect') + mode_D = config.getoption('direct_apptest') def py3k_skip(message): py.test.skip('[py3k] %s' % message) py.test.py3k_skip = py3k_skip + if mode_D or not mode_A: + config.addinivalue_line('python_files', APPLEVEL_FN) + if not mode_A and not mode_D: # 'own' tests + from rpython.conftest import LeakFinder + config.pluginmanager.register(LeakFinder()) def pytest_addoption(parser): - from rpython.conftest import pytest_addoption - pytest_addoption(parser) - group = parser.getgroup("pypy options") group.addoption('-A', '--runappdirect', action="store_true", default=False, dest="runappdirect", - help="run applevel tests directly on the python interpreter " + + help="run legacy applevel tests directly on the python interpreter " + "specified by --python") group.addoption('--python', type="string", default=PYTHON3, help="python interpreter to run appdirect tests with") + group.addoption('-D', '--direct-apptest', action="store_true", + default=False, dest="direct_apptest", + help="run applevel_XXX.py tests directly on host interpreter") group.addoption('--direct', action="store_true", default=False, dest="rundirect", help="run pexpect tests directly") group.addoption('--raise-operr', action="store_true", default=False, dest="raise_operr", help="Show the interp-level OperationError in app-level tests") + group.addoption('--applevel-rewrite', action="store_true", + default=False, dest="applevel_rewrite", + help="Use assert rewriting in app-level test files (slow)") @pytest.fixture(scope='function') def space(request): @@ -107,14 +122,21 @@ ensure_pytest_builtin_helpers() def pytest_pycollect_makemodule(path, parent): - return PyPyModule(path, parent) + if path.fnmatch(APPLEVEL_FN): + if parent.config.getoption('direct_apptest'): + return + from pypy.tool.pytest.apptest2 import AppTestModule + rewrite = parent.config.getoption('applevel_rewrite') + return AppTestModule(path, parent, rewrite_asserts=rewrite) + else: + return PyPyModule(path, parent) def is_applevel(item): from pypy.tool.pytest.apptest import AppTestFunction return isinstance(item, AppTestFunction) def pytest_collection_modifyitems(config, items): - if config.option.runappdirect: + if config.getoption('runappdirect') or config.getoption('direct_apptest'): return for item in items: if isinstance(item, py.test.Function): @@ -123,17 +145,17 @@ else: item.add_marker('interplevel') -class PyPyModule(py.test.collect.Module): + +class PyPyModule(pytest.Module): """ we take care of collecting classes both at app level and at interp-level (because we need to stick a space at the class) ourselves. 
""" def accept_regular_test(self): if self.config.option.runappdirect: - # only collect regular tests if we are in an 'app_test' directory, - # or in test_lib_pypy + # only collect regular tests if we are in test_lib_pypy for name in self.listnames(): - if "app_test" in name or "test_lib_pypy" in name: + if "test_lib_pypy" in name: return True return False return True @@ -205,6 +227,8 @@ appclass.obj.space = LazyObjSpaceGetter() appclass.obj.runappdirect = option.runappdirect - -def pytest_ignore_collect(path): +def pytest_ignore_collect(path, config): + if (config.getoption('direct_apptest') and not path.isdir() + and not path.fnmatch(APPLEVEL_FN)): + return True return path.check(link=1) diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -342,10 +342,10 @@ translate.log_config(config.objspace, "PyPy config object") # obscure hack to stuff the translation options into the translated PyPy - import pypy.module.sys + from pypy.module.sys.moduledef import Module as SysModule options = make_dict(config) - wrapstr = 'space.wrap(%r)' % (options) # import time - pypy.module.sys.Module.interpleveldefs['pypy_translation_info'] = wrapstr + wrapstr = 'space.wrap(%r)' % (options) # import time + SysModule.interpleveldefs['pypy_translation_info'] = wrapstr if config.objspace.usemodules._cffi_backend: self.hack_for_cffi_modules(driver) diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -493,13 +493,14 @@ return self.__class__.__name__ @not_rpython - def setbuiltinmodule(self, importname): + def setbuiltinmodule(self, pkgname): """load a lazy pypy/module and put it into sys.modules""" - if '.' in importname: - fullname = importname - importname = fullname.rsplit('.', 1)[1] + if '.' in pkgname: + fullname = "%s.moduledef" % (pkgname,) + importname = pkgname.rsplit('.', 1)[1] else: - fullname = "pypy.module.%s" % importname + fullname = "pypy.module.%s.moduledef" % pkgname + importname = pkgname Module = __import__(fullname, None, None, ["Module"]).Module @@ -578,22 +579,22 @@ def make_builtins(self): "only for initializing the space." - from pypy.module.exceptions import Module + from pypy.module.exceptions.moduledef import Module w_name = self.newtext('__exceptions__') self.exceptions_module = Module(self, w_name) self.exceptions_module.install() - from pypy.module.imp import Module + from pypy.module.imp.moduledef import Module w_name = self.newtext('_imp') mod = Module(self, w_name) mod.install() - from pypy.module.sys import Module + from pypy.module.sys.moduledef import Module w_name = self.newtext('sys') self.sys = Module(self, w_name) self.sys.install() - from pypy.module.__builtin__ import Module + from pypy.module.__builtin__.moduledef import Module w_name = self.newtext('builtins') self.builtin = Module(self, w_name) w_builtin = self.builtin diff --git a/pypy/interpreter/main.py b/pypy/interpreter/main.py --- a/pypy/interpreter/main.py +++ b/pypy/interpreter/main.py @@ -34,7 +34,7 @@ try: if space is None: - from pypy.objspace.std import StdObjSpace + from pypy.objspace.std.objspace import StdObjSpace space = StdObjSpace() pycode = compilecode(space, source, filename or '', cmd) @@ -82,7 +82,7 @@ implementation. 
""" if space is None: - from pypy.objspace.std import StdObjSpace + from pypy.objspace.std.objspace import StdObjSpace space = StdObjSpace() argv = [module_name] if args is not None: diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -30,17 +30,21 @@ @not_rpython def install(self): - """install this module, and it's submodules into + """Install this module, and its submodules into space.builtin_modules""" Module.install(self) if hasattr(self, "submodules"): space = self.space - name = space.text_w(self.w_name) + pkgname = space.text_w(self.w_name) for sub_name, module_cls in self.submodules.iteritems(): if module_cls.submodule_name is None: module_cls.submodule_name = sub_name - module_name = space.newtext("%s.%s" % (name, sub_name)) - m = module_cls(space, module_name) + else: + assert module_cls.submodule_name == sub_name + name = "%s.%s" % (pkgname, sub_name) + module_cls.applevel_name = name + w_name = space.newtext(name) + m = module_cls(space, w_name) m.install() self.submodules_w.append(m) @@ -75,8 +79,7 @@ if cls.applevel_name is not None: return cls.applevel_name else: - pkgroot = cls.__module__ - return pkgroot.split('.')[-1] + return cls.__module__.split('.')[-2] def get(self, name): space = self.space @@ -173,10 +176,8 @@ # build a constant dictionary out of # applevel/interplevel definitions cls.loaders = loaders = {} - pkgroot = cls.__module__ + pkgroot = cls.__module__.rsplit('.', 1)[0] appname = cls.get_applevel_name() - if cls.submodule_name is not None: - appname += '.%s' % (cls.submodule_name,) for name, spec in cls.interpleveldefs.items(): loaders[name] = getinterpevalloader(pkgroot, spec) for name, spec in cls.appleveldefs.items(): @@ -187,7 +188,7 @@ def extra_interpdef(self, name, spec): cls = self.__class__ - pkgroot = cls.__module__ + pkgroot = cls.__module__.rsplit('.', 1)[0] loader = getinterpevalloader(pkgroot, spec) space = self.space w_obj = loader(space) diff --git a/pypy/interpreter/test/apptest_coroutine.py b/pypy/interpreter/test/apptest_coroutine.py new file mode 100644 --- /dev/null +++ b/pypy/interpreter/test/apptest_coroutine.py @@ -0,0 +1,704 @@ +import pytest +from pytest import raises + + +def test_cannot_iterate(): + async def f(x): + pass + pytest.raises(TypeError, "for i in f(5): pass") + pytest.raises(TypeError, iter, f(5)) + pytest.raises(TypeError, next, f(5)) + + +def test_async_for(): + class X: + def __aiter__(self): + return MyAIter() + class MyAIter: + async def __anext__(self): + return 42 + async def f(x): + sum = 0 + async for a in x: + sum += a + if sum > 100: + break + return sum + cr = f(X()) + try: + cr.send(None) + except StopIteration as e: + assert e.value == 42 * 3 + else: + assert False, "should have raised" + + +def test_StopAsyncIteration(): + class X: + def __aiter__(self): + return MyAIter() + class MyAIter: + count = 0 + async def __anext__(self): + if self.count == 3: + raise StopAsyncIteration + self.count += 1 + return 42 + async def f(x): + sum = 0 + async for a in x: + sum += a + return sum + cr = f(X()) + try: + cr.send(None) + except StopIteration as e: + assert e.value == 42 * 3 + else: + assert False, "should have raised" + + +def test_async_for_old_style(): + class X: + def __aiter__(self): + return MyAIter() + class MyAIter: + def __await__(self): + return iter([20, 30]) + async def f(x): + sum = 0 + async for a in x: + sum += a + if sum > 100: + break + return sum + cr = f(X()) + assert 
next(cr.__await__()) == 20 + + +def test_for_error_cause(): + class F: + def __aiter__(self): + return self + def __anext__(self): + return self + def __await__(self): + 1 / 0 + + async def main(): + async for _ in F(): + pass + + c = pytest.raises(TypeError, main().send, None) + assert 'an invalid object from __anext__' in c.value.args[0], c.value + assert isinstance(c.value.__cause__, ZeroDivisionError) + +def test_set_coroutine_wrapper(): + import sys + async def f(): + pass + seen = [] + def my_wrapper(cr): + seen.append(cr) + return 42 + assert sys.get_coroutine_wrapper() is None + sys.set_coroutine_wrapper(my_wrapper) + assert sys.get_coroutine_wrapper() is my_wrapper + cr = f() + assert cr == 42 + sys.set_coroutine_wrapper(None) + assert sys.get_coroutine_wrapper() is None + + +def test_async_with(): + seen = [] + class X: + async def __aenter__(self): + seen.append('aenter') + async def __aexit__(self, *args): + seen.append('aexit') + async def f(x): + async with x: + return 42 + c = f(X()) + try: + c.send(None) + except StopIteration as e: + assert e.value == 42 + else: + assert False, "should have raised" + assert seen == ['aenter', 'aexit'] + +def test_async_with_exit_True(): + seen = [] + class X: + async def __aenter__(self): + seen.append('aenter') + async def __aexit__(self, *args): + seen.append('aexit') + return True + async def f(x): + async with x: + return 42 + c = f(X()) + try: + c.send(None) + except StopIteration as e: + assert e.value == 42 + else: + assert False, "should have raised" + assert seen == ['aenter', 'aexit'] + +def test_await(): + class X: + def __await__(self): + i1 = yield 40 + assert i1 == 82 + i2 = yield 41 + assert i2 == 93 + async def f(): + await X() + await X() + c = f() + assert c.send(None) == 40 + assert c.send(82) == 41 + assert c.send(93) == 40 + assert c.send(82) == 41 + pytest.raises(StopIteration, c.send, 93) + + +def test_await_error(): + async def f(): + await [42] + c = f() + try: + c.send(None) + except TypeError as e: + assert str(e) == "object list can't be used in 'await' expression" + else: + assert False, "should have raised" + + +def test_async_with_exception_context(): + class CM: + async def __aenter__(self): + pass + async def __aexit__(self, *e): + 1/0 + async def f(): + async with CM(): + raise ValueError + c = f() + try: + c.send(None) + except ZeroDivisionError as e: + assert e.__context__ is not None + assert isinstance(e.__context__, ValueError) + else: + assert False, "should have raised" + + +def test_runtime_warning(): + import gc, warnings # XXX: importing warnings is expensive untranslated + async def foobaz(): + pass + with warnings.catch_warnings(record=True) as l: + foobaz() + gc.collect() + gc.collect() + gc.collect() + + assert len(l) == 1, repr(l) + w = l[0].message + assert isinstance(w, RuntimeWarning) + assert str(w).startswith("coroutine ") + assert str(w).endswith("foobaz' was never awaited") + + +def test_async_for_with_tuple_subclass(): + class Done(Exception): pass + + class AIter(tuple): + i = 0 + def __aiter__(self): + return self + async def __anext__(self): + if self.i >= len(self): + raise StopAsyncIteration + self.i += 1 + return self[self.i - 1] + + result = [] + async def foo(): + async for i in AIter([42]): + result.append(i) + raise Done + + try: + foo().send(None) + except Done: + pass + assert result == [42] + +def test_async_yield(): + class Done(Exception): pass + + async def mygen(): + yield 5 + + result = [] + async def foo(): + async for i in mygen(): + result.append(i) + raise Done 
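These tests drive coroutines by hand, without any event loop: send(None) advances the awaitable and the produced value travels back inside the StopIteration it raises. A minimal, self-contained sketch of that driving pattern (the names countdown and drive are invented for the illustration, not taken from the test suite; any Python 3.6+ interpreter should run it):

async def countdown():
    yield 3
    yield 2
    yield 1

def drive(step):
    # run one __anext__() awaitable to completion; the yielded value
    # arrives as the payload of the StopIteration it raises
    try:
        step.send(None)
    except StopIteration as exc:
        return exc.value
    raise RuntimeError("awaitable suspended unexpectedly")

agen = countdown()
values = []
while True:
    try:
        values.append(drive(agen.__anext__()))
    except StopAsyncIteration:
        # an exhausted async generator signals the end via StopAsyncIteration
        break
assert values == [3, 2, 1]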
+ + try: + foo().send(None) + except Done: + pass + assert result == [5] + +def test_async_yield_already_finished(): + class Done(Exception): pass + + async def mygen(): + yield 5 + + result = [] + async def foo(): + g = mygen() + async for i in g: + result.append(i) + async for i in g: + assert False # should not be reached + raise Done + + try: + foo().send(None) + except Done: + pass + assert result == [5] + +def test_async_yield_with_await(): + class Done(Exception): pass + + class X: + def __await__(self): + i1 = yield 40 + assert i1 == 82 + i2 = yield 41 + assert i2 == 93 + + async def mygen(): + yield 5 + await X() + yield 6 + + result = [] + async def foo(): + async for i in mygen(): + result.append(i) + raise Done + + co = foo() + x = co.send(None) + assert x == 40 + assert result == [5] + x = co.send(82) + assert x == 41 + assert result == [5] + raises(Done, co.send, 93) + assert result == [5, 6] + +def test_async_yield_with_explicit_send(): + class X: + def __await__(self): + i1 = yield 40 + assert i1 == 82 + i2 = yield 41 + assert i2 == 93 + + async def mygen(): + x = yield 5 + assert x == 2189 + await X() + y = yield 6 + assert y == 319 + + result = [] + async def foo(): + gen = mygen() + result.append(await gen.asend(None)) + result.append(await gen.asend(2189)) + try: + await gen.asend(319) + except StopAsyncIteration: + return 42 + else: + raise AssertionError + + co = foo() + x = co.send(None) + assert x == 40 + assert result == [5] + x = co.send(82) + assert x == 41 + assert result == [5] + e = raises(StopIteration, co.send, 93) + assert e.value.args == (42,) + assert result == [5, 6] + +def test_async_yield_explicit_asend_and_next(): + async def mygen(y): + assert y == 4983 + x = yield 5 + assert x == 2189 + yield "ok" + + g = mygen(4983) + raises(TypeError, g.asend(42).__next__) + e = raises(StopIteration, g.asend(None).__next__) + assert e.value.args == (5,) + e = raises(StopIteration, g.asend(2189).__next__) + assert e.value.args == ("ok",) + +def test_async_yield_explicit_asend_and_send(): + async def mygen(y): + assert y == 4983 + x = yield 5 + assert x == 2189 + yield "ok" + + g = mygen(4983) + e = raises(TypeError, g.asend(None).send, 42) + assert str(e.value) == ("can't send non-None value to a just-started " + "async generator") + e = raises(StopIteration, g.asend(None).send, None) + assert e.value.args == (5,) + e = raises(StopIteration, g.asend("IGNORED").send, 2189) # xxx + assert e.value.args == ("ok",) + +def test_async_yield_explicit_asend_used_several_times(): + class X: + def __await__(self): + r = yield -2 + assert r == "cont1" + r = yield -3 + assert r == "cont2" + return -4 + async def mygen(y): + x = await X() + assert x == -4 + r = yield -5 + assert r == "foo" + r = yield -6 + assert r == "bar" + + g = mygen(4983) + gs = g.asend(None) + r = gs.send(None) + assert r == -2 + r = gs.send("cont1") + assert r == -3 + e = raises(StopIteration, gs.send, "cont2") + assert e.value.args == (-5,) + e = raises(StopIteration, gs.send, None) + assert e.value.args == () + e = raises(StopIteration, gs.send, None) + assert e.value.args == () + # + gs = g.asend("foo") + e = raises(StopIteration, gs.send, None) + assert e.value.args == (-6,) + e = raises(StopIteration, gs.send, "bar") + assert e.value.args == () + +def test_async_yield_asend_notnone_throw(): + async def f(): + yield 123 + + raises(ValueError, f().asend(42).throw, ValueError) + +def test_async_yield_asend_none_throw(): + async def f(): + yield 123 + + raises(ValueError, f().asend(None).throw, 
ValueError) + +def test_async_yield_athrow_send_none(): + async def ag(): + yield 42 + + raises(ValueError, ag().athrow(ValueError).send, None) + +def test_async_yield_athrow_send_notnone(): + async def ag(): + yield 42 + + ex = raises(RuntimeError, ag().athrow(ValueError).send, 42) + expected = ("can't send non-None value to a just-started coroutine", ) + assert ex.value.args == expected + +def test_async_yield_athrow_send_after_exception(): + async def ag(): + yield 42 + + athrow_coro = ag().athrow(ValueError) + raises(ValueError, athrow_coro.send, None) + raises(StopIteration, athrow_coro.send, None) + +def test_async_yield_athrow_throw(): + async def ag(): + yield 42 + + raises(RuntimeError, ag().athrow(ValueError).throw, LookupError) + # CPython's message makes little sense; PyPy's message is different + +def test_async_yield_athrow_while_running(): + values = [] + async def ag(): + try: + received = yield 1 + except ValueError: + values.append(42) + return + yield 2 + + + async def run(): + running = ag() + x = await running.asend(None) + assert x == 1 + try: + await running.athrow(ValueError) + except StopAsyncIteration: + pass + + + try: + run().send(None) + except StopIteration: + assert values == [42] + +def test_async_aclose(): + raises_generator_exit = False + async def ag(): + nonlocal raises_generator_exit + try: + yield + except GeneratorExit: + raises_generator_exit = True + raise + + async def run(): + a = ag() + async for i in a: + break + await a.aclose() + try: + run().send(None) + except StopIteration: + pass + assert raises_generator_exit + +def test_async_aclose_ignore_generator_exit(): + async def ag(): + try: + yield + except GeneratorExit: + yield + + async def run(): + a = ag() + async for i in a: + break + await a.aclose() + raises(RuntimeError, run().send, None) + +def test_async_aclose_await_in_finally(): + import types + + @types.coroutine + def coro(): + yield 'coro' + + state = 0 + async def ag(): + nonlocal state + try: + yield + finally: + state = 1 + await coro() + state = 2 + + async def run(): + a = ag() + async for i in a: + break + await a.aclose() + a = run() + assert state == 0 + assert a.send(None) == 'coro' + assert state == 1 + try: + a.send(None) + except StopIteration: + pass + assert state == 2 + +def test_async_aclose_await_in_finally_with_exception(): + import types + + @types.coroutine + def coro(): + yield 'coro' + + state = 0 + async def ag(): + nonlocal state + try: + yield + finally: + state = 1 + try: + await coro() + except Exception as exc: + state = exc + + async def run(): + a = ag() + async for i in a: + break + await a.aclose() + a = run() + assert state == 0 + assert a.send(None) == 'coro' + assert state == 1 + exc = RuntimeError() + try: + a.throw(exc) + except StopIteration: + pass + assert state == exc + +def test_async_aclose_in_finalize_hook_await_in_finally(): + import gc + import sys + import types + + @types.coroutine + def coro(): + yield 'coro' + + state = 0 + async def ag(): + nonlocal state + try: + yield + finally: + state = 1 + await coro() + state = 2 + + async def run(): + a = ag() + async for i in a: + break + del a + gc.collect() + gc.collect() + gc.collect() + a = run() + + a2 = None + assert sys.get_asyncgen_hooks() == (None, None) + def _finalize(g): + nonlocal a2 + a2 = g.aclose() + sys.set_asyncgen_hooks(finalizer=_finalize) + assert state == 0 + try: + a.send(None) + except StopIteration: + pass + assert a2.send(None) == 'coro' + assert state == 1 + try: + a2.send(None) + except StopIteration: + pass + 
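The aclose() tests nearby rely on the same hand-driving trick; a rough standalone sketch of what aclose() does to a suspended async generator (ticker is an invented name, and this only illustrates the language-level protocol, not the PyPy internals):

async def ticker():
    try:
        yield 1
    except GeneratorExit:
        # aclose() throws GeneratorExit at the suspension point; re-raising
        # (or simply returning) lets the close complete, whereas yielding
        # again would turn into a RuntimeError
        raise

gen = ticker()
try:
    gen.__anext__().send(None)      # advance to the first yield
except StopIteration as exc:
    assert exc.value == 1

try:
    gen.aclose().send(None)         # drive the aclose() awaitable
except StopIteration:
    pass                            # close finished cleanly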
assert state == 2 + sys.set_asyncgen_hooks(None, None) + +def test_async_anext_close(): + async def ag(): + yield 42 + + an = ag().__anext__() + an.close() + try: + next(an) + except StopIteration as e: + assert e.value is None + else: + assert False, "didn't raise" + +def run_async(coro): + buffer = [] + result = None + while True: + try: + buffer.append(coro.send(None)) + except StopIteration as ex: + result = ex.args[0] if ex.args else None + break + return buffer, result + +def test_async_generator(): + async def f(i): + return i + + async def run_list(): + return [await c for c in [f(1), f(41)]] + + assert run_async(run_list()) == ([], [1, 41]) + +def test_async_genexpr(): + async def f(it): + for i in it: + yield i + + async def run_gen(): + gen = (i + 1 async for i in f([10, 20])) + return [g + 100 async for g in gen] + + assert run_async(run_gen()) == ([], [111, 121]) + +def test_anext_tuple(): + async def foo(): + try: + yield (1,) + except ZeroDivisionError: + yield (2,) + + async def run(): + it = foo().__aiter__() + return await it.__anext__() + + assert run_async(run()) == ([], (1,)) + +def test_asyncgen_yield_stopiteration(): + async def foo(): + yield 1 + yield StopIteration(2) + + async def run(): + it = foo().__aiter__() + val1 = await it.__anext__() + assert val1 == 1 + val2 = await it.__anext__() + assert isinstance(val2, StopIteration) + assert val2.value == 2 + + run_async(run()) diff --git a/pypy/interpreter/test/apptest_pyframe.py b/pypy/interpreter/test/apptest_pyframe.py new file mode 100644 --- /dev/null +++ b/pypy/interpreter/test/apptest_pyframe.py @@ -0,0 +1,787 @@ +import pytest + + at pytest.fixture +def tempfile(tmpdir): + return str(tmpdir / 'tempfile1') + +def test_f_locals(): + import sys + f = sys._getframe() + assert f.f_locals is locals() + +def test_f_globals(): + import sys + f = sys._getframe() + assert f.f_globals is globals() + pytest.raises(AttributeError, "f.f_globals = globals()") + +def test_f_builtins(): + import sys, builtins + f = sys._getframe() + assert f.f_builtins is builtins.__dict__ + +def test_f_code(): + def g(): + import sys + f = sys._getframe() + return f.f_code + assert g() is g.__code__ + +def test_f_trace_del(): + import sys + f = sys._getframe() + del f.f_trace + assert f.f_trace is None + +def test_f_lineno(): + def g(): + import sys + f = sys._getframe() + x = f.f_lineno + y = f.f_lineno + z = f.f_lineno + return [x, y, z] + origin = g.__code__.co_firstlineno + assert g() == [origin+3, origin+4, origin+5] + +def test_f_lineno_huge_jump(): + code = """def g(): + import sys + f = sys._getframe() + x = f.f_lineno + %s + y = f.f_lineno + %s + z = f.f_lineno + return [x, y, z]""" % ("\n" * 127, "\n" * 1000) + d = {} + exec(code, d) + g = d['g'] + origin = g.__code__.co_firstlineno + print(repr(g.__code__.co_lnotab)) + assert g() == [origin+3, origin+5+127, origin+7+127+1000] + +def test_f_lineno_set(tempfile): + def tracer(f, *args): + def y(f, *args): + return y + def x(f, *args): + f.f_lineno += 1 + return y # "return None" should have the same effect, but see + # test_local_trace_function_returning_None_ignored + return x + + # obscure: call open beforehand, py3k's open invokes some app + # level code that confuses our tracing (likely due to the + # testing env, otherwise it's not a problem) + f = open(tempfile, 'w') + def function(f=f): + xyz + with f as f: + pass + return 3 + + import sys + sys.settrace(tracer) + function() + sys.settrace(None) + # assert did not crash + +def test_f_lineno_set_2(tempfile): + counter = [0] + 
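The pyframe tests in this file revolve around sys._getframe() and the frame attributes f_lineno, f_back and friends; a tiny illustration of what those attributes report (function names invented for the example):

import sys

def where():
    frame = sys._getframe()                       # frame of where() itself
    return frame.f_lineno - where.__code__.co_firstlineno

def caller_name():
    return sys._getframe().f_back.f_code.co_name  # one frame up

def outer():
    return caller_name()

assert where() == 2       # the return statement sits two lines below the def
assert outer() == 'outer'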
errors = [] + + def tracer(f, event, *args): + if event == 'line': + counter[0] += 1 + if counter[0] == 2: + try: + f.f_lineno += 2 + except ValueError as e: + errors.append(e) + return tracer + + # obscure: call open beforehand, py3k's open invokes some app + # level code that confuses our tracing (likely due to the + # testing env, otherwise it's not a problem) + f = open(tempfile, 'w') + def function(): + try: + raise ValueError + except ValueError: + x = 42 + return x + + import sys + sys.settrace(tracer) + x = function() + sys.settrace(None) + assert x == 42 + assert len(errors) == 1 + assert str(errors[0]).startswith( + "can't jump into or out of an 'expect' or 'finally' block") + +def test_f_lineno_set_3(): + def jump_in_nested_finally(output): + try: + output.append(2) + finally: + output.append(4) + try: + output.append(6) + finally: + output.append(8) + output.append(9) + output = [] + + def tracer(f, event, *args): + if event == 'line' and len(output) == 1: + f.f_lineno += 5 + return tracer + + import sys + sys.settrace(tracer) + jump_in_nested_finally(output) + sys.settrace(None) + assert output == [2, 9] + +def test_f_lineno_set_firstline(): + seen = [] + def tracer(f, event, *args): + if f.f_code.co_name == "decode": + return tracer + seen.append((event, f.f_lineno)) + if len(seen) == 5: + f.f_lineno = 1 # bug shown only when setting lineno to 1 + return tracer + + def g(): + import sys + source = "x=1\ny=x+1\nz=y+1\nt=z+1\ns=t+1\n" + # compile first to ensure that no spurious events appear in the trace + code = compile(source, '', 'exec') + sys.settrace(tracer) + exec(code, {}) + sys.settrace(None) + + g() + assert seen == [('call', 1), + ('line', 1), + ('line', 2), + ('line', 3), + ('line', 4), + ('line', 2), + ('line', 3), + ('line', 4), + ('line', 5), + ('return', 5)] + +def test_f_back(): + import sys + def f(): + assert sys._getframe().f_code.co_name == g() + def g(): + return sys._getframe().f_back.f_code.co_name + f() + +def test_f_back_virtualref(): + import sys + def f(): + return g() + def g(): + return sys._getframe() + frame = f() + assert frame.f_back.f_code.co_name == 'f' + +def test_virtualref_through_traceback(): + import sys + def g(): + try: + raise ValueError + except: + _, _, tb = sys.exc_info() + return tb + def f(): + return g() + # + tb = f() + assert tb.tb_frame.f_code.co_name == 'g' + assert tb.tb_frame.f_back.f_code.co_name == 'f' + +def test_trace_basic(): + import sys + l = [] + class Tracer: + def __init__(self, i): + self.i = i + def trace(self, frame, event, arg): + l.append((self.i, frame.f_code.co_name, event, arg)) + if frame.f_code.co_name == 'g2': + return None # don't trace g2 + return Tracer(self.i+1).trace + def g3(n): + n -= 5 + return n + def g2(n): + n += g3(2) + n += g3(7) + return n + def g(n): + n += g2(3) + return n + def f(n): + n = g(n) + return n * 7 + sys.settrace(Tracer(0).trace) + x = f(4) + sys.settrace(None) + assert x == 42 + print(l) + assert l == [(0, 'f', 'call', None), + (1, 'f', 'line', None), + (0, 'g', 'call', None), + (1, 'g', 'line', None), + (0, 'g2', 'call', None), + (0, 'g3', 'call', None), + (1, 'g3', 'line', None), + (2, 'g3', 'line', None), + (3, 'g3', 'return', -3), + (0, 'g3', 'call', None), + (1, 'g3', 'line', None), + (2, 'g3', 'line', None), + (3, 'g3', 'return', 2), + (2, 'g', 'line', None), + (3, 'g', 'return', 6), + (2, 'f', 'line', None), + (3, 'f', 'return', 42)] + +def test_trace_exc(): + import sys + l = [] + def ltrace(a,b,c): + if b == 'exception': + l.append(c) + return ltrace + def 
trace(a,b,c): return ltrace + def f(): + try: + raise Exception + except: + pass + sys.settrace(trace) + f() + sys.settrace(None) + assert len(l) == 1 + assert isinstance(l[0][1], Exception) + +def test_trace_ignore_hidden(): + import sys + import _testing + + l = [] + def trace(a,b,c): + if a.f_code.co_name != "decode": + l.append((a,b,c)) + + def f(): + h = _testing.Hidden() + r = h.meth() + return r + + sys.settrace(trace) + res = f() + sys.settrace(None) + assert len(l) == 1 + assert l[0][1] == 'call' + assert res == 'hidden' # sanity + +def test_trace_hidden_applevel_builtins(): + import sys + + l = [] + def trace(a,b,c): + l.append((a,b,c)) + return trace + + def f(): + sum([]) + sum([]) + sum([]) + return "that's the return value" + + sys.settrace(trace) + f() + sys.settrace(None) + # should get 1 "call", 3 "line" and 1 "return" events, and no call + # or return for the internal app-level implementation of sum + assert len(l) == 6 + assert [what for (frame, what, arg) in l] == [ + 'call', 'line', 'line', 'line', 'line', 'return'] + assert l[-1][2] == "that's the return value" + +def test_trace_return_exc(): + import sys + l = [] + def trace(a,b,c): + if b in ('exception', 'return'): + l.append((b, c)) + return trace + + def g(): + raise Exception + def f(): + try: + g() + except: + pass + sys.settrace(trace) + f() + sys.settrace(None) + assert len(l) == 4 + assert l[0][0] == 'exception' + assert isinstance(l[0][1][1], Exception) + assert l[1] == ('return', None) + assert l[2][0] == 'exception' + assert isinstance(l[2][1][1], Exception) + assert l[3] == ('return', None) + +def test_trace_raises_on_return(): + import sys + def trace(frame, event, arg): + if event == 'return': + raise ValueError + else: + return trace + + def f(): return 1 + + for i in range(sys.getrecursionlimit() + 1): + sys.settrace(trace) + try: + f() + except ValueError: + pass + +def test_trace_try_finally(): + import sys + l = [] + def trace(frame, event, arg): + if event == 'exception': + l.append(arg) + return trace + + def g(): + try: + raise Exception + finally: + pass + + def f(): + try: + g() + except: + pass + + sys.settrace(trace) + f() + sys.settrace(None) + assert len(l) == 2 + assert issubclass(l[0][0], Exception) + assert issubclass(l[1][0], Exception) + +def test_trace_generator_finalisation(): + import sys + l = [] + got_exc = [] + def trace(frame, event, arg): + l.append((frame.f_lineno, event)) + if event == 'exception': + got_exc.append(arg) + return trace + + d = {} + exec("""if 1: + def g(): + try: + yield True + finally: + pass + + def f(): + try: + gen = g() + next(gen) + gen.close() + except: + pass + """, d) + f = d['f'] + + sys.settrace(trace) + f() + sys.settrace(None) + assert len(got_exc) == 1 + assert issubclass(got_exc[0][0], GeneratorExit) + assert l == [(8, 'call'), + (9, 'line'), + (10, 'line'), + (11, 'line'), + (2, 'call'), + (3, 'line'), + (4, 'line'), + (4, 'return'), + (12, 'line'), + (4, 'call'), + (4, 'exception'), + (6, 'line'), + (6, 'return'), + (12, 'return')] + +def test_dont_trace_on_reraise(): + import sys + l = [] + def ltrace(a,b,c): + if b == 'exception': + l.append(c) + return ltrace + def trace(a,b,c): return ltrace + def f(): + try: + 1/0 + except: + try: + raise + except: + pass + sys.settrace(trace) + f() + sys.settrace(None) + assert len(l) == 1 + assert issubclass(l[0][0], Exception) + +def test_trace_changes_locals(): + import sys + def trace(frame, what, arg): + frame.f_locals['x'] = 42 + return trace + def f(x): + return x + sys.settrace(trace) + res = 
f(1) + sys.settrace(None) + assert res == 42 + +def test_trace_onliner_if(): + import sys + l = [] + def trace(frame, event, arg): + l.append((frame.f_lineno, event)) + return trace + def onliners(): + if True: False + else: True + return 0 + sys.settrace(trace) + onliners() + sys.settrace(None) + firstlineno = onliners.__code__.co_firstlineno + assert l == [(firstlineno + 0, 'call'), + (firstlineno + 1, 'line'), + (firstlineno + 3, 'line'), + (firstlineno + 3, 'return')] + +def test_set_unset_f_trace(): + import sys + seen = [] + def trace1(frame, what, arg): + seen.append((1, frame, frame.f_lineno, what, arg)) + return trace1 + def trace2(frame, what, arg): + seen.append((2, frame, frame.f_lineno, what, arg)) + return trace2 + def set_the_trace(f): + f.f_trace = trace1 + sys.settrace(trace2) + len(seen) # take one line: should not be traced + f = sys._getframe() + set_the_trace(f) + len(seen) # take one line: should not be traced + len(seen) # take one line: should not be traced + sys.settrace(None) # and this line should be the last line traced + len(seen) # take one line + del f.f_trace + len(seen) # take one line + firstline = set_the_trace.__code__.co_firstlineno + assert seen == [(1, f, firstline + 6, 'line', None), + (1, f, firstline + 7, 'line', None), + (1, f, firstline + 8, 'line', None)] + +def test_locals2fast_freevar_bug(): + import sys + def f(n): + class A(object): + def g(self): + return n + n = 42 + return A() + res = f(10).g() + assert res == 10 + # + def trace(*args): + return trace + sys.settrace(trace) + res = f(10).g() + sys.settrace(None) + assert res == 10 + +def test_preserve_exc_state_in_generators(): + import sys + def yield_raise(): + try: + raise KeyError("caught") + except KeyError: + yield sys.exc_info()[0] + yield sys.exc_info()[0] + + it = yield_raise() + assert next(it) is KeyError + assert next(it) is KeyError + +def test_frame_clear(): + import sys, gc, weakref + # + raises(RuntimeError, sys._getframe().clear) + def g(): + yield 5 + raises(RuntimeError, sys._getframe().clear) + yield 6 + assert list(g()) == [5, 6] + # + class A: + pass + a1 = A(); a1ref = weakref.ref(a1) + a2 = A(); a2ref = weakref.ref(a2) + seen = [] + def f(): + local_a1 = a1 + for loc in [5, 6, a2]: + try: + yield sys._getframe() + finally: + seen.append(42) + seen.append(43) + gen = f() + frame = next(gen) + a1 = a2 = None + gc.collect(); gc.collect() + assert a1ref() is not None + assert a2ref() is not None + assert seen == [] + frame.clear() + assert seen == [42] + gc.collect(); gc.collect() + assert a1ref() is None, "locals not cleared" + assert a2ref() is None, "stack not cleared" + # + raises(StopIteration, next, gen) + +def test_frame_clear_really(): + import sys + def f(x): + return sys._getframe() + frame = f(42) + assert frame.f_locals['x'] == 42 + frame.clear() + assert frame.f_locals == {} + +def test_throw_trace_bug(): + import sys + def f(): + yield 5 + gen = f() + assert next(gen) == 5 + seen = [] + def trace_func(frame, event, *args): + seen.append(event) + return trace_func + sys.settrace(trace_func) + try: + gen.throw(ValueError) + except ValueError: + pass + sys.settrace(None) + assert seen == ['call', 'exception', 'return'] + +def test_generator_trace_stopiteration(): + import sys + def f(): + yield 5 + gen = f() + assert next(gen) == 5 + seen = [] + frames = [] + def trace_func(frame, event, *args): + print('TRACE:', frame, event, args) + seen.append(event) + frames.append(frame) + return trace_func + def g(): + for x in gen: + never_entered + 
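A compact reminder of the settrace protocol that these trace tests build on: the function installed with sys.settrace() only sees the 'call' event, and whatever it returns becomes the local trace function that receives the 'line' and 'return' events for that frame. The traced add() function and the expected event list below are illustrative, not taken from the suite:

import sys

events = []

def tracer(frame, event, arg):
    events.append((frame.f_code.co_name, event))
    return tracer                 # keep receiving events for this frame

def add(a, b):
    return a + b

sys.settrace(tracer)
add(1, 2)
sys.settrace(None)
assert events == [('add', 'call'), ('add', 'line'), ('add', 'return')]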
sys.settrace(trace_func) + g() + sys.settrace(None) + print('seen:', seen) + # on Python 3 we get an extra 'exception' when 'for' catches + # StopIteration (but not always! mess) + assert seen == ['call', 'line', 'call', 'return', 'exception', 'return'] + assert frames[-2].f_code.co_name == 'g' + +def test_nongenerator_trace_stopiteration(): + import sys + gen = iter([5]) + assert next(gen) == 5 + seen = [] + frames = [] + def trace_func(frame, event, *args): + print('TRACE:', frame, event, args) + seen.append(event) + frames.append(frame) + return trace_func + def g(): + for x in gen: + never_entered + sys.settrace(trace_func) + g() + sys.settrace(None) + print('seen:', seen) + # hack: don't report the StopIteration for some "simple" + # iterators. + assert seen == ['call', 'line', 'return'] + assert frames[-2].f_code.co_name == 'g' + +def test_yieldfrom_trace_stopiteration(): + import sys + def f2(): + yield 5 + def f(): + yield from f2() + gen = f() + assert next(gen) == 5 + seen = [] + frames = [] + def trace_func(frame, event, *args): + print('TRACE:', frame, event, args) + seen.append(event) + frames.append(frame) + return trace_func + def g(): + for x in gen: + never_entered + sys.settrace(trace_func) + g() # invokes next_yield_from() from resume_execute_frame() + sys.settrace(None) + print('seen:', seen) + assert seen == ['call', 'line', 'call', 'call', 'return', + 'exception', 'return', 'exception', 'return'] + assert frames[-4].f_code.co_name == 'f' + assert frames[-2].f_code.co_name == 'g' + +def test_yieldfrom_trace_stopiteration_2(): + import sys + def f2(): + if False: + yield 5 + def f(): + yield from f2() + gen = f() + seen = [] + frames = [] + def trace_func(frame, event, *args): + print('TRACE:', frame, event, args) + seen.append(event) + frames.append(frame) + return trace_func + def g(): + for x in gen: + never_entered + sys.settrace(trace_func) + g() # invokes next_yield_from() from YIELD_FROM() + sys.settrace(None) + print('seen:', seen) + assert seen == ['call', 'line', 'call', 'line', 'call', 'line', + 'return', 'exception', 'return', 'exception', 'return'] + assert frames[-4].f_code.co_name == 'f' + assert frames[-2].f_code.co_name == 'g' + +def test_yieldfrom_trace_stopiteration_3(): + import sys + def f(): + yield from [] + gen = f() + seen = [] + frames = [] + def trace_func(frame, event, *args): + print('TRACE:', frame, event, args) + seen.append(event) + frames.append(frame) + return trace_func + def g(): + for x in gen: + never_entered + sys.settrace(trace_func) + g() # invokes next_yield_from() from YIELD_FROM() + sys.settrace(None) + print('seen:', seen) + assert seen == ['call', 'line', 'call', 'line', + 'return', 'exception', 'return'] + assert frames[-4].f_code.co_name == 'f' + +def test_local_trace_function_returning_None_ignored(): + # behave the same as CPython does, and in contradiction with + # the documentation. 
+ def tracer(f, event, arg): + assert event == 'call' + return local_tracer + + seen = [] + def local_tracer(f, event, arg): + seen.append(event) + return None # but 'local_tracer' will be called again + + def function(): + a = 1 + a = 2 + a = 3 + + import sys + sys.settrace(tracer) + function() + sys.settrace(None) + assert seen == ["line", "line", "line", "return"] + +def test_clear_locals(): + def make_frames(): + def outer(): + x = 5 + y = 6 + def inner(): + z = x + 2 + 1/0 + t = 9 + return inner() + try: + outer() + except ZeroDivisionError as e: + tb = e.__traceback__ + frames = [] + while tb: + frames.append(tb.tb_frame) + tb = tb.tb_next + return frames + + f, outer, inner = make_frames() + outer.clear() + inner.clear() + assert not outer.f_locals + assert not inner.f_locals diff --git a/pypy/interpreter/test/demomixedmod/__init__.py b/pypy/interpreter/test/demomixedmod/__init__.py --- a/pypy/interpreter/test/demomixedmod/__init__.py +++ b/pypy/interpreter/test/demomixedmod/__init__.py @@ -1,15 +0,0 @@ -from pypy.interpreter.mixedmodule import MixedModule - -class Module(MixedModule): - interpleveldefs = { - '__name__' : '(space.wrap("mixedmodule"))', - '__doc__' : '(space.wrap("mixedmodule doc"))', - 'somefunc' : 'file1.somefunc', - 'value' : '(space.w_None)', - 'path' : 'file1.initpath(space)', - 'cpypath' : 'space.wrap(sys.path)' - } - - appleveldefs = { - 'someappfunc' : 'file2_app.someappfunc', - } diff --git a/pypy/interpreter/test/demomixedmod/__init__.py b/pypy/interpreter/test/demomixedmod/moduledef.py copy from pypy/interpreter/test/demomixedmod/__init__.py copy to pypy/interpreter/test/demomixedmod/moduledef.py diff --git a/pypy/interpreter/test/fixtures.py b/pypy/interpreter/test/fixtures.py new file mode 100644 --- /dev/null +++ b/pypy/interpreter/test/fixtures.py @@ -0,0 +1,5 @@ +from _pytest.tmpdir import TempdirFactory + +def tempfile(space, config): + tmpdir = TempdirFactory(config).getbasetemp() + return space.newtext(str(tmpdir / 'tempfile1')) diff --git a/pypy/interpreter/test/test_appinterp.py b/pypy/interpreter/test/test_appinterp.py --- a/pypy/interpreter/test/test_appinterp.py +++ b/pypy/interpreter/test/test_appinterp.py @@ -3,32 +3,32 @@ from pypy.interpreter.gateway import appdef, ApplevelClass, applevel_temp from pypy.interpreter.error import OperationError -def test_execwith_novars(space): - val = space.appexec([], """ - (): - return 42 - """) +def test_execwith_novars(space): + val = space.appexec([], """ + (): + return 42 + """) assert space.eq_w(val, space.wrap(42)) -def test_execwith_withvars(space): +def test_execwith_withvars(space): val = space.appexec([space.wrap(7)], """ - (x): - y = 6 * x - return y - """) + (x): + y = 6 * x + return y + """) assert space.eq_w(val, space.wrap(42)) -def test_execwith_compile_error(space): +def test_execwith_compile_error(space): excinfo = py.test.raises(OperationError, space.appexec, [], """ - (): - y y + (): + y y """) # NOTE: the following test only works because excinfo.value is not # normalized so far - assert str(excinfo.value.get_w_value(space)).find('y y') != -1 + assert str(excinfo.value.get_w_value(space)).find('y y') != -1 def test_simple_applevel(space): - app = appdef("""app(x,y): + app = appdef("""app(x,y): return x + y """) assert app.__name__ == 'app' @@ -36,15 +36,15 @@ assert space.eq_w(w_result, space.wrap(42)) def test_applevel_with_one_default(space): - app = appdef("""app(x,y=1): + app = appdef("""app(x,y=1): return x + y """) assert app.__name__ == 'app' - w_result = app(space, 
space.wrap(41)) + w_result = app(space, space.wrap(41)) assert space.eq_w(w_result, space.wrap(42)) def test_applevel_with_two_defaults(space): - app = appdef("""app(x=1,y=2): + app = appdef("""app(x=1,y=2): return x + y """) w_result = app(space, space.wrap(41), space.wrap(1)) @@ -58,19 +58,19 @@ def test_applevel_noargs(space): - app = appdef("""app(): - return 42 + app = appdef("""app(): + return 42 """) assert app.__name__ == 'app' - w_result = app(space) + w_result = app(space) assert space.eq_w(w_result, space.wrap(42)) -def somefunc(arg2=42): - return arg2 +def somefunc(arg2=42): + return arg2 -def test_app2interp_somefunc(space): - app = appdef(somefunc) - w_result = app(space) +def test_app2interp_somefunc(space): + app = appdef(somefunc) + w_result = app(space) assert space.eq_w(w_result, space.wrap(42)) def test_applevel_functions(space, applevel_temp = applevel_temp): @@ -87,48 +87,49 @@ def test_applevel_class(space, applevel_temp = applevel_temp): app = applevel_temp(''' class C(object): - clsattr = 42 - def __init__(self, x=13): - self.attr = x + clsattr = 42 + def __init__(self, x=13): + self.attr = x ''') C = app.interphook('C') - c = C(space, space.wrap(17)) + c = C(space, space.wrap(17)) w_attr = space.getattr(c, space.wrap('clsattr')) assert space.eq_w(w_attr, space.wrap(42)) w_clsattr = space.getattr(c, space.wrap('attr')) assert space.eq_w(w_clsattr, space.wrap(17)) -def app_test_something_at_app_level(): +def app_test_something_at_app_level(): x = 2 assert x/2 == 1 -class AppTestMethods: - def test_some_app_test_method(self): +class AppTestMethods: + def test_some_app_test_method(self): assert 2 == 2 -class TestMixedModule: - def test_accesses(self): +class TestMixedModule: + def test_accesses(self): space = self.space - import demomixedmod - w_module = demomixedmod.Module(space, space.wrap('mixedmodule')) + from .demomixedmod.moduledef import Module + w_module = Module(space, space.wrap('mixedmodule')) space.appexec([w_module], """ - (module): - assert module.value is None + (module): + assert module.value is None assert module.__doc__ == 'mixedmodule doc' - assert module.somefunc is module.somefunc - result = module.somefunc() - assert result == True + assert module.somefunc is module.somefunc + result = module.somefunc() + assert result == True - assert module.someappfunc is module.someappfunc - appresult = module.someappfunc(41) - assert appresult == 42 + assert module.someappfunc is module.someappfunc + appresult = module.someappfunc(41) + assert appresult == 42 assert module.__dict__ is module.__dict__ - for name in ('somefunc', 'someappfunc', '__doc__', '__name__'): + for name in ('somefunc', 'someappfunc', '__doc__', '__name__'): assert name in module.__dict__ """) assert space.is_true(w_module.call('somefunc')) + assert Module.get_applevel_name() == 'demomixedmod' def test_whacking_at_loaders(self): """Some MixedModules change 'self.loaders' in __init__(), but doing diff --git a/pypy/interpreter/test/test_extmodules.py b/pypy/interpreter/test/test_extmodules.py --- a/pypy/interpreter/test/test_extmodules.py +++ b/pypy/interpreter/test/test_extmodules.py @@ -2,10 +2,10 @@ import pytest from pypy.config.pypyoption import get_pypy_config -from pypy.objspace.std import StdObjSpace +from pypy.objspace.std.objspace import StdObjSpace from rpython.tool.udir import udir From pypy.commits at gmail.com Thu Aug 1 12:42:46 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 01 Aug 2019 09:42:46 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: hg merge default 
Message-ID: <5d431686.1c69fb81.ede47.39b5@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97047:cc05f1d8f542 Date: 2019-08-01 17:41 +0100 http://bitbucket.org/pypy/pypy/changeset/cc05f1d8f542/ Log: hg merge default diff --git a/pypy/doc/coding-guide.rst b/pypy/doc/coding-guide.rst --- a/pypy/doc/coding-guide.rst +++ b/pypy/doc/coding-guide.rst @@ -456,13 +456,10 @@ Testing modules in ``lib_pypy/`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You can go to the :source:`pypy/module/test_lib_pypy/` directory and invoke the testing tool -("py.test" or "python ../../pypy/test_all.py") to run tests against the -lib_pypy hierarchy. Note, that tests in :source:`pypy/module/test_lib_pypy/` are allowed -and encouraged to let their tests run at interpreter level although -:source:`lib_pypy/` modules eventually live at PyPy's application level. -This allows us to quickly test our python-coded reimplementations -against CPython. +You can go to the :source:`pypy/module/test_lib_pypy/` directory and invoke the +testing tool ("py.test" or "python ../../pypy/test_all.py") to run tests +against the lib_pypy hierarchy. This allows us to quickly test our +python-coded reimplementations against CPython. Testing modules in ``pypy/module`` @@ -585,25 +582,42 @@ module global level and use plain 'assert' statements thanks to the usage of the `py.test`_ tool. - -Application Level tests +Application level tests ~~~~~~~~~~~~~~~~~~~~~~~ For testing the conformance and well-behavedness of PyPy it is often sufficient to write "normal" application-level Python code that doesn't need to be aware of any particular -coding style or restrictions. If we have a choice we often -use application level tests which usually look like this:: +coding style or restrictions. If we have a choice we often +use application level tests which are in files whose name starts with the +`apptest_` prefix and look like this:: - def app_test_something(): + def test_this(): # application level test code +These application level test functions will run on top +of PyPy, i.e. they have no access to interpreter details. + +By default, they run on top of an untranslated PyPy which runs on top of the +host interpreter. When passing the `-D` option, they run directly on top of the +host interpreter, which is usually a translated pypy executable in this case:: + + pypy3 -m pytest -D pypy/ + +Note that in interpreted mode, only a small subset of pytest's functionality is +available. + +Mixed-level tests (deprecated) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Mixed-level tests are similar to application-level tests, the difference being +that they're just snippets of app-level code embedded in an interp-level test +file, like this:: + class AppTestSomething(object): def test_this(self): # application level test code -These application level test functions will run on top -of PyPy, i.e. they have no access to interpreter details. You cannot use imported modules from global level because they are imported at interpreter-level while you test code runs at application level. If you need to use modules diff --git a/pypy/doc/contributing.rst b/pypy/doc/contributing.rst --- a/pypy/doc/contributing.rst +++ b/pypy/doc/contributing.rst @@ -329,11 +329,18 @@ Testing After Translation ^^^^^^^^^^^^^^^^^^^^^^^^^ -While the usual invocation of `pytest` translates a piece of RPython code and -runs it, we have a test extension to run tests without translation, directly -on the host python. 
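As a concrete illustration (the path and test name here are invented), such a file is nothing more than plain pytest-style code, with no space objects and no wrapper class:

# pypy/module/somepkg/test/apptest_example.py   (hypothetical path)
def test_squares():
    squares = [i * i for i in range(5)]
    assert squares == [0, 1, 4, 9, 16]

By default it runs on an untranslated PyPy sitting on top of the host interpreter; with the -D option the same file is handed straight to the host interpreter, e.g. pypy3 -m pytest -D pypy/module/somepkg/test/apptest_example.py.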
This is very convenient for modules such as `cpyext`, to -compare and contrast test results between CPython and PyPy. Untranslated tests -are invoked by using the `-A` or `--runappdirect` option to `pytest`:: +While the usual invocation of `pytest` runs app-level tests on an untranslated +PyPy that runs on top of CPython, we have a test extension to run tests +directly on the host python. This is very convenient for modules such as +`cpyext`, to compare and contrast test results between CPython and PyPy. + +App-level tests run directly on the host interpreter when passing `-D` or +`--direct-apptest` to `pytest`:: + + pypy3 -m pytest -D pypy/interpreter/test/apptest_pyframe.py + +Mixed-level tests are invoked by using the `-A` or `--runappdirect` option to +`pytest`:: python2 pytest.py -A pypy/module/cpyext/test From pypy.commits at gmail.com Thu Aug 1 12:56:21 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 01 Aug 2019 09:56:21 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Remove obsolete test file (replaced by apptest_coroutine.py) Message-ID: <5d4319b5.1c69fb81.feef2.f0d5@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97048:1c9aa169eaaf Date: 2019-08-01 17:55 +0100 http://bitbucket.org/pypy/pypy/changeset/1c9aa169eaaf/ Log: Remove obsolete test file (replaced by apptest_coroutine.py) diff --git a/pypy/interpreter/test/test_coroutine.py b/pypy/interpreter/test/test_coroutine.py deleted file mode 100644 --- a/pypy/interpreter/test/test_coroutine.py +++ /dev/null @@ -1,735 +0,0 @@ - -class AppTestCoroutine: - - def test_cannot_iterate(self): """ - async def f(x): - pass - raises(TypeError, "for i in f(5): pass") - raises(TypeError, iter, f(5)) - raises(TypeError, next, f(5)) - """ - - def test_async_for(self): """ - class X: - def __aiter__(self): - return MyAIter() - class MyAIter: - async def __anext__(self): - return 42 - async def f(x): - sum = 0 - async for a in x: - sum += a - if sum > 100: - break - return sum - cr = f(X()) - try: - cr.send(None) - except StopIteration as e: - assert e.value == 42 * 3 - else: - assert False, "should have raised" - """ - - def test_StopAsyncIteration(self): """ - class X: - def __aiter__(self): - return MyAIter() - class MyAIter: - count = 0 - async def __anext__(self): - if self.count == 3: - raise StopAsyncIteration - self.count += 1 - return 42 - async def f(x): - sum = 0 - async for a in x: - sum += a - return sum - cr = f(X()) - try: - cr.send(None) - except StopIteration as e: - assert e.value == 42 * 3 - else: - assert False, "should have raised" - """ - - def test_async_for_old_style(self): """ - class X: - def __aiter__(self): - return MyAIter() - class MyAIter: - def __await__(self): - return iter([20, 30]) - async def f(x): - sum = 0 - async for a in x: - sum += a - if sum > 100: - break - return sum - cr = f(X()) - assert next(cr.__await__()) == 20 - """ - - def test_for_error_cause(self): """ - class F: - def __aiter__(self): - return self - def __anext__(self): - return self - def __await__(self): - 1 / 0 - - async def main(): - async for _ in F(): - pass - - c = raises(TypeError, main().send, None) - assert 'an invalid object from __anext__' in c.value.args[0], c.value - assert isinstance(c.value.__cause__, ZeroDivisionError) - """ - - def test_set_coroutine_wrapper(self): """ - import sys - async def f(): - pass - seen = [] - def my_wrapper(cr): - seen.append(cr) - return 42 - assert sys.get_coroutine_wrapper() is None - sys.set_coroutine_wrapper(my_wrapper) - assert sys.get_coroutine_wrapper() is my_wrapper - cr 
= f() - assert cr == 42 - sys.set_coroutine_wrapper(None) - assert sys.get_coroutine_wrapper() is None - """ - - def test_async_with(self): """ - seen = [] - class X: - async def __aenter__(self): - seen.append('aenter') - async def __aexit__(self, *args): - seen.append('aexit') - async def f(x): - async with x: - return 42 - c = f(X()) - try: - c.send(None) - except StopIteration as e: - assert e.value == 42 - else: - assert False, "should have raised" - assert seen == ['aenter', 'aexit'] - """ - - def test_async_with_exit_True(self): """ - seen = [] - class X: - async def __aenter__(self): - seen.append('aenter') - async def __aexit__(self, *args): - seen.append('aexit') - return True - async def f(x): - async with x: - return 42 - c = f(X()) - try: - c.send(None) - except StopIteration as e: - assert e.value == 42 - else: - assert False, "should have raised" - assert seen == ['aenter', 'aexit'] - """ - - def test_await(self): """ - class X: - def __await__(self): - i1 = yield 40 - assert i1 == 82 - i2 = yield 41 - assert i2 == 93 - async def f(): - await X() - await X() - c = f() - assert c.send(None) == 40 - assert c.send(82) == 41 - assert c.send(93) == 40 - assert c.send(82) == 41 - raises(StopIteration, c.send, 93) - """ - - def test_await_error(self): """ - async def f(): - await [42] - c = f() - try: - c.send(None) - except TypeError as e: - assert str(e) == "object list can't be used in 'await' expression" - else: - assert False, "should have raised" - """ - - def test_async_with_exception_context(self): """ - class CM: - async def __aenter__(self): - pass - async def __aexit__(self, *e): - 1/0 - async def f(): - async with CM(): - raise ValueError - c = f() - try: - c.send(None) - except ZeroDivisionError as e: - assert e.__context__ is not None - assert isinstance(e.__context__, ValueError) - else: - assert False, "should have raised" - """ - - def test_runtime_warning(self): """ - import gc, warnings - async def foobaz(): - pass - with warnings.catch_warnings(record=True) as l: - foobaz() - gc.collect() - gc.collect() - gc.collect() - - assert len(l) == 1, repr(l) - w = l[0].message - assert isinstance(w, RuntimeWarning) - assert str(w).startswith("coroutine ") - assert str(w).endswith("foobaz' was never awaited") - """ - - def test_async_for_with_tuple_subclass(self): """ - class Done(Exception): pass - - class AIter(tuple): - i = 0 - def __aiter__(self): - return self - async def __anext__(self): - if self.i >= len(self): - raise StopAsyncIteration - self.i += 1 - return self[self.i - 1] - - result = [] - async def foo(): - async for i in AIter([42]): - result.append(i) - raise Done - - try: - foo().send(None) - except Done: - pass - assert result == [42] - """ - - def test_async_yield(self): """ - class Done(Exception): pass - - async def mygen(): - yield 5 - - result = [] - async def foo(): - async for i in mygen(): - result.append(i) - raise Done - - try: - foo().send(None) - except Done: - pass - assert result == [5] - """ - - def test_async_yield_already_finished(self): """ - class Done(Exception): pass - - async def mygen(): - yield 5 - - result = [] - async def foo(): - g = mygen() - async for i in g: - result.append(i) - async for i in g: - assert False # should not be reached - raise Done - - try: - foo().send(None) - except Done: - pass - assert result == [5] - """ - - def test_async_yield_with_await(self): """ - class Done(Exception): pass - - class X: - def __await__(self): - i1 = yield 40 - assert i1 == 82 - i2 = yield 41 - assert i2 == 93 - - async def mygen(): 
- yield 5 - await X() - yield 6 - - result = [] - async def foo(): - async for i in mygen(): - result.append(i) - raise Done - - co = foo() - x = co.send(None) - assert x == 40 - assert result == [5] - x = co.send(82) - assert x == 41 - assert result == [5] - raises(Done, co.send, 93) - assert result == [5, 6] - """ - - def test_async_yield_with_explicit_send(self): """ - class X: - def __await__(self): - i1 = yield 40 - assert i1 == 82 - i2 = yield 41 - assert i2 == 93 - - async def mygen(): - x = yield 5 - assert x == 2189 - await X() - y = yield 6 - assert y == 319 - - result = [] - async def foo(): - gen = mygen() - result.append(await gen.asend(None)) - result.append(await gen.asend(2189)) - try: - await gen.asend(319) - except StopAsyncIteration: - return 42 - else: - raise AssertionError - - co = foo() - x = co.send(None) - assert x == 40 - assert result == [5] - x = co.send(82) - assert x == 41 - assert result == [5] - e = raises(StopIteration, co.send, 93) - assert e.value.args == (42,) - assert result == [5, 6] - """ - - def test_async_yield_explicit_asend_and_next(self): """ - async def mygen(y): - assert y == 4983 - x = yield 5 - assert x == 2189 - yield "ok" - - g = mygen(4983) - raises(TypeError, g.asend(42).__next__) - e = raises(StopIteration, g.asend(None).__next__) - assert e.value.args == (5,) - e = raises(StopIteration, g.asend(2189).__next__) - assert e.value.args == ("ok",) - """ - - def test_async_yield_explicit_asend_and_send(self): """ - async def mygen(y): - assert y == 4983 - x = yield 5 - assert x == 2189 - yield "ok" - - g = mygen(4983) - e = raises(TypeError, g.asend(None).send, 42) - assert str(e.value) == ("can't send non-None value to a just-started " - "async generator") - e = raises(StopIteration, g.asend(None).send, None) - assert e.value.args == (5,) - e = raises(StopIteration, g.asend("IGNORED").send, 2189) # xxx - assert e.value.args == ("ok",) - """ - - def test_async_yield_explicit_asend_used_several_times(self): """ - class X: - def __await__(self): - r = yield -2 - assert r == "cont1" - r = yield -3 - assert r == "cont2" - return -4 - async def mygen(y): - x = await X() - assert x == -4 - r = yield -5 - assert r == "foo" - r = yield -6 - assert r == "bar" - - g = mygen(4983) - gs = g.asend(None) - r = gs.send(None) - assert r == -2 - r = gs.send("cont1") - assert r == -3 - e = raises(StopIteration, gs.send, "cont2") - assert e.value.args == (-5,) - e = raises(StopIteration, gs.send, None) - assert e.value.args == () - e = raises(StopIteration, gs.send, None) - assert e.value.args == () - # - gs = g.asend("foo") - e = raises(StopIteration, gs.send, None) - assert e.value.args == (-6,) - e = raises(StopIteration, gs.send, "bar") - assert e.value.args == () - """ - - def test_async_yield_asend_notnone_throw(self): """ - async def f(): - yield 123 - - raises(ValueError, f().asend(42).throw, ValueError) - """ - - def test_async_yield_asend_none_throw(self): """ - async def f(): - yield 123 - - raises(ValueError, f().asend(None).throw, ValueError) - """ - - def test_async_yield_athrow_send_none(self): """ - async def ag(): - yield 42 - - raises(ValueError, ag().athrow(ValueError).send, None) - """ - - def test_async_yield_athrow_send_notnone(self): """ - async def ag(): - yield 42 - - ex = raises(RuntimeError, ag().athrow(ValueError).send, 42) - expected = ("can't send non-None value to a just-started coroutine", ) - assert ex.value.args == expected - """ - - def test_async_yield_athrow_send_after_exception(self): """ - async def ag(): - yield 42 - - 
athrow_coro = ag().athrow(ValueError) - raises(ValueError, athrow_coro.send, None) - raises(StopIteration, athrow_coro.send, None) - """ - - def test_async_yield_athrow_throw(self): """ - async def ag(): - yield 42 - - raises(RuntimeError, ag().athrow(ValueError).throw, LookupError) - # CPython's message makes little sense; PyPy's message is different - """ - - def test_async_yield_athrow_while_running(self): """ - values = [] - async def ag(): - try: - received = yield 1 - except ValueError: - values.append(42) - return - yield 2 - - - async def run(): - running = ag() - x = await running.asend(None) - assert x == 1 - try: - await running.athrow(ValueError) - except StopAsyncIteration: - pass - - - try: - run().send(None) - except StopIteration: - assert values == [42] - """ - - def test_async_aclose(self): """ - raises_generator_exit = False - async def ag(): - nonlocal raises_generator_exit - try: - yield - except GeneratorExit: - raises_generator_exit = True - raise - - async def run(): - a = ag() - async for i in a: - break - await a.aclose() - try: - run().send(None) - except StopIteration: - pass - assert raises_generator_exit - """ - - def test_async_aclose_ignore_generator_exit(self): """ - async def ag(): - try: - yield - except GeneratorExit: - yield - - async def run(): - a = ag() - async for i in a: - break - await a.aclose() - raises(RuntimeError, run().send, None) - """ - - def test_async_aclose_await_in_finally(self): """ - import types - - @types.coroutine - def coro(): - yield 'coro' - - state = 0 - async def ag(): - nonlocal state - try: - yield - finally: - state = 1 - await coro() - state = 2 - - async def run(): - a = ag() - async for i in a: - break - await a.aclose() - a = run() - assert state == 0 - assert a.send(None) == 'coro' - assert state == 1 - try: - a.send(None) - except StopIteration: - pass - assert state == 2 - """ - - def test_async_aclose_await_in_finally_with_exception(self): """ - import types - - @types.coroutine - def coro(): - yield 'coro' - - state = 0 - async def ag(): - nonlocal state - try: - yield - finally: - state = 1 - try: - await coro() - except Exception as exc: - state = exc - - async def run(): - a = ag() - async for i in a: - break - await a.aclose() - a = run() - assert state == 0 - assert a.send(None) == 'coro' - assert state == 1 - exc = RuntimeError() - try: - a.throw(exc) - except StopIteration: - pass - assert state == exc - """ - - def test_async_aclose_in_finalize_hook_await_in_finally(self): """ - import gc - import sys - import types - - @types.coroutine - def coro(): - yield 'coro' - - state = 0 - async def ag(): - nonlocal state - try: - yield - finally: - state = 1 - await coro() - state = 2 - - async def run(): - a = ag() - async for i in a: - break - del a - gc.collect() - gc.collect() - gc.collect() - a = run() - - a2 = None - assert sys.get_asyncgen_hooks() == (None, None) - def _finalize(g): - nonlocal a2 - a2 = g.aclose() - sys.set_asyncgen_hooks(finalizer=_finalize) - assert state == 0 - try: - a.send(None) - except StopIteration: - pass - assert a2.send(None) == 'coro' - assert state == 1 - try: - a2.send(None) - except StopIteration: - pass - assert state == 2 - sys.set_asyncgen_hooks(None, None) - """ - - def test_async_anext_close(self): """ - async def ag(): - yield 42 - - an = ag().__anext__() - an.close() - try: - next(an) - except StopIteration as e: - assert e.value is None - else: - assert False, "didn't raise" - """ - - def w_run_async(self, coro): - buffer = [] - result = None - while True: - try: - 
buffer.append(coro.send(None)) - except StopIteration as ex: - result = ex.args[0] if ex.args else None - break - return buffer, result - - def test_async_generator(self): - """ - async def f(i): - return i - - async def run_list(): - return [await c for c in [f(1), f(41)]] - - assert self.run_async(run_list()) == ([], [1, 41]) - """ - - def test_async_genexpr(self): - """ - async def f(it): - for i in it: - yield i - - async def run_gen(): - gen = (i + 1 async for i in f([10, 20])) - return [g + 100 async for g in gen] - - assert self.run_async(run_gen()) == ([], [111, 121]) - """ - - def test_anext_tuple(self): - """ - async def foo(): - try: - yield (1,) - except ZeroDivisionError: - yield (2,) - - async def run(): - it = foo().__aiter__() - return await it.__anext__() - - assert self.run_async(run()) == ([], (1,)) - """ - - def test_asyncgen_yield_stopiteration(self): - """ - async def foo(): - yield 1 - yield StopIteration(2) - - async def run(): - it = foo().__aiter__() - val1 = await it.__anext__() - assert val1 == 1 - val2 = await it.__anext__() - assert isinstance(val2, StopIteration) - assert val2.value == 2 - - self.run_async(run()) - """ From pypy.commits at gmail.com Fri Aug 2 08:28:58 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 02 Aug 2019 05:28:58 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: PyUnicode_AsUCS4, PyUnicode_AsUCS4Copy Message-ID: <5d442c8a.1c69fb81.4eff3.d494@mx.google.com> Author: Armin Rigo Branch: py3.6 Changeset: r97049:3ec1002a818c Date: 2019-08-02 14:28 +0200 http://bitbucket.org/pypy/pypy/changeset/3ec1002a818c/ Log: PyUnicode_AsUCS4, PyUnicode_AsUCS4Copy diff --git a/pypy/module/cpyext/test/test_unicodeobject.py b/pypy/module/cpyext/test/test_unicodeobject.py --- a/pypy/module/cpyext/test/test_unicodeobject.py +++ b/pypy/module/cpyext/test/test_unicodeobject.py @@ -1008,3 +1008,32 @@ _PyUnicode_Ready(space, py_str) assert get_kind(py_str) == 4 assert get_ascii(py_str) == 0 + + def test_as_ucs4(self, space): + w_x = space.wrap(u"ab\u0660") + count1 = space.int_w(space.len(w_x)) + x_chunk = PyUnicode_AsUCS4Copy(space, w_x) + assert x_chunk[0] == ord('a') + assert x_chunk[1] == ord('b') + assert x_chunk[2] == 0x0660 + assert x_chunk[3] == 0 + Py_UCS4 = lltype.typeOf(x_chunk).TO.OF + lltype.free(x_chunk, flavor='raw', track_allocation=False) + + target_chunk = lltype.malloc(rffi.CArray(Py_UCS4), 4, flavor='raw') + target_chunk[3] = rffi.cast(Py_UCS4, 99999) + x_chunk = PyUnicode_AsUCS4(space, w_x, target_chunk, 3, 0) + assert x_chunk == target_chunk + assert x_chunk[0] == ord('a') + assert x_chunk[1] == ord('b') + assert x_chunk[2] == 0x0660 + assert x_chunk[3] == 99999 + + x_chunk[2] = rffi.cast(Py_UCS4, 77777) + x_chunk = PyUnicode_AsUCS4(space, w_x, target_chunk, 4, 1) + assert x_chunk == target_chunk + assert x_chunk[0] == ord('a') + assert x_chunk[1] == ord('b') + assert x_chunk[2] == 0x0660 + assert x_chunk[3] == 0 + lltype.free(target_chunk, flavor='raw') diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py --- a/pypy/module/cpyext/unicodeobject.py +++ b/pypy/module/cpyext/unicodeobject.py @@ -29,6 +29,7 @@ cts.parse_header(parse_dir / 'cpyext_unicodeobject.h') PyUnicodeObject = cts.gettype('PyUnicodeObject*') Py_UNICODE = cts.gettype('Py_UNICODE') +Py_UCS4 = cts.gettype('Py_UCS4') INT_realP = lltype.Ptr(lltype.Array(rffi.INT_real, hints={'nolength': True})) @bootstrap_function @@ -1081,3 +1082,28 @@ return space.call_method(w_str, '__getitem__', space.newslice(space.newint(start), space.newint(end), 
space.newint(1))) + + at cts.decl("Py_UCS4 *PyUnicode_AsUCS4(PyObject *u, Py_UCS4 *buffer, Py_ssize_t buflen, int copy_null)") +def PyUnicode_AsUCS4(space, ref, pbuffer, buflen, copy_null): + c_buffer = PyUnicode_AsUnicode(space, ref) + c_length = get_wsize(ref) + + size = c_length + if copy_null: + size += 1 + if not pbuffer: # internal, for PyUnicode_AsUCS4Copy() + pbuffer = lltype.malloc(rffi.CArray(Py_UCS4), size, + flavor='raw', track_allocation=False) + elif buflen < size: + raise oefmt(space.w_SystemError, "PyUnicode_AsUCS4: buflen too short") + + i = 0 + while i < size: + pbuffer[i] = rffi.cast(Py_UCS4, c_buffer[i]) + i += 1 + return pbuffer + + at cts.decl("Py_UCS4 *PyUnicode_AsUCS4Copy(PyObject *u)") +def PyUnicode_AsUCS4Copy(space, ref): + return PyUnicode_AsUCS4(space, ref, cts.cast('Py_UCS4*', 0), 0, + rffi.cast(rffi.INT_real, 1)) From pypy.commits at gmail.com Fri Aug 2 11:42:23 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 02 Aug 2019 08:42:23 -0700 (PDT) Subject: [pypy-commit] pypy vendor/stdlib-3.6: Fix v3.6.1 import so that the process described in stdlib-upgrade.txt works as intended Message-ID: <5d4459df.1c69fb81.8d330.1447@mx.google.com> Author: Ronan Lamy Branch: vendor/stdlib-3.6 Changeset: r97050:058b99d6e81f Date: 2019-08-02 16:41 +0100 http://bitbucket.org/pypy/pypy/changeset/058b99d6e81f/ Log: Fix v3.6.1 import so that the process described in stdlib- upgrade.txt works as intended diff too long, truncating to 2000 out of 5488 lines diff --git a/lib-python/3/idlelib/AutoComplete.py b/lib-python/3/idlelib/autocomplete.py rename from lib-python/3/idlelib/AutoComplete.py rename to lib-python/3/idlelib/autocomplete.py diff --git a/lib-python/3/idlelib/AutoExpand.py b/lib-python/3/idlelib/autoexpand.py rename from lib-python/3/idlelib/AutoExpand.py rename to lib-python/3/idlelib/autoexpand.py diff --git a/lib-python/3/idlelib/CallTips.py b/lib-python/3/idlelib/calltips.py rename from lib-python/3/idlelib/CallTips.py rename to lib-python/3/idlelib/calltips.py diff --git a/lib-python/3/idlelib/CodeContext.py b/lib-python/3/idlelib/codecontext.py rename from lib-python/3/idlelib/CodeContext.py rename to lib-python/3/idlelib/codecontext.py diff --git a/lib-python/3/idlelib/configDialog.py b/lib-python/3/idlelib/configdialog.py rename from lib-python/3/idlelib/configDialog.py rename to lib-python/3/idlelib/configdialog.py diff --git a/lib-python/3/idlelib/Debugger.py b/lib-python/3/idlelib/debugger.py rename from lib-python/3/idlelib/Debugger.py rename to lib-python/3/idlelib/debugger.py diff --git a/lib-python/3/idlelib/Delegator.py b/lib-python/3/idlelib/delegator.py rename from lib-python/3/idlelib/Delegator.py rename to lib-python/3/idlelib/delegator.py diff --git a/lib-python/3/idlelib/FileList.py b/lib-python/3/idlelib/filelist.py rename from lib-python/3/idlelib/FileList.py rename to lib-python/3/idlelib/filelist.py diff --git a/lib-python/3/idlelib/HyperParser.py b/lib-python/3/idlelib/hyperparser.py rename from lib-python/3/idlelib/HyperParser.py rename to lib-python/3/idlelib/hyperparser.py diff --git a/lib-python/3/idlelib/MultiCall.py b/lib-python/3/idlelib/multicall.py rename from lib-python/3/idlelib/MultiCall.py rename to lib-python/3/idlelib/multicall.py diff --git a/lib-python/3/idlelib/ParenMatch.py b/lib-python/3/idlelib/parenmatch.py rename from lib-python/3/idlelib/ParenMatch.py rename to lib-python/3/idlelib/parenmatch.py diff --git a/lib-python/3/idlelib/PathBrowser.py b/lib-python/3/idlelib/pathbrowser.py rename from 
lib-python/3/idlelib/PathBrowser.py rename to lib-python/3/idlelib/pathbrowser.py diff --git a/lib-python/3/idlelib/Percolator.py b/lib-python/3/idlelib/percolator.py rename from lib-python/3/idlelib/Percolator.py rename to lib-python/3/idlelib/percolator.py diff --git a/lib-python/3/idlelib/PyParse.py b/lib-python/3/idlelib/pyparse.py rename from lib-python/3/idlelib/PyParse.py rename to lib-python/3/idlelib/pyparse.py diff --git a/lib-python/3/idlelib/PyShell.py b/lib-python/3/idlelib/pyshell.py rename from lib-python/3/idlelib/PyShell.py rename to lib-python/3/idlelib/pyshell.py --- a/lib-python/3/idlelib/PyShell.py +++ b/lib-python/3/idlelib/pyshell.py @@ -5,15 +5,15 @@ except ImportError: print("** IDLE can't import Tkinter.\n" "Your Python may not be configured for Tk. **", file=sys.__stderr__) - sys.exit(1) + raise SystemExit(1) import tkinter.messagebox as tkMessageBox if TkVersion < 8.5: root = Tk() # otherwise create root in main root.withdraw() tkMessageBox.showerror("Idle Cannot Start", - "Idle requires tcl/tk 8.5+, not $s." % TkVersion, + "Idle requires tcl/tk 8.5+, not %s." % TkVersion, parent=root) - sys.exit(1) + raise SystemExit(1) from code import InteractiveInterpreter import getopt diff --git a/lib-python/3/idlelib/ScrolledList.py b/lib-python/3/idlelib/scrolledlist.py rename from lib-python/3/idlelib/ScrolledList.py rename to lib-python/3/idlelib/scrolledlist.py diff --git a/lib-python/3/idlelib/SearchEngine.py b/lib-python/3/idlelib/searchengine.py rename from lib-python/3/idlelib/SearchEngine.py rename to lib-python/3/idlelib/searchengine.py diff --git a/lib-python/3/idlelib/StackViewer.py b/lib-python/3/idlelib/stackviewer.py rename from lib-python/3/idlelib/StackViewer.py rename to lib-python/3/idlelib/stackviewer.py diff --git a/lib-python/3/idlelib/textView.py b/lib-python/3/idlelib/textview.py rename from lib-python/3/idlelib/textView.py rename to lib-python/3/idlelib/textview.py diff --git a/lib-python/3/idlelib/ToolTip.py b/lib-python/3/idlelib/tooltip.py rename from lib-python/3/idlelib/ToolTip.py rename to lib-python/3/idlelib/tooltip.py diff --git a/lib-python/3/idlelib/ZoomHeight.py b/lib-python/3/idlelib/zoomheight.py rename from lib-python/3/idlelib/ZoomHeight.py rename to lib-python/3/idlelib/zoomheight.py diff --git a/lib-python/3/site-packages/README b/lib-python/3/site-packages/README deleted file mode 100644 --- a/lib-python/3/site-packages/README +++ /dev/null @@ -1,2 +0,0 @@ -This directory exists so that 3rd party packages can be installed -here. Read the source for site.py for more details. 
diff --git a/lib-python/3/test/mod_generics_cache.py b/lib-python/3/test/mod_generics_cache.py new file mode 100644 --- /dev/null +++ b/lib-python/3/test/mod_generics_cache.py @@ -0,0 +1,14 @@ +"""Module for testing the behavior of generics across different modules.""" + +from typing import TypeVar, Generic + +T = TypeVar('T') + + +class A(Generic[T]): + pass + + +class B(Generic[T]): + class A(Generic[T]): + pass diff --git a/lib-python/3/test/mp_preload.py b/lib-python/3/test/mp_preload.py new file mode 100644 --- /dev/null +++ b/lib-python/3/test/mp_preload.py @@ -0,0 +1,18 @@ +import multiprocessing + +multiprocessing.Lock() + + +def f(): + print("ok") + + +if __name__ == "__main__": + ctx = multiprocessing.get_context("forkserver") + modname = "test.mp_preload" + # Make sure it's importable + __import__(modname) + ctx.set_forkserver_preload([modname]) + proc = ctx.Process(target=f) + proc.start() + proc.join() diff --git a/lib-python/3/test/regrtest.py b/lib-python/3/test/regrtest.py old mode 100644 new mode 100755 diff --git a/lib-python/3/timeit.py b/lib-python/3/timeit.py old mode 100644 new mode 100755 diff --git a/lib-python/3/venv/scripts/posix/activate b/lib-python/3/venv/scripts/common/activate rename from lib-python/3/venv/scripts/posix/activate rename to lib-python/3/venv/scripts/common/activate diff --git a/lib_pypy/_ctypes_test.c b/lib_pypy/_ctypes_test.c new file mode 100644 --- /dev/null +++ b/lib_pypy/_ctypes_test.c @@ -0,0 +1,687 @@ +#include + +#ifdef MS_WIN32 +#include +#endif + +#if defined(MS_WIN32) || defined(__CYGWIN__) +#define EXPORT(x) __declspec(dllexport) x +#else +#define EXPORT(x) x +#endif + +/* some functions handy for testing */ + +EXPORT(int) +_testfunc_cbk_reg_int(int a, int b, int c, int d, int e, + int (*func)(int, int, int, int, int)) +{ + return func(a*a, b*b, c*c, d*d, e*e); +} + +EXPORT(double) +_testfunc_cbk_reg_double(double a, double b, double c, double d, double e, + double (*func)(double, double, double, double, double)) +{ + return func(a*a, b*b, c*c, d*d, e*e); +} + +/* + * This structure should be the same as in test_callbacks.py and the + * method test_callback_large_struct. See issues 17310 and 20160: the + * structure must be larger than 8 bytes long. + */ + +typedef struct { + unsigned long first; + unsigned long second; + unsigned long third; +} Test; + +EXPORT(void) +_testfunc_cbk_large_struct(Test in, void (*func)(Test)) +{ + func(in); +} + +/* + * See issue 29565. Update a structure passed by value; + * the caller should not see any change. + */ + +EXPORT(void) +_testfunc_large_struct_update_value(Test in) +{ + in.first = 0x0badf00d; + in.second = 0x0badf00d; + in.third = 0x0badf00d; +} + +EXPORT(void)testfunc_array(int values[4]) +{ + printf("testfunc_array %d %d %d %d\n", + values[0], + values[1], + values[2], + values[3]); +} + +EXPORT(long double)testfunc_Ddd(double a, double b) +{ + long double result = (long double)(a * b); + printf("testfunc_Ddd(%p, %p)\n", &a, &b); + printf("testfunc_Ddd(%g, %g)\n", a, b); + return result; +} + +EXPORT(long double)testfunc_DDD(long double a, long double b) +{ + long double result = a * b; + printf("testfunc_DDD(%p, %p)\n", &a, &b); + printf("testfunc_DDD(%Lg, %Lg)\n", a, b); + return result; +} + +EXPORT(int)testfunc_iii(int a, int b) +{ + int result = a * b; + printf("testfunc_iii(%p, %p)\n", &a, &b); + return result; +} + +EXPORT(int)myprintf(char *fmt, ...) 
+{ + int result; + va_list argptr; + va_start(argptr, fmt); + result = vprintf(fmt, argptr); + va_end(argptr); + return result; +} + +EXPORT(char *)my_strtok(char *token, const char *delim) +{ + return strtok(token, delim); +} + +EXPORT(char *)my_strchr(const char *s, int c) +{ + return strchr(s, c); +} + + +EXPORT(double) my_sqrt(double a) +{ + return sqrt(a); +} + +EXPORT(void) my_qsort(void *base, size_t num, size_t width, int(*compare)(const void*, const void*)) +{ + qsort(base, num, width, compare); +} + +EXPORT(int *) _testfunc_ai8(int a[8]) +{ + return a; +} + +EXPORT(void) _testfunc_v(int a, int b, int *presult) +{ + *presult = a + b; +} + +EXPORT(int) _testfunc_i_bhilfd(signed char b, short h, int i, long l, float f, double d) +{ +/* printf("_testfunc_i_bhilfd got %d %d %d %ld %f %f\n", + b, h, i, l, f, d); +*/ + return (int)(b + h + i + l + f + d); +} + +EXPORT(float) _testfunc_f_bhilfd(signed char b, short h, int i, long l, float f, double d) +{ +/* printf("_testfunc_f_bhilfd got %d %d %d %ld %f %f\n", + b, h, i, l, f, d); +*/ + return (float)(b + h + i + l + f + d); +} + +EXPORT(double) _testfunc_d_bhilfd(signed char b, short h, int i, long l, float f, double d) +{ +/* printf("_testfunc_d_bhilfd got %d %d %d %ld %f %f\n", + b, h, i, l, f, d); +*/ + return (double)(b + h + i + l + f + d); +} + +EXPORT(long double) _testfunc_D_bhilfD(signed char b, short h, int i, long l, float f, long double d) +{ +/* printf("_testfunc_d_bhilfd got %d %d %d %ld %f %f\n", + b, h, i, l, f, d); +*/ + return (long double)(b + h + i + l + f + d); +} + +EXPORT(char *) _testfunc_p_p(void *s) +{ + return (char *)s; +} + +EXPORT(void *) _testfunc_c_p_p(int *argcp, char **argv) +{ + return argv[(*argcp)-1]; +} + +EXPORT(void *) get_strchr(void) +{ + return (void *)strchr; +} + +EXPORT(char *) my_strdup(char *src) +{ + char *dst = (char *)malloc(strlen(src)+1); + if (!dst) + return NULL; + strcpy(dst, src); + return dst; +} + +EXPORT(void)my_free(void *ptr) +{ + free(ptr); +} + +#ifdef HAVE_WCHAR_H +EXPORT(wchar_t *) my_wcsdup(wchar_t *src) +{ + size_t len = wcslen(src); + wchar_t *ptr = (wchar_t *)malloc((len + 1) * sizeof(wchar_t)); + if (ptr == NULL) + return NULL; + memcpy(ptr, src, (len+1) * sizeof(wchar_t)); + return ptr; +} + +EXPORT(size_t) my_wcslen(wchar_t *src) +{ + return wcslen(src); +} +#endif + +#ifndef MS_WIN32 +# ifndef __stdcall +# define __stdcall /* */ +# endif +#endif + +typedef struct { + int (*c)(int, int); + int (__stdcall *s)(int, int); +} FUNCS; + +EXPORT(int) _testfunc_callfuncp(FUNCS *fp) +{ + fp->c(1, 2); + fp->s(3, 4); + return 0; +} + +EXPORT(int) _testfunc_deref_pointer(int *pi) +{ + return *pi; +} + +#ifdef MS_WIN32 +EXPORT(int) _testfunc_piunk(IUnknown FAR *piunk) +{ + piunk->lpVtbl->AddRef(piunk); + return piunk->lpVtbl->Release(piunk); +} +#endif + +EXPORT(int) _testfunc_callback_with_pointer(int (*func)(int *)) +{ + int table[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}; + + return (*func)(table); +} + +EXPORT(long long) _testfunc_q_bhilfdq(signed char b, short h, int i, long l, float f, + double d, long long q) +{ + return (long long)(b + h + i + l + f + d + q); +} + +EXPORT(long long) _testfunc_q_bhilfd(signed char b, short h, int i, long l, float f, double d) +{ + return (long long)(b + h + i + l + f + d); +} + +EXPORT(int) _testfunc_callback_i_if(int value, int (*func)(int)) +{ + int sum = 0; + while (value != 0) { + sum += func(value); + value /= 2; + } + return sum; +} + +EXPORT(long long) _testfunc_callback_q_qf(long long value, + long long (*func)(long long)) +{ + long 
long sum = 0; + + while (value != 0) { + sum += func(value); + value /= 2; + } + return sum; +} + +typedef struct { + char *name; + char *value; +} SPAM; + +typedef struct { + char *name; + int num_spams; + SPAM *spams; +} EGG; + +SPAM my_spams[2] = { + { "name1", "value1" }, + { "name2", "value2" }, +}; + +EGG my_eggs[1] = { + { "first egg", 1, my_spams } +}; + +EXPORT(int) getSPAMANDEGGS(EGG **eggs) +{ + *eggs = my_eggs; + return 1; +} + +typedef struct tagpoint { + int x; + int y; +} point; + +EXPORT(int) _testfunc_byval(point in, point *pout) +{ + if (pout) { + pout->x = in.x; + pout->y = in.y; + } + return in.x + in.y; +} + +EXPORT (int) an_integer = 42; + +EXPORT(int) get_an_integer(void) +{ + return an_integer; +} + +EXPORT(double) +integrate(double a, double b, double (*f)(double), long nstep) +{ + double x, sum=0.0, dx=(b-a)/(double)nstep; + for(x=a+0.5*dx; (b-x)*(x-a)>0.0; x+=dx) + sum += f(x); + return sum/(double)nstep; +} + +typedef struct { + void (*initialize)(void *(*)(int), void(*)(void *)); +} xxx_library; + +static void _xxx_init(void *(*Xalloc)(int), void (*Xfree)(void *)) +{ + void *ptr; + + printf("_xxx_init got %p %p\n", Xalloc, Xfree); + printf("calling\n"); + ptr = Xalloc(32); + Xfree(ptr); + printf("calls done, ptr was %p\n", ptr); +} + +xxx_library _xxx_lib = { + _xxx_init +}; + +EXPORT(xxx_library) *library_get(void) +{ + return &_xxx_lib; +} + +#ifdef MS_WIN32 +/* See Don Box (german), pp 79ff. */ +EXPORT(void) GetString(BSTR *pbstr) +{ + *pbstr = SysAllocString(L"Goodbye!"); +} +#endif + +/* + * Some do-nothing functions, for speed tests + */ +PyObject *py_func_si(PyObject *self, PyObject *args) +{ + char *name; + int i; + if (!PyArg_ParseTuple(args, "si", &name, &i)) + return NULL; + Py_INCREF(Py_None); + return Py_None; +} + +EXPORT(void) _py_func_si(char *s, int i) +{ +} + +PyObject *py_func(PyObject *self, PyObject *args) +{ + Py_INCREF(Py_None); + return Py_None; +} + +EXPORT(void) _py_func(void) +{ +} + +EXPORT(long long) last_tf_arg_s; +EXPORT(unsigned long long) last_tf_arg_u; + +struct BITS { + int A: 1, B:2, C:3, D:4, E: 5, F: 6, G: 7, H: 8, I: 9; + short M: 1, N: 2, O: 3, P: 4, Q: 5, R: 6, S: 7; +}; + +EXPORT(void) set_bitfields(struct BITS *bits, char name, int value) +{ + switch (name) { + case 'A': bits->A = value; break; + case 'B': bits->B = value; break; + case 'C': bits->C = value; break; + case 'D': bits->D = value; break; + case 'E': bits->E = value; break; + case 'F': bits->F = value; break; + case 'G': bits->G = value; break; + case 'H': bits->H = value; break; + case 'I': bits->I = value; break; + + case 'M': bits->M = value; break; + case 'N': bits->N = value; break; + case 'O': bits->O = value; break; + case 'P': bits->P = value; break; + case 'Q': bits->Q = value; break; + case 'R': bits->R = value; break; + case 'S': bits->S = value; break; + } +} + +EXPORT(int) unpack_bitfields(struct BITS *bits, char name) +{ + switch (name) { + case 'A': return bits->A; + case 'B': return bits->B; + case 'C': return bits->C; + case 'D': return bits->D; + case 'E': return bits->E; + case 'F': return bits->F; + case 'G': return bits->G; + case 'H': return bits->H; + case 'I': return bits->I; + + case 'M': return bits->M; + case 'N': return bits->N; + case 'O': return bits->O; + case 'P': return bits->P; + case 'Q': return bits->Q; + case 'R': return bits->R; + case 'S': return bits->S; + } + return 0; +} + +static PyMethodDef module_methods[] = { +/* {"get_last_tf_arg_s", get_last_tf_arg_s, METH_NOARGS}, + {"get_last_tf_arg_u", get_last_tf_arg_u, 
METH_NOARGS}, +*/ + {"func_si", py_func_si, METH_VARARGS}, + {"func", py_func, METH_NOARGS}, + { NULL, NULL, 0, NULL}, +}; + +#define S last_tf_arg_s = (long long)c +#define U last_tf_arg_u = (unsigned long long)c + +EXPORT(signed char) tf_b(signed char c) { S; return c/3; } +EXPORT(unsigned char) tf_B(unsigned char c) { U; return c/3; } +EXPORT(short) tf_h(short c) { S; return c/3; } +EXPORT(unsigned short) tf_H(unsigned short c) { U; return c/3; } +EXPORT(int) tf_i(int c) { S; return c/3; } +EXPORT(unsigned int) tf_I(unsigned int c) { U; return c/3; } +EXPORT(long) tf_l(long c) { S; return c/3; } +EXPORT(unsigned long) tf_L(unsigned long c) { U; return c/3; } +EXPORT(long long) tf_q(long long c) { S; return c/3; } +EXPORT(unsigned long long) tf_Q(unsigned long long c) { U; return c/3; } +EXPORT(float) tf_f(float c) { S; return c/3; } +EXPORT(double) tf_d(double c) { S; return c/3; } +EXPORT(long double) tf_D(long double c) { S; return c/3; } + +#ifdef MS_WIN32 +EXPORT(signed char) __stdcall s_tf_b(signed char c) { S; return c/3; } +EXPORT(unsigned char) __stdcall s_tf_B(unsigned char c) { U; return c/3; } +EXPORT(short) __stdcall s_tf_h(short c) { S; return c/3; } +EXPORT(unsigned short) __stdcall s_tf_H(unsigned short c) { U; return c/3; } +EXPORT(int) __stdcall s_tf_i(int c) { S; return c/3; } +EXPORT(unsigned int) __stdcall s_tf_I(unsigned int c) { U; return c/3; } +EXPORT(long) __stdcall s_tf_l(long c) { S; return c/3; } +EXPORT(unsigned long) __stdcall s_tf_L(unsigned long c) { U; return c/3; } +EXPORT(long long) __stdcall s_tf_q(long long c) { S; return c/3; } +EXPORT(unsigned long long) __stdcall s_tf_Q(unsigned long long c) { U; return c/3; } +EXPORT(float) __stdcall s_tf_f(float c) { S; return c/3; } +EXPORT(double) __stdcall s_tf_d(double c) { S; return c/3; } +EXPORT(long double) __stdcall s_tf_D(long double c) { S; return c/3; } +#endif +/*******/ + +EXPORT(signed char) tf_bb(signed char x, signed char c) { S; return c/3; } +EXPORT(unsigned char) tf_bB(signed char x, unsigned char c) { U; return c/3; } +EXPORT(short) tf_bh(signed char x, short c) { S; return c/3; } +EXPORT(unsigned short) tf_bH(signed char x, unsigned short c) { U; return c/3; } +EXPORT(int) tf_bi(signed char x, int c) { S; return c/3; } +EXPORT(unsigned int) tf_bI(signed char x, unsigned int c) { U; return c/3; } +EXPORT(long) tf_bl(signed char x, long c) { S; return c/3; } +EXPORT(unsigned long) tf_bL(signed char x, unsigned long c) { U; return c/3; } +EXPORT(long long) tf_bq(signed char x, long long c) { S; return c/3; } +EXPORT(unsigned long long) tf_bQ(signed char x, unsigned long long c) { U; return c/3; } +EXPORT(float) tf_bf(signed char x, float c) { S; return c/3; } +EXPORT(double) tf_bd(signed char x, double c) { S; return c/3; } +EXPORT(long double) tf_bD(signed char x, long double c) { S; return c/3; } +EXPORT(void) tv_i(int c) { S; return; } + +#ifdef MS_WIN32 +EXPORT(signed char) __stdcall s_tf_bb(signed char x, signed char c) { S; return c/3; } +EXPORT(unsigned char) __stdcall s_tf_bB(signed char x, unsigned char c) { U; return c/3; } +EXPORT(short) __stdcall s_tf_bh(signed char x, short c) { S; return c/3; } +EXPORT(unsigned short) __stdcall s_tf_bH(signed char x, unsigned short c) { U; return c/3; } +EXPORT(int) __stdcall s_tf_bi(signed char x, int c) { S; return c/3; } +EXPORT(unsigned int) __stdcall s_tf_bI(signed char x, unsigned int c) { U; return c/3; } +EXPORT(long) __stdcall s_tf_bl(signed char x, long c) { S; return c/3; } +EXPORT(unsigned long) __stdcall s_tf_bL(signed char x, 
unsigned long c) { U; return c/3; } +EXPORT(long long) __stdcall s_tf_bq(signed char x, long long c) { S; return c/3; } +EXPORT(unsigned long long) __stdcall s_tf_bQ(signed char x, unsigned long long c) { U; return c/3; } +EXPORT(float) __stdcall s_tf_bf(signed char x, float c) { S; return c/3; } +EXPORT(double) __stdcall s_tf_bd(signed char x, double c) { S; return c/3; } +EXPORT(long double) __stdcall s_tf_bD(signed char x, long double c) { S; return c/3; } +EXPORT(void) __stdcall s_tv_i(int c) { S; return; } +#endif + +/********/ + +#ifndef MS_WIN32 + +typedef struct { + long x; + long y; +} POINT; + +typedef struct { + long left; + long top; + long right; + long bottom; +} RECT; + +#endif + +EXPORT(int) PointInRect(RECT *prc, POINT pt) +{ + if (pt.x < prc->left) + return 0; + if (pt.x > prc->right) + return 0; + if (pt.y < prc->top) + return 0; + if (pt.y > prc->bottom) + return 0; + return 1; +} + +EXPORT(long left = 10); +EXPORT(long top = 20); +EXPORT(long right = 30); +EXPORT(long bottom = 40); + +EXPORT(RECT) ReturnRect(int i, RECT ar, RECT* br, POINT cp, RECT dr, + RECT *er, POINT fp, RECT gr) +{ + /*Check input */ + if (ar.left + br->left + dr.left + er->left + gr.left != left * 5) + { + ar.left = 100; + return ar; + } + if (ar.right + br->right + dr.right + er->right + gr.right != right * 5) + { + ar.right = 100; + return ar; + } + if (cp.x != fp.x) + { + ar.left = -100; + } + if (cp.y != fp.y) + { + ar.left = -200; + } + switch(i) + { + case 0: + return ar; + break; + case 1: + return dr; + break; + case 2: + return gr; + break; + + } + return ar; +} + +typedef struct { + short x; + short y; +} S2H; + +EXPORT(S2H) ret_2h_func(S2H inp) +{ + inp.x *= 2; + inp.y *= 3; + return inp; +} + +typedef struct { + int a, b, c, d, e, f, g, h; +} S8I; + +EXPORT(S8I) ret_8i_func(S8I inp) +{ + inp.a *= 2; + inp.b *= 3; + inp.c *= 4; + inp.d *= 5; + inp.e *= 6; + inp.f *= 7; + inp.g *= 8; + inp.h *= 9; + return inp; +} + +EXPORT(int) GetRectangle(int flag, RECT *prect) +{ + if (flag == 0) + return 0; + prect->left = (int)flag; + prect->top = (int)flag + 1; + prect->right = (int)flag + 2; + prect->bottom = (int)flag + 3; + return 1; +} + +EXPORT(void) TwoOutArgs(int a, int *pi, int b, int *pj) +{ + *pi += a; + *pj += b; +} + +#ifdef MS_WIN32 +EXPORT(S2H) __stdcall s_ret_2h_func(S2H inp) { return ret_2h_func(inp); } +EXPORT(S8I) __stdcall s_ret_8i_func(S8I inp) { return ret_8i_func(inp); } +#endif + +#ifdef MS_WIN32 +/* Should port this */ +#include +#include + +EXPORT (HRESULT) KeepObject(IUnknown *punk) +{ + static IUnknown *pobj; + if (punk) + punk->lpVtbl->AddRef(punk); + if (pobj) + pobj->lpVtbl->Release(pobj); + pobj = punk; + return S_OK; +} + +#endif + + +static struct PyModuleDef _ctypes_testmodule = { + PyModuleDef_HEAD_INIT, + "_ctypes_test", + NULL, + -1, + module_methods, + NULL, + NULL, + NULL, + NULL +}; + +PyMODINIT_FUNC +PyInit__ctypes_test(void) +{ + return PyModule_Create(&_ctypes_testmodule); +} diff --git a/lib_pypy/_testcapimodule.c b/lib_pypy/_testcapimodule.c new file mode 100644 --- /dev/null +++ b/lib_pypy/_testcapimodule.c @@ -0,0 +1,4649 @@ +/* + * C Extension module to test Python interpreter C APIs. + * + * The 'test_*' functions exported by this module are run as part of the + * standard Python regression test, via Lib/test/test_capi.py. 
+ */ + +#define PY_SSIZE_T_CLEAN + +#include "Python.h" +#include +#include "structmember.h" +#include "datetime.h" +#include "marshal.h" +#include + +#ifdef MS_WINDOWS +# include /* struct timeval */ +#endif + +#ifdef WITH_THREAD +#include "pythread.h" +#endif /* WITH_THREAD */ +static PyObject *TestError; /* set to exception object in init */ + +/* Raise TestError with test_name + ": " + msg, and return NULL. */ + +static PyObject * +raiseTestError(const char* test_name, const char* msg) +{ + PyErr_Format(TestError, "%s: %s", test_name, msg); + return NULL; +} + +/* Test #defines from pyconfig.h (particularly the SIZEOF_* defines). + + The ones derived from autoconf on the UNIX-like OSes can be relied + upon (in the absence of sloppy cross-compiling), but the Windows + platforms have these hardcoded. Better safe than sorry. +*/ +static PyObject* +sizeof_error(const char* fatname, const char* typname, + int expected, int got) +{ + PyErr_Format(TestError, + "%s #define == %d but sizeof(%s) == %d", + fatname, expected, typname, got); + return (PyObject*)NULL; +} + +static PyObject* +test_config(PyObject *self) +{ +#define CHECK_SIZEOF(FATNAME, TYPE) \ + if (FATNAME != sizeof(TYPE)) \ + return sizeof_error(#FATNAME, #TYPE, FATNAME, sizeof(TYPE)) + + CHECK_SIZEOF(SIZEOF_SHORT, short); + CHECK_SIZEOF(SIZEOF_INT, int); + CHECK_SIZEOF(SIZEOF_LONG, long); + CHECK_SIZEOF(SIZEOF_VOID_P, void*); + CHECK_SIZEOF(SIZEOF_TIME_T, time_t); + CHECK_SIZEOF(SIZEOF_LONG_LONG, long long); + +#undef CHECK_SIZEOF + + Py_INCREF(Py_None); + return Py_None; +} + +static PyObject* +test_sizeof_c_types(PyObject *self) +{ +#if defined(__GNUC__) && ((__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ > 5))) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wtype-limits" +#endif +#define CHECK_SIZEOF(TYPE, EXPECTED) \ + if (EXPECTED != sizeof(TYPE)) { \ + PyErr_Format(TestError, \ + "sizeof(%s) = %u instead of %u", \ + #TYPE, sizeof(TYPE), EXPECTED); \ + return (PyObject*)NULL; \ + } +#define IS_SIGNED(TYPE) (((TYPE)-1) < (TYPE)0) +#define CHECK_SIGNNESS(TYPE, SIGNED) \ + if (IS_SIGNED(TYPE) != SIGNED) { \ + PyErr_Format(TestError, \ + "%s signness is, instead of %i", \ + #TYPE, IS_SIGNED(TYPE), SIGNED); \ + return (PyObject*)NULL; \ + } + + /* integer types */ + CHECK_SIZEOF(Py_UCS1, 1); + CHECK_SIZEOF(Py_UCS2, 2); + CHECK_SIZEOF(Py_UCS4, 4); + CHECK_SIGNNESS(Py_UCS1, 0); + CHECK_SIGNNESS(Py_UCS2, 0); + CHECK_SIGNNESS(Py_UCS4, 0); + CHECK_SIZEOF(int32_t, 4); + CHECK_SIGNNESS(int32_t, 1); + CHECK_SIZEOF(uint32_t, 4); + CHECK_SIGNNESS(uint32_t, 0); + CHECK_SIZEOF(int64_t, 8); + CHECK_SIGNNESS(int64_t, 1); + CHECK_SIZEOF(uint64_t, 8); + CHECK_SIGNNESS(uint64_t, 0); + + /* pointer/size types */ + CHECK_SIZEOF(size_t, sizeof(void *)); + CHECK_SIGNNESS(size_t, 0); + CHECK_SIZEOF(Py_ssize_t, sizeof(void *)); + CHECK_SIGNNESS(Py_ssize_t, 1); + + CHECK_SIZEOF(uintptr_t, sizeof(void *)); + CHECK_SIGNNESS(uintptr_t, 0); + CHECK_SIZEOF(intptr_t, sizeof(void *)); + CHECK_SIGNNESS(intptr_t, 1); + + Py_INCREF(Py_None); + return Py_None; + +#undef IS_SIGNED +#undef CHECK_SIGNESS +#undef CHECK_SIZEOF +#if defined(__GNUC__) && ((__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ > 5))) +#pragma GCC diagnostic pop +#endif +} + + +static PyObject* +test_list_api(PyObject *self) +{ + PyObject* list; + int i; + + /* SF bug 132008: PyList_Reverse segfaults */ +#define NLIST 30 + list = PyList_New(NLIST); + if (list == (PyObject*)NULL) + return (PyObject*)NULL; + /* list = range(NLIST) */ + for (i = 0; i < NLIST; ++i) { + 
PyObject* anint = PyLong_FromLong(i); + if (anint == (PyObject*)NULL) { + Py_DECREF(list); + return (PyObject*)NULL; + } + PyList_SET_ITEM(list, i, anint); + } + /* list.reverse(), via PyList_Reverse() */ + i = PyList_Reverse(list); /* should not blow up! */ + if (i != 0) { + Py_DECREF(list); + return (PyObject*)NULL; + } + /* Check that list == range(29, -1, -1) now */ + for (i = 0; i < NLIST; ++i) { + PyObject* anint = PyList_GET_ITEM(list, i); + if (PyLong_AS_LONG(anint) != NLIST-1-i) { + PyErr_SetString(TestError, + "test_list_api: reverse screwed up"); + Py_DECREF(list); + return (PyObject*)NULL; + } + } + Py_DECREF(list); +#undef NLIST + + Py_INCREF(Py_None); + return Py_None; +} + +static int +test_dict_inner(int count) +{ + Py_ssize_t pos = 0, iterations = 0; + int i; + PyObject *dict = PyDict_New(); + PyObject *v, *k; + + if (dict == NULL) + return -1; + + for (i = 0; i < count; i++) { + v = PyLong_FromLong(i); + if (v == NULL) { + return -1; + } + if (PyDict_SetItem(dict, v, v) < 0) { + Py_DECREF(v); + return -1; + } + Py_DECREF(v); + } + + while (PyDict_Next(dict, &pos, &k, &v)) { + PyObject *o; + iterations++; + + i = PyLong_AS_LONG(v) + 1; + o = PyLong_FromLong(i); + if (o == NULL) + return -1; + if (PyDict_SetItem(dict, k, o) < 0) { + Py_DECREF(o); + return -1; + } + Py_DECREF(o); + } + + Py_DECREF(dict); + + if (iterations != count) { + PyErr_SetString( + TestError, + "test_dict_iteration: dict iteration went wrong "); + return -1; + } else { + return 0; + } +} + +static PyObject* +test_dict_iteration(PyObject* self) +{ + int i; + + for (i = 0; i < 200; i++) { + if (test_dict_inner(i) < 0) { + return NULL; + } + } + + Py_INCREF(Py_None); + return Py_None; +} + +static PyObject* +dict_getitem_knownhash(PyObject *self, PyObject *args) +{ + PyObject *mp, *key, *result; + Py_ssize_t hash; + + if (!PyArg_ParseTuple(args, "OOn:dict_getitem_knownhash", + &mp, &key, &hash)) { + return NULL; + } + + result = _PyDict_GetItem_KnownHash(mp, key, (Py_hash_t)hash); + if (result == NULL && !PyErr_Occurred()) { + _PyErr_SetKeyError(key); + return NULL; + } + + Py_XINCREF(result); + return result; +} + +static PyObject* +dict_hassplittable(PyObject *self, PyObject *arg) +{ + if (!PyDict_Check(arg)) { + PyErr_Format(PyExc_TypeError, + "dict_hassplittable() argument must be dict, not '%s'", + arg->ob_type->tp_name); + return NULL; + } + + return PyBool_FromLong(_PyDict_HasSplitTable((PyDictObject*)arg)); +} + +/* Issue #4701: Check that PyObject_Hash implicitly calls + * PyType_Ready if it hasn't already been called + */ +static PyTypeObject _HashInheritanceTester_Type = { + PyVarObject_HEAD_INIT(NULL, 0) + "hashinheritancetester", /* Name of this type */ + sizeof(PyObject), /* Basic object size */ + 0, /* Item size for varobject */ + (destructor)PyObject_Del, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_reserved */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + PyObject_GenericGetAttr, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT, /* tp_flags */ + 0, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + 0, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* 
tp_alloc */ + PyType_GenericNew, /* tp_new */ +}; + +static PyObject* +test_lazy_hash_inheritance(PyObject* self) +{ + PyTypeObject *type; + PyObject *obj; + Py_hash_t hash; + + type = &_HashInheritanceTester_Type; + + if (type->tp_dict != NULL) + /* The type has already been initialized. This probably means + -R is being used. */ + Py_RETURN_NONE; + + + obj = PyObject_New(PyObject, type); + if (obj == NULL) { + PyErr_Clear(); + PyErr_SetString( + TestError, + "test_lazy_hash_inheritance: failed to create object"); + return NULL; + } + + if (type->tp_dict != NULL) { + PyErr_SetString( + TestError, + "test_lazy_hash_inheritance: type initialised too soon"); + Py_DECREF(obj); + return NULL; + } + + hash = PyObject_Hash(obj); + if ((hash == -1) && PyErr_Occurred()) { + PyErr_Clear(); + PyErr_SetString( + TestError, + "test_lazy_hash_inheritance: could not hash object"); + Py_DECREF(obj); + return NULL; + } + + if (type->tp_dict == NULL) { + PyErr_SetString( + TestError, + "test_lazy_hash_inheritance: type not initialised by hash()"); + Py_DECREF(obj); + return NULL; + } + + if (type->tp_hash != PyType_Type.tp_hash) { + PyErr_SetString( + TestError, + "test_lazy_hash_inheritance: unexpected hash function"); + Py_DECREF(obj); + return NULL; + } + + Py_DECREF(obj); + + Py_RETURN_NONE; +} + + +/* Tests of PyLong_{As, From}{Unsigned,}Long(), and + PyLong_{As, From}{Unsigned,}LongLong(). + + Note that the meat of the test is contained in testcapi_long.h. + This is revolting, but delicate code duplication is worse: "almost + exactly the same" code is needed to test long long, but the ubiquitous + dependence on type names makes it impossible to use a parameterized + function. A giant macro would be even worse than this. A C++ template + would be perfect. + + The "report an error" functions are deliberately not part of the #include + file: if the test fails, you can set a breakpoint in the appropriate + error function directly, and crawl back from there in the debugger. +*/ + +#define UNBIND(X) Py_DECREF(X); (X) = NULL + +static PyObject * +raise_test_long_error(const char* msg) +{ + return raiseTestError("test_long_api", msg); +} + +#define TESTNAME test_long_api_inner +#define TYPENAME long +#define F_S_TO_PY PyLong_FromLong +#define F_PY_TO_S PyLong_AsLong +#define F_U_TO_PY PyLong_FromUnsignedLong +#define F_PY_TO_U PyLong_AsUnsignedLong + +#include "testcapi_long.h" + +static PyObject * +test_long_api(PyObject* self) +{ + return TESTNAME(raise_test_long_error); +} + +#undef TESTNAME +#undef TYPENAME +#undef F_S_TO_PY +#undef F_PY_TO_S +#undef F_U_TO_PY +#undef F_PY_TO_U + +static PyObject * +raise_test_longlong_error(const char* msg) +{ + return raiseTestError("test_longlong_api", msg); +} + +#define TESTNAME test_longlong_api_inner +#define TYPENAME long long +#define F_S_TO_PY PyLong_FromLongLong +#define F_PY_TO_S PyLong_AsLongLong +#define F_U_TO_PY PyLong_FromUnsignedLongLong +#define F_PY_TO_U PyLong_AsUnsignedLongLong + +#include "testcapi_long.h" + +static PyObject * +test_longlong_api(PyObject* self, PyObject *args) +{ + return TESTNAME(raise_test_longlong_error); +} + +#undef TESTNAME +#undef TYPENAME +#undef F_S_TO_PY +#undef F_PY_TO_S +#undef F_U_TO_PY +#undef F_PY_TO_U + +/* Test the PyLong_AsLongAndOverflow API. General conversion to PY_LONG + is tested by test_long_api_inner. This test will concentrate on proper + handling of overflow. 
+*/ + +static PyObject * +test_long_and_overflow(PyObject *self) +{ + PyObject *num, *one, *temp; + long value; + int overflow; + + /* Test that overflow is set properly for a large value. */ + /* num is a number larger than LONG_MAX even on 64-bit platforms */ + num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to 1"); + + /* Same again, with num = LONG_MAX + 1 */ + num = PyLong_FromLong(LONG_MAX); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Add(num, one); + Py_DECREF(one); + Py_DECREF(num); + num = temp; + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to 1"); + + /* Test that overflow is set properly for a large negative value. */ + /* num is a number smaller than LONG_MIN even on 64-bit platforms */ + num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to -1"); + + /* Same again, with num = LONG_MIN - 1 */ + num = PyLong_FromLong(LONG_MIN); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Subtract(num, one); + Py_DECREF(one); + Py_DECREF(num); + num = temp; + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to -1"); + + /* Test that overflow is cleared properly for small values. 
*/ + num = PyLong_FromString("FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != 0xFF) + return raiseTestError("test_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromString("-FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -0xFF) + return raiseTestError("test_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was set incorrectly"); + + num = PyLong_FromLong(LONG_MAX); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LONG_MAX) + return raiseTestError("test_long_and_overflow", + "expected return value LONG_MAX"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromLong(LONG_MIN); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LONG_MIN) + return raiseTestError("test_long_and_overflow", + "expected return value LONG_MIN"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was not cleared"); + + Py_INCREF(Py_None); + return Py_None; +} + +/* Test the PyLong_AsLongLongAndOverflow API. General conversion to + long long is tested by test_long_api_inner. This test will + concentrate on proper handling of overflow. +*/ + +static PyObject * +test_long_long_and_overflow(PyObject *self) +{ + PyObject *num, *one, *temp; + long long value; + int overflow; + + /* Test that overflow is set properly for a large value. */ + /* num is a number larger than PY_LLONG_MAX on a typical machine. */ + num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to 1"); + + /* Same again, with num = PY_LLONG_MAX + 1 */ + num = PyLong_FromLongLong(PY_LLONG_MAX); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Add(num, one); + Py_DECREF(one); + Py_DECREF(num); + num = temp; + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to 1"); + + /* Test that overflow is set properly for a large negative value. 
*/ + /* num is a number smaller than PY_LLONG_MIN on a typical platform */ + num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to -1"); + + /* Same again, with num = PY_LLONG_MIN - 1 */ + num = PyLong_FromLongLong(PY_LLONG_MIN); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Subtract(num, one); + Py_DECREF(one); + Py_DECREF(num); + num = temp; + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to -1"); + + /* Test that overflow is cleared properly for small values. */ + num = PyLong_FromString("FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != 0xFF) + return raiseTestError("test_long_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromString("-FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -0xFF) + return raiseTestError("test_long_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was set incorrectly"); + + num = PyLong_FromLongLong(PY_LLONG_MAX); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != PY_LLONG_MAX) + return raiseTestError("test_long_long_and_overflow", + "expected return value PY_LLONG_MAX"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromLongLong(PY_LLONG_MIN); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != PY_LLONG_MIN) + return raiseTestError("test_long_long_and_overflow", + "expected return value PY_LLONG_MIN"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was not cleared"); + + Py_INCREF(Py_None); + return Py_None; +} + +/* Test the PyLong_As{Size,Ssize}_t API. At present this just tests that + non-integer arguments are handled correctly. It should be extended to + test overflow handling. 
+ */ + +static PyObject * +test_long_as_size_t(PyObject *self) +{ + size_t out_u; + Py_ssize_t out_s; + + Py_INCREF(Py_None); + + out_u = PyLong_AsSize_t(Py_None); + if (out_u != (size_t)-1 || !PyErr_Occurred()) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSize_t(None) didn't complain"); + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSize_t(None) raised " + "something other than TypeError"); + PyErr_Clear(); + + out_s = PyLong_AsSsize_t(Py_None); + if (out_s != (Py_ssize_t)-1 || !PyErr_Occurred()) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSsize_t(None) didn't complain"); + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSsize_t(None) raised " + "something other than TypeError"); + PyErr_Clear(); + + /* Py_INCREF(Py_None) omitted - we already have a reference to it. */ + return Py_None; +} + +/* Test the PyLong_AsDouble API. At present this just tests that + non-integer arguments are handled correctly. + */ + +static PyObject * +test_long_as_double(PyObject *self) +{ + double out; + + Py_INCREF(Py_None); + + out = PyLong_AsDouble(Py_None); + if (out != -1.0 || !PyErr_Occurred()) + return raiseTestError("test_long_as_double", + "PyLong_AsDouble(None) didn't complain"); + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return raiseTestError("test_long_as_double", + "PyLong_AsDouble(None) raised " + "something other than TypeError"); + PyErr_Clear(); + + /* Py_INCREF(Py_None) omitted - we already have a reference to it. */ + return Py_None; +} + +/* Test the L code for PyArg_ParseTuple. This should deliver a long long + for both long and int arguments. The test may leak a little memory if + it fails. +*/ +static PyObject * +test_L_code(PyObject *self) +{ + PyObject *tuple, *num; + long long value; + + tuple = PyTuple_New(1); + if (tuple == NULL) + return NULL; + + num = PyLong_FromLong(42); + if (num == NULL) + return NULL; + + PyTuple_SET_ITEM(tuple, 0, num); + + value = -1; + if (PyArg_ParseTuple(tuple, "L:test_L_code", &value) < 0) + return NULL; + if (value != 42) + return raiseTestError("test_L_code", + "L code returned wrong value for long 42"); + + Py_DECREF(num); + num = PyLong_FromLong(42); + if (num == NULL) + return NULL; + + PyTuple_SET_ITEM(tuple, 0, num); + + value = -1; + if (PyArg_ParseTuple(tuple, "L:test_L_code", &value) < 0) + return NULL; + if (value != 42) + return raiseTestError("test_L_code", + "L code returned wrong value for int 42"); + + Py_DECREF(tuple); + Py_INCREF(Py_None); + return Py_None; +} + +static PyObject * +return_none(void *unused) +{ + Py_RETURN_NONE; +} + +static PyObject * +raise_error(void *unused) +{ + PyErr_SetNone(PyExc_ValueError); + return NULL; +} + +static int +test_buildvalue_N_error(const char *fmt) +{ + PyObject *arg, *res; + + arg = PyList_New(0); + if (arg == NULL) { + return -1; + } + + Py_INCREF(arg); + res = Py_BuildValue(fmt, return_none, NULL, arg); + if (res == NULL) { + return -1; + } + Py_DECREF(res); + if (Py_REFCNT(arg) != 1) { + PyErr_Format(TestError, "test_buildvalue_N: " + "arg was not decrefed in successful " + "Py_BuildValue(\"%s\")", fmt); + return -1; + } + + Py_INCREF(arg); + res = Py_BuildValue(fmt, raise_error, NULL, arg); + if (res != NULL || !PyErr_Occurred()) { + PyErr_Format(TestError, "test_buildvalue_N: " + "Py_BuildValue(\"%s\") didn't complain", fmt); + return -1; + } + PyErr_Clear(); + if (Py_REFCNT(arg) != 1) { + PyErr_Format(TestError, "test_buildvalue_N: " + 
"arg was not decrefed in failed " + "Py_BuildValue(\"%s\")", fmt); + return -1; + } + Py_DECREF(arg); + return 0; +} + +static PyObject * +test_buildvalue_N(PyObject *self, PyObject *noargs) +{ + PyObject *arg, *res; + + arg = PyList_New(0); + if (arg == NULL) { + return NULL; + } + Py_INCREF(arg); + res = Py_BuildValue("N", arg); + if (res == NULL) { + return NULL; + } + if (res != arg) { + return raiseTestError("test_buildvalue_N", + "Py_BuildValue(\"N\") returned wrong result"); + } + if (Py_REFCNT(arg) != 2) { + return raiseTestError("test_buildvalue_N", + "arg was not decrefed in Py_BuildValue(\"N\")"); + } + Py_DECREF(res); + Py_DECREF(arg); + + if (test_buildvalue_N_error("O&N") < 0) + return NULL; + if (test_buildvalue_N_error("(O&N)") < 0) + return NULL; + if (test_buildvalue_N_error("[O&N]") < 0) + return NULL; + if (test_buildvalue_N_error("{O&N}") < 0) + return NULL; + if (test_buildvalue_N_error("{()O&(())N}") < 0) + return NULL; + + Py_RETURN_NONE; +} + + +static PyObject * +get_args(PyObject *self, PyObject *args) +{ + if (args == NULL) { + args = Py_None; + } + Py_INCREF(args); + return args; +} + +static PyObject * +get_kwargs(PyObject *self, PyObject *args, PyObject *kwargs) +{ + if (kwargs == NULL) { + kwargs = Py_None; + } + Py_INCREF(kwargs); + return kwargs; +} + +/* Test tuple argument processing */ +static PyObject * +getargs_tuple(PyObject *self, PyObject *args) +{ + int a, b, c; + if (!PyArg_ParseTuple(args, "i(ii)", &a, &b, &c)) + return NULL; + return Py_BuildValue("iii", a, b, c); +} + +/* test PyArg_ParseTupleAndKeywords */ +static PyObject * +getargs_keywords(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"arg1","arg2","arg3","arg4","arg5", NULL}; + static const char fmt[] = "(ii)i|(i(ii))(iii)i"; + int int_args[10]={-1, -1, -1, -1, -1, -1, -1, -1, -1, -1}; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, fmt, keywords, + &int_args[0], &int_args[1], &int_args[2], &int_args[3], &int_args[4], + &int_args[5], &int_args[6], &int_args[7], &int_args[8], &int_args[9])) + return NULL; + return Py_BuildValue("iiiiiiiiii", + int_args[0], int_args[1], int_args[2], int_args[3], int_args[4], + int_args[5], int_args[6], int_args[7], int_args[8], int_args[9]); +} + +/* test PyArg_ParseTupleAndKeywords keyword-only arguments */ +static PyObject * +getargs_keyword_only(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"required", "optional", "keyword_only", NULL}; + int required = -1; + int optional = -1; + int keyword_only = -1; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i|i$i", keywords, + &required, &optional, &keyword_only)) + return NULL; + return Py_BuildValue("iii", required, optional, keyword_only); +} + +/* test PyArg_ParseTupleAndKeywords positional-only arguments */ +static PyObject * +getargs_positional_only_and_keywords(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"", "", "keyword", NULL}; + int required = -1; + int optional = -1; + int keyword = -1; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i|ii", keywords, + &required, &optional, &keyword)) + return NULL; + return Py_BuildValue("iii", required, optional, keyword); +} + +/* Functions to call PyArg_ParseTuple with integer format codes, + and return the result. 
+*/ +static PyObject * +getargs_b(PyObject *self, PyObject *args) +{ + unsigned char value; + if (!PyArg_ParseTuple(args, "b", &value)) + return NULL; + return PyLong_FromUnsignedLong((unsigned long)value); +} + +static PyObject * +getargs_B(PyObject *self, PyObject *args) +{ + unsigned char value; + if (!PyArg_ParseTuple(args, "B", &value)) + return NULL; + return PyLong_FromUnsignedLong((unsigned long)value); +} + +static PyObject * +getargs_h(PyObject *self, PyObject *args) +{ + short value; + if (!PyArg_ParseTuple(args, "h", &value)) + return NULL; + return PyLong_FromLong((long)value); +} + +static PyObject * +getargs_H(PyObject *self, PyObject *args) +{ + unsigned short value; + if (!PyArg_ParseTuple(args, "H", &value)) + return NULL; + return PyLong_FromUnsignedLong((unsigned long)value); +} + +static PyObject * +getargs_I(PyObject *self, PyObject *args) +{ + unsigned int value; + if (!PyArg_ParseTuple(args, "I", &value)) + return NULL; + return PyLong_FromUnsignedLong((unsigned long)value); +} + +static PyObject * +getargs_k(PyObject *self, PyObject *args) +{ + unsigned long value; + if (!PyArg_ParseTuple(args, "k", &value)) + return NULL; + return PyLong_FromUnsignedLong(value); +} + +static PyObject * +getargs_i(PyObject *self, PyObject *args) +{ + int value; + if (!PyArg_ParseTuple(args, "i", &value)) + return NULL; + return PyLong_FromLong((long)value); +} + +static PyObject * +getargs_l(PyObject *self, PyObject *args) +{ + long value; + if (!PyArg_ParseTuple(args, "l", &value)) + return NULL; + return PyLong_FromLong(value); +} + +static PyObject * +getargs_n(PyObject *self, PyObject *args) +{ + Py_ssize_t value; + if (!PyArg_ParseTuple(args, "n", &value)) + return NULL; + return PyLong_FromSsize_t(value); +} + +static PyObject * +getargs_p(PyObject *self, PyObject *args) +{ + int value; + if (!PyArg_ParseTuple(args, "p", &value)) + return NULL; + return PyLong_FromLong(value); +} + +static PyObject * +getargs_L(PyObject *self, PyObject *args) +{ + long long value; From pypy.commits at gmail.com Fri Aug 2 15:36:19 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 02 Aug 2019 12:36:19 -0700 (PDT) Subject: [pypy-commit] pypy stdlib-3.6.9: fix importlib merge Message-ID: <5d4490b3.1c69fb81.8ac23.1fa3@mx.google.com> Author: Ronan Lamy Branch: stdlib-3.6.9 Changeset: r97055:6f617f012a11 Date: 2019-08-02 20:35 +0100 http://bitbucket.org/pypy/pypy/changeset/6f617f012a11/ Log: fix importlib merge diff --git a/lib-python/3/importlib/_bootstrap.py b/lib-python/3/importlib/_bootstrap.py --- a/lib-python/3/importlib/_bootstrap.py +++ b/lib-python/3/importlib/_bootstrap.py @@ -162,16 +162,16 @@ _imp.acquire_lock() try: - try: - lock = _module_locks[name]() - except KeyError: + try: + lock = _module_locks[name]() + except KeyError: lock = None - if lock is None: - if _thread is None: - lock = _DummyModuleLock(name) - else: - lock = _ModuleLock(name) + if lock is None: + if _thread is None: + lock = _DummyModuleLock(name) + else: + lock = _ModuleLock(name) def cb(ref, name=name): _imp.acquire_lock() @@ -180,11 +180,11 @@ # after the previous lock was destroyed # but before the weakref callback was called. 
if _module_locks.get(name) is ref: - del _module_locks[name] + del _module_locks[name] finally: _imp.release_lock() - _module_locks[name] = _weakref.ref(lock, cb) + _module_locks[name] = _weakref.ref(lock, cb) finally: _imp.release_lock() @@ -968,7 +968,7 @@ with _ModuleLockManager(name): module = sys.modules.get(name, _NEEDS_LOADING) if module is _NEEDS_LOADING: - return _find_and_load_unlocked(name, import_) + return _find_and_load_unlocked(name, import_) if module is None: message = ('import of {} halted; ' @@ -991,7 +991,7 @@ _sanity_check(name, package, level) if level > 0: name = _resolve_name(name, package, level) - return _find_and_load(name, _gcd_import) + return _find_and_load(name, _gcd_import) def _handle_fromlist(module, fromlist, import_, *, recursive=False): From pypy.commits at gmail.com Fri Aug 2 16:16:42 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 02 Aug 2019 13:16:42 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: fix translation Message-ID: <5d449a2a.1c69fb81.d6304.19aa@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97056:c56ece3c21ce Date: 2019-08-02 20:58 +0100 http://bitbucket.org/pypy/pypy/changeset/c56ece3c21ce/ Log: fix translation diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py --- a/pypy/module/cpyext/unicodeobject.py +++ b/pypy/module/cpyext/unicodeobject.py @@ -59,7 +59,7 @@ 2: '_2BYTE_KIND', 4: '_4BYTE_KIND', } - + def new_empty_unicode(space, length): """ @@ -1089,7 +1089,7 @@ c_length = get_wsize(ref) size = c_length - if copy_null: + if rffi.cast(lltype.Signed, copy_null): size += 1 if not pbuffer: # internal, for PyUnicode_AsUCS4Copy() pbuffer = lltype.malloc(rffi.CArray(Py_UCS4), size, From pypy.commits at gmail.com Fri Aug 2 16:17:47 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 02 Aug 2019 13:17:47 -0700 (PDT) Subject: [pypy-commit] pypy stdlib-3.6.9: hg merge py3.6 Message-ID: <5d449a6b.1c69fb81.16b9e.85ce@mx.google.com> Author: Ronan Lamy Branch: stdlib-3.6.9 Changeset: r97057:e4ecd24cf733 Date: 2019-08-02 21:17 +0100 http://bitbucket.org/pypy/pypy/changeset/e4ecd24cf733/ Log: hg merge py3.6 diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py --- a/pypy/module/cpyext/unicodeobject.py +++ b/pypy/module/cpyext/unicodeobject.py @@ -59,7 +59,7 @@ 2: '_2BYTE_KIND', 4: '_4BYTE_KIND', } - + def new_empty_unicode(space, length): """ @@ -1089,7 +1089,7 @@ c_length = get_wsize(ref) size = c_length - if copy_null: + if rffi.cast(lltype.Signed, copy_null): size += 1 if not pbuffer: # internal, for PyUnicode_AsUCS4Copy() pbuffer = lltype.malloc(rffi.CArray(Py_UCS4), size, From pypy.commits at gmail.com Sat Aug 3 13:21:58 2019 From: pypy.commits at gmail.com (rlamy) Date: Sat, 03 Aug 2019 10:21:58 -0700 (PDT) Subject: [pypy-commit] pypy stdlib-3.6.9: Quick hack to get ssl working again Message-ID: <5d45c2b6.1c69fb81.c4190.ba35@mx.google.com> Author: Ronan Lamy Branch: stdlib-3.6.9 Changeset: r97058:e5a0ccd44fa7 Date: 2019-08-03 18:13 +0100 http://bitbucket.org/pypy/pypy/changeset/e5a0ccd44fa7/ Log: Quick hack to get ssl working again diff --git a/lib_pypy/_cffi_ssl/_stdssl/__init__.py b/lib_pypy/_cffi_ssl/_stdssl/__init__.py --- a/lib_pypy/_cffi_ssl/_stdssl/__init__.py +++ b/lib_pypy/_cffi_ssl/_stdssl/__init__.py @@ -90,6 +90,7 @@ PROTOCOL_TLSv1_2 = 5 PROTOCOL_TLS_CLIENT = 0x10 PROTOCOL_TLS_SERVER = 0x11 +HAS_TLSv1_3 = False # XXX: temporary hack! 
_PROTOCOL_NAMES = (name for name in dir(lib) if name.startswith('PROTOCOL_')) From pypy.commits at gmail.com Sat Aug 3 13:22:00 2019 From: pypy.commits at gmail.com (rlamy) Date: Sat, 03 Aug 2019 10:22:00 -0700 (PDT) Subject: [pypy-commit] pypy stdlib-3.6.9: Add missing RegrTests Message-ID: <5d45c2b8.1c69fb81.2f7f9.1e03@mx.google.com> Author: Ronan Lamy Branch: stdlib-3.6.9 Changeset: r97059:2d781dffdbc3 Date: 2019-08-03 18:21 +0100 http://bitbucket.org/pypy/pypy/changeset/2d781dffdbc3/ Log: Add missing RegrTests diff --git a/lib-python/conftest.py b/lib-python/conftest.py --- a/lib-python/conftest.py +++ b/lib-python/conftest.py @@ -118,6 +118,7 @@ RegrTest('test_augassign.py', core=True), RegrTest('test_base64.py', usemodules='struct'), RegrTest('test_baseexception.py'), + RegrTest('test_bdb.py'), RegrTest('test_bigaddrspace.py'), RegrTest('test_bigmem.py'), RegrTest('test_binascii.py', usemodules='binascii'), @@ -374,6 +375,7 @@ RegrTest('test_re.py', core=True), RegrTest('test_readline.py'), RegrTest('test_regrtest.py'), + RegrTest('test_repl.py'), RegrTest('test_reprlib.py', core=True), RegrTest('test_resource.py'), RegrTest('test_richcmp.py', core=True), From pypy.commits at gmail.com Sun Aug 4 15:14:04 2019 From: pypy.commits at gmail.com (rlamy) Date: Sun, 04 Aug 2019 12:14:04 -0700 (PDT) Subject: [pypy-commit] pypy stdlib-3.6.9: fix merge Message-ID: <5d472e7c.1c69fb81.58ab2.3293@mx.google.com> Author: Ronan Lamy Branch: stdlib-3.6.9 Changeset: r97060:eee05a6f019e Date: 2019-08-04 20:12 +0100 http://bitbucket.org/pypy/pypy/changeset/eee05a6f019e/ Log: fix merge diff --git a/lib-python/3/pathlib.py b/lib-python/3/pathlib.py --- a/lib-python/3/pathlib.py +++ b/lib-python/3/pathlib.py @@ -330,7 +330,7 @@ target = accessor.readlink(newpath) except OSError as e: if e.errno != EINVAL and strict: - raise + raise # Not a symlink, or non-strict mode. We just leave the path # untouched. 
path = newpath @@ -1244,18 +1244,18 @@ """ if self._closed: self._raise_closed() - try: - self._accessor.mkdir(self, mode) + try: + self._accessor.mkdir(self, mode) except FileNotFoundError: if not parents or self.parent == self: - raise + raise self.parent.mkdir(parents=True, exist_ok=True) self.mkdir(mode, parents=False, exist_ok=exist_ok) except OSError: # Cannot rely on checking for EEXIST, since the operating system # could give priority to other errors like EACCES or EROFS - if not exist_ok or not self.is_dir(): - raise + if not exist_ok or not self.is_dir(): + raise def chmod(self, mode): """ From pypy.commits at gmail.com Mon Aug 5 11:29:55 2019 From: pypy.commits at gmail.com (rlamy) Date: Mon, 05 Aug 2019 08:29:55 -0700 (PDT) Subject: [pypy-commit] pypy stdlib-3.6.9: Skip CPython-specific test Message-ID: <5d484b73.1c69fb81.19d4e.5732@mx.google.com> Author: Ronan Lamy Branch: stdlib-3.6.9 Changeset: r97061:b85f5377415f Date: 2019-08-05 16:29 +0100 http://bitbucket.org/pypy/pypy/changeset/b85f5377415f/ Log: Skip CPython-specific test diff --git a/lib-python/3/test/test_importlib/test_util.py b/lib-python/3/test/test_importlib/test_util.py --- a/lib-python/3/test/test_importlib/test_util.py +++ b/lib-python/3/test/test_importlib/test_util.py @@ -762,6 +762,7 @@ """ Test release compatibility issues relating to importlib """ + @support.cpython_only @unittest.skipUnless( sys.version_info.releaselevel in ('candidate', 'final'), 'only applies to candidate or final python release levels' From pypy.commits at gmail.com Mon Aug 5 11:36:31 2019 From: pypy.commits at gmail.com (rlamy) Date: Mon, 05 Aug 2019 08:36:31 -0700 (PDT) Subject: [pypy-commit] pypy stdlib-3.6.9: Close branch stdlib-3.6.9 Message-ID: <5d484cff.1c69fb81.f6d50.9ec9@mx.google.com> Author: Ronan Lamy Branch: stdlib-3.6.9 Changeset: r97062:c02cfef04371 Date: 2019-08-05 15:35 +0000 http://bitbucket.org/pypy/pypy/changeset/c02cfef04371/ Log: Close branch stdlib-3.6.9 From pypy.commits at gmail.com Mon Aug 5 11:37:04 2019 From: pypy.commits at gmail.com (rlamy) Date: Mon, 05 Aug 2019 08:37:04 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Merged in stdlib-3.6.9 (pull request #661) Message-ID: <5d484d20.1c69fb81.3c153.398e@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97063:64416f48a678 Date: 2019-08-05 15:35 +0000 http://bitbucket.org/pypy/pypy/changeset/64416f48a678/ Log: Merged in stdlib-3.6.9 (pull request #661) Update stdlib to 3.6.9 diff too long, truncating to 2000 out of 60350 lines diff --git a/lib-python/3/_collections_abc.py b/lib-python/3/_collections_abc.py --- a/lib-python/3/_collections_abc.py +++ b/lib-python/3/_collections_abc.py @@ -901,6 +901,9 @@ def index(self, value, start=0, stop=None): '''S.index(value, [start, [stop]]) -> integer -- return first index of value. Raises ValueError if the value is not present. + + Supporting start and stop arguments is optional, but + recommended. 
''' if start is not None and start < 0: start = max(len(self) + start, 0) @@ -910,7 +913,8 @@ i = start while stop is None or i < stop: try: - if self[i] == value: + v = self[i] + if v is value or v == value: return i except IndexError: break @@ -919,7 +923,7 @@ def count(self, value): 'S.count(value) -> integer -- return number of occurrences of value' - return sum(1 for v in self if v == value) + return sum(1 for v in self if v is value or v == value) Sequence.register(tuple) Sequence.register(str) diff --git a/lib-python/3/_osx_support.py b/lib-python/3/_osx_support.py --- a/lib-python/3/_osx_support.py +++ b/lib-python/3/_osx_support.py @@ -17,7 +17,7 @@ _UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', - 'PY_CORE_CFLAGS') + 'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS') # configuration variables that may contain compiler calls _COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX') @@ -212,7 +212,7 @@ # Do not alter a config var explicitly overridden by env var if cv in _config_vars and cv not in os.environ: flags = _config_vars[cv] - flags = re.sub(r'-arch\s+\w+\s', ' ', flags, re.ASCII) + flags = re.sub(r'-arch\s+\w+\s', ' ', flags, flags=re.ASCII) flags = re.sub('-isysroot [^ \t]*', ' ', flags) _save_modified_value(_config_vars, cv, flags) diff --git a/lib-python/3/_pydecimal.py b/lib-python/3/_pydecimal.py --- a/lib-python/3/_pydecimal.py +++ b/lib-python/3/_pydecimal.py @@ -2062,7 +2062,7 @@ if not other and not self: return context._raise_error(InvalidOperation, 'at least one of pow() 1st argument ' - 'and 2nd argument must be nonzero ;' + 'and 2nd argument must be nonzero; ' '0**0 is not defined') # compute sign of result diff --git a/lib-python/3/_pyio.py b/lib-python/3/_pyio.py --- a/lib-python/3/_pyio.py +++ b/lib-python/3/_pyio.py @@ -2064,6 +2064,7 @@ self.buffer.write(b) if self._line_buffering and (haslf or "\r" in s): self.flush() + self._set_decoded_chars('') self._snapshot = None if self._decoder: self._decoder.reset() diff --git a/lib-python/3/_threading_local.py b/lib-python/3/_threading_local.py --- a/lib-python/3/_threading_local.py +++ b/lib-python/3/_threading_local.py @@ -56,11 +56,7 @@ >>> class MyLocal(local): ... number = 2 - ... initialized = False ... def __init__(self, **kw): - ... if self.initialized: - ... raise SystemError('__init__ called too many times') - ... self.initialized = True ... self.__dict__.update(kw) ... def squared(self): ... return self.number ** 2 @@ -97,7 +93,7 @@ >>> thread.start() >>> thread.join() >>> log - [[('color', 'red'), ('initialized', True)], 11] + [[('color', 'red')], 11] without affecting this thread's data: diff --git a/lib-python/3/abc.py b/lib-python/3/abc.py --- a/lib-python/3/abc.py +++ b/lib-python/3/abc.py @@ -129,8 +129,8 @@ # external code. 
_abc_invalidation_counter = 0 - def __new__(mcls, name, bases, namespace): - cls = super().__new__(mcls, name, bases, namespace) + def __new__(mcls, name, bases, namespace, **kwargs): + cls = super().__new__(mcls, name, bases, namespace, **kwargs) # Compute set of abstract method names abstracts = {name for name, value in namespace.items() @@ -170,9 +170,11 @@ """Debug helper to print the ABC registry.""" print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) - for name in sorted(cls.__dict__.keys()): + for name in sorted(cls.__dict__): if name.startswith("_abc_"): value = getattr(cls, name) + if isinstance(value, WeakSet): + value = set(value) print("%s: %r" % (name, value), file=file) def __instancecheck__(cls, instance): diff --git a/lib-python/3/aifc.py b/lib-python/3/aifc.py --- a/lib-python/3/aifc.py +++ b/lib-python/3/aifc.py @@ -322,6 +322,7 @@ else: raise Error('not an AIFF or AIFF-C file') self._comm_chunk_read = 0 + self._ssnd_chunk = None while 1: self._ssnd_seek_needed = 1 try: diff --git a/lib-python/3/antigravity.py b/lib-python/3/antigravity.py --- a/lib-python/3/antigravity.py +++ b/lib-python/3/antigravity.py @@ -11,7 +11,7 @@ 37.857713 -122.544543 ''' - # http://xkcd.com/426/ + # https://xkcd.com/426/ h = hashlib.md5(datedow).hexdigest() p, q = [('%f' % float.fromhex('0.' + x)) for x in (h[:16], h[16:32])] print('%d%s %d%s' % (latitude, p[1:], longitude, q[1:])) diff --git a/lib-python/3/argparse.py b/lib-python/3/argparse.py --- a/lib-python/3/argparse.py +++ b/lib-python/3/argparse.py @@ -325,7 +325,11 @@ if len(prefix) + len(usage) > text_width: # break usage into wrappable parts - part_regexp = r'\(.*?\)+|\[.*?\]+|\S+' + part_regexp = ( + r'\(.*?\)+(?=\s|$)|' + r'\[.*?\]+(?=\s|$)|' + r'\S+' + ) opt_usage = format(optionals, groups) pos_usage = format(positionals, groups) opt_parts = _re.findall(part_regexp, opt_usage) diff --git a/lib-python/3/asyncio/base_events.py b/lib-python/3/asyncio/base_events.py --- a/lib-python/3/asyncio/base_events.py +++ b/lib-python/3/asyncio/base_events.py @@ -54,6 +54,11 @@ _FATAL_ERROR_IGNORE = (BrokenPipeError, ConnectionResetError, ConnectionAbortedError) +_HAS_IPv6 = hasattr(socket, 'AF_INET6') + +# Maximum timeout passed to select to avoid OS limitations +MAXIMUM_SELECT_TIMEOUT = 24 * 3600 + def _format_handle(handle): cb = handle._callback @@ -84,18 +89,24 @@ 'SO_REUSEPORT defined but not implemented.') -def _is_stream_socket(sock): - # Linux's socket.type is a bitmask that can include extra info - # about socket, therefore we can't do simple - # `sock_type == socket.SOCK_STREAM`. - return (sock.type & socket.SOCK_STREAM) == socket.SOCK_STREAM +def _is_stream_socket(sock_type): + if hasattr(socket, 'SOCK_NONBLOCK'): + # Linux's socket.type is a bitmask that can include extra info + # about socket (like SOCK_NONBLOCK bit), therefore we can't do simple + # `sock_type == socket.SOCK_STREAM`, see + # https://github.com/torvalds/linux/blob/v4.13/include/linux/net.h#L77 + # for more details. + return (sock_type & 0xF) == socket.SOCK_STREAM + else: + return sock_type == socket.SOCK_STREAM -def _is_dgram_socket(sock): - # Linux's socket.type is a bitmask that can include extra info - # about socket, therefore we can't do simple - # `sock_type == socket.SOCK_DGRAM`. - return (sock.type & socket.SOCK_DGRAM) == socket.SOCK_DGRAM +def _is_dgram_socket(sock_type): + if hasattr(socket, 'SOCK_NONBLOCK'): + # See the comment in `_is_stream_socket`. 
+ return (sock_type & 0xF) == socket.SOCK_DGRAM + else: + return sock_type == socket.SOCK_DGRAM def _ipaddr_info(host, port, family, type, proto): @@ -108,14 +119,9 @@ host is None: return None - if type == socket.SOCK_STREAM: - # Linux only: - # getaddrinfo() can raise when socket.type is a bit mask. - # So if socket.type is a bit mask of SOCK_STREAM, and say - # SOCK_NONBLOCK, we simply return None, which will trigger - # a call to getaddrinfo() letting it process this request. + if _is_stream_socket(type): proto = socket.IPPROTO_TCP - elif type == socket.SOCK_DGRAM: + elif _is_dgram_socket(type): proto = socket.IPPROTO_UDP else: return None @@ -135,7 +141,7 @@ if family == socket.AF_UNSPEC: afs = [socket.AF_INET] - if hasattr(socket, 'AF_INET6'): + if _HAS_IPv6: afs.append(socket.AF_INET6) else: afs = [family] @@ -151,7 +157,10 @@ try: socket.inet_pton(af, host) # The host has already been resolved. - return af, type, proto, '', (host, port) + if _HAS_IPv6 and af == socket.AF_INET6: + return af, type, proto, '', (host, port, 0, 0) + else: + return af, type, proto, '', (host, port) except OSError: pass @@ -173,6 +182,17 @@ proto=proto, flags=flags) +if hasattr(socket, 'TCP_NODELAY'): + def _set_nodelay(sock): + if (sock.family in {socket.AF_INET, socket.AF_INET6} and + _is_stream_socket(sock.type) and + sock.proto == socket.IPPROTO_TCP): + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) +else: + def _set_nodelay(sock): + pass + + def _run_until_complete_cb(fut): exc = fut._exception if (isinstance(exc, BaseException) @@ -359,10 +379,7 @@ def _asyncgen_finalizer_hook(self, agen): self._asyncgens.discard(agen) if not self.is_closed(): - self.create_task(agen.aclose()) - # Wake up the loop if the finalizer was called from - # a different thread. - self._write_to_self() + self.call_soon_threadsafe(self.create_task, agen.aclose()) def _asyncgen_firstiter_hook(self, agen): if self._asyncgens_shutdown_called: @@ -459,7 +476,8 @@ # local task. future.exception() raise - future.remove_done_callback(_run_until_complete_cb) + finally: + future.remove_done_callback(_run_until_complete_cb) if not future.done(): raise RuntimeError('Event loop stopped before Future completed.') @@ -788,7 +806,7 @@ if sock is None: raise ValueError( 'host and port was not specified and no sock specified') - if not _is_stream_socket(sock): + if not _is_stream_socket(sock.type): # We allow AF_INET, AF_INET6, AF_UNIX as long as they # are SOCK_STREAM. # We support passing AF_UNIX sockets even though we have @@ -840,7 +858,7 @@ allow_broadcast=None, sock=None): """Create datagram connection.""" if sock is not None: - if not _is_dgram_socket(sock): + if not _is_dgram_socket(sock.type): raise ValueError( 'A UDP Socket was expected, got {!r}'.format(sock)) if (local_addr or remote_addr or @@ -995,7 +1013,6 @@ raise ValueError( 'host/port and sock can not be specified at the same time') - AF_INET6 = getattr(socket, 'AF_INET6', 0) if reuse_address is None: reuse_address = os.name == 'posix' and sys.platform != 'cygwin' sockets = [] @@ -1035,7 +1052,9 @@ # Disable IPv4/IPv6 dual stack support (enabled by # default on Linux) which makes a single socket # listen on both address families. 
- if af == AF_INET6 and hasattr(socket, 'IPPROTO_IPV6'): + if (_HAS_IPv6 and + af == socket.AF_INET6 and + hasattr(socket, 'IPPROTO_IPV6')): sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, True) @@ -1053,7 +1072,7 @@ else: if sock is None: raise ValueError('Neither host/port nor sock were specified') - if not _is_stream_socket(sock): + if not _is_stream_socket(sock.type): raise ValueError( 'A Stream Socket was expected, got {!r}'.format(sock)) sockets = [sock] @@ -1077,7 +1096,7 @@ This method is a coroutine. When completed, the coroutine returns a (transport, protocol) pair. """ - if not _is_stream_socket(sock): + if not _is_stream_socket(sock.type): raise ValueError( 'A Stream Socket was expected, got {!r}'.format(sock)) @@ -1151,6 +1170,7 @@ if bufsize != 0: raise ValueError("bufsize must be 0") protocol = protocol_factory() + debug_log = None if self._debug: # don't log parameters: they may contain sensitive information # (password) and may be too long @@ -1158,7 +1178,7 @@ self._log_subprocess(debug_log, stdin, stdout, stderr) transport = yield from self._make_subprocess_transport( protocol, cmd, True, stdin, stdout, stderr, bufsize, **kwargs) - if self._debug: + if self._debug and debug_log is not None: logger.info('%s: %r', debug_log, transport) return transport, protocol @@ -1180,6 +1200,7 @@ "a bytes or text string, not %s" % type(arg).__name__) protocol = protocol_factory() + debug_log = None if self._debug: # don't log parameters: they may contain sensitive information # (password) and may be too long @@ -1188,7 +1209,7 @@ transport = yield from self._make_subprocess_transport( protocol, popen_args, False, stdin, stdout, stderr, bufsize, **kwargs) - if self._debug: + if self._debug and debug_log is not None: logger.info('%s: %r', debug_log, transport) return transport, protocol @@ -1221,6 +1242,11 @@ handler is set, and can be called by a custom exception handler that wants to defer to the default behavior. + This default handler logs the error message and other + context-dependent information. In debug mode, a truncated + stack trace is also appended showing where the given object + (e.g. a handle or future or task) was created, if any. + The context parameter has the same meaning as in `call_exception_handler()`. """ @@ -1363,7 +1389,7 @@ elif self._scheduled: # Compute the desired timeout. when = self._scheduled[0]._when - timeout = max(0, when - self.time()) + timeout = min(max(0, when - self.time()), MAXIMUM_SELECT_TIMEOUT) if self._debug and timeout != 0: t0 = self.time() diff --git a/lib-python/3/asyncio/constants.py b/lib-python/3/asyncio/constants.py --- a/lib-python/3/asyncio/constants.py +++ b/lib-python/3/asyncio/constants.py @@ -5,3 +5,8 @@ # Seconds to wait before retrying accept(). ACCEPT_RETRY_DELAY = 1 + +# Number of stack entries to capture in debug mode. +# The large the number, the slower the operation in debug mode +# (see extract_stack() in events.py) +DEBUG_STACK_DEPTH = 10 diff --git a/lib-python/3/asyncio/coroutines.py b/lib-python/3/asyncio/coroutines.py --- a/lib-python/3/asyncio/coroutines.py +++ b/lib-python/3/asyncio/coroutines.py @@ -10,6 +10,7 @@ import types from . import compat +from . import constants from . import events from . 
import base_futures from .log import logger @@ -91,7 +92,7 @@ assert inspect.isgenerator(gen) or inspect.iscoroutine(gen), gen self.gen = gen self.func = func # Used to unwrap @coroutine decorator - self._source_traceback = traceback.extract_stack(sys._getframe(1)) + self._source_traceback = events.extract_stack(sys._getframe(1)) self.__name__ = getattr(gen, '__name__', None) self.__qualname__ = getattr(gen, '__qualname__', None) @@ -183,8 +184,9 @@ tb = getattr(self, '_source_traceback', ()) if tb: tb = ''.join(traceback.format_list(tb)) - msg += ('\nCoroutine object created at ' - '(most recent call last):\n') + msg += (f'\nCoroutine object created at ' + f'(most recent call last, truncated to ' + f'{constants.DEBUG_STACK_DEPTH} last lines):\n') msg += tb.rstrip() logger.error(msg) @@ -197,7 +199,7 @@ """ if _inspect_iscoroutinefunction(func): # In Python 3.5 that's all we need to do for coroutines - # defiend with "async def". + # defined with "async def". # Wrapping in CoroWrapper will happen via # 'sys.set_coroutine_wrapper' function. return func @@ -308,18 +310,25 @@ if coro_name is None: coro_name = events._format_callback(func, (), {}) - try: + coro_code = None + if hasattr(coro, 'cr_code') and coro.cr_code: + coro_code = coro.cr_code + elif hasattr(coro, 'gi_code') and coro.gi_code: coro_code = coro.gi_code - except AttributeError: - coro_code = coro.cr_code - try: + coro_frame = None + if hasattr(coro, 'cr_frame') and coro.cr_frame: + coro_frame = coro.cr_frame + elif hasattr(coro, 'gi_frame') and coro.gi_frame: coro_frame = coro.gi_frame - except AttributeError: - coro_frame = coro.cr_frame - filename = coro_code.co_filename + filename = '' + if coro_code and coro_code.co_filename: + filename = coro_code.co_filename + lineno = 0 + coro_repr = coro_name + if (isinstance(coro, CoroWrapper) and not inspect.isgeneratorfunction(coro.func) and coro.func is not None): @@ -336,7 +345,7 @@ lineno = coro_frame.f_lineno coro_repr = ('%s running at %s:%s' % (coro_name, filename, lineno)) - else: + elif coro_code: lineno = coro_code.co_firstlineno coro_repr = ('%s done, defined at %s:%s' % (coro_name, filename, lineno)) diff --git a/lib-python/3/asyncio/events.py b/lib-python/3/asyncio/events.py --- a/lib-python/3/asyncio/events.py +++ b/lib-python/3/asyncio/events.py @@ -19,7 +19,8 @@ import threading import traceback -from asyncio import compat +from . import compat +from . import constants def _get_function_source(func): @@ -57,10 +58,10 @@ suffix = _format_args_and_kwargs(args, kwargs) + suffix return _format_callback(func.func, func.args, func.keywords, suffix) - if hasattr(func, '__qualname__'): - func_repr = getattr(func, '__qualname__') - elif hasattr(func, '__name__'): - func_repr = getattr(func, '__name__') + if hasattr(func, '__qualname__') and func.__qualname__: + func_repr = func.__qualname__ + elif hasattr(func, '__name__') and func.__name__: + func_repr = func.__name__ else: func_repr = repr(func) @@ -77,6 +78,23 @@ return func_repr +def extract_stack(f=None, limit=None): + """Replacement for traceback.extract_stack() that only does the + necessary work for asyncio debug mode. + """ + if f is None: + f = sys._getframe().f_back + if limit is None: + # Limit the amount of work to a reasonable amount, as extract_stack() + # can be called for each coroutine and future in debug mode. 
+ limit = constants.DEBUG_STACK_DEPTH + stack = traceback.StackSummary.extract(traceback.walk_stack(f), + limit=limit, + lookup_lines=False) + stack.reverse() + return stack + + class Handle: """Object returned by callback registration methods.""" @@ -90,7 +108,7 @@ self._cancelled = False self._repr = None if self._loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) + self._source_traceback = extract_stack(sys._getframe(1)) else: self._source_traceback = None @@ -611,8 +629,7 @@ # A TLS for the running event loop, used by _get_running_loop. class _RunningLoop(threading.local): - _loop = None - _pid = None + loop_pid = (None, None) _running_loop = _RunningLoop() @@ -624,8 +641,8 @@ This is a low-level function intended to be used by event loops. This function is thread-specific. """ - running_loop = _running_loop._loop - if running_loop is not None and _running_loop._pid == os.getpid(): + running_loop, pid = _running_loop.loop_pid + if running_loop is not None and pid == os.getpid(): return running_loop @@ -635,8 +652,7 @@ This is a low-level function intended to be used by event loops. This function is thread-specific. """ - _running_loop._pid = os.getpid() - _running_loop._loop = loop + _running_loop.loop_pid = (loop, os.getpid()) def _init_event_loop_policy(): diff --git a/lib-python/3/asyncio/futures.py b/lib-python/3/asyncio/futures.py --- a/lib-python/3/asyncio/futures.py +++ b/lib-python/3/asyncio/futures.py @@ -123,11 +123,13 @@ Differences: + - This class is not thread-safe. + - result() and exception() do not take a timeout argument and raise an exception when the future isn't done yet. - Callbacks registered with add_done_callback() are always called - via the event loop's call_soon_threadsafe(). + via the event loop's call_soon(). - This class is not compatible with the wait() and as_completed() methods in the concurrent.futures package. @@ -152,8 +154,7 @@ # `yield Future()` (incorrect). _asyncio_future_blocking = False - _log_traceback = False # Used for Python 3.4 and later - _tb_logger = None # Used for Python 3.3 only + _log_traceback = False def __init__(self, *, loop=None): """Initialize the future. @@ -168,7 +169,7 @@ self._loop = loop self._callbacks = [] if self._loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) + self._source_traceback = events.extract_stack(sys._getframe(1)) _repr_info = base_futures._future_repr_info @@ -202,6 +203,7 @@ change the future's state to cancelled, schedule the callbacks and return True. 
""" + self._log_traceback = False if self._state != _PENDING: return False self._state = _CANCELLED @@ -248,9 +250,6 @@ if self._state != _FINISHED: raise InvalidStateError('Result is not ready.') self._log_traceback = False - if self._tb_logger is not None: - self._tb_logger.clear() - self._tb_logger = None if self._exception is not None: raise self._exception return self._result @@ -268,9 +267,6 @@ if self._state != _FINISHED: raise InvalidStateError('Exception is not set.') self._log_traceback = False - if self._tb_logger is not None: - self._tb_logger.clear() - self._tb_logger = None return self._exception def add_done_callback(self, fn): @@ -423,6 +419,9 @@ source_loop.call_soon_threadsafe(source.cancel) def _call_set_state(source): + if (destination.cancelled() and + dest_loop is not None and dest_loop.is_closed()): + return if dest_loop is None or dest_loop is source_loop: _set_state(destination, source) else: diff --git a/lib-python/3/asyncio/locks.py b/lib-python/3/asyncio/locks.py --- a/lib-python/3/asyncio/locks.py +++ b/lib-python/3/asyncio/locks.py @@ -172,12 +172,22 @@ fut = self._loop.create_future() self._waiters.append(fut) + + # Finally block should be called before the CancelledError + # handling as we don't want CancelledError to call + # _wake_up_first() and attempt to wake up itself. try: - yield from fut - self._locked = True - return True - finally: - self._waiters.remove(fut) + try: + yield from fut + finally: + self._waiters.remove(fut) + except futures.CancelledError: + if not self._locked: + self._wake_up_first() + raise + + self._locked = True + return True def release(self): """Release a lock. @@ -192,14 +202,23 @@ """ if self._locked: self._locked = False - # Wake up the first waiter who isn't cancelled. - for fut in self._waiters: - if not fut.done(): - fut.set_result(True) - break + self._wake_up_first() else: raise RuntimeError('Lock is not acquired.') + def _wake_up_first(self): + """Wake up the first waiter if it isn't done.""" + try: + fut = next(iter(self._waiters)) + except StopIteration: + return + + # .done() necessarily means that a waiter will wake up later on and + # either take the lock, or, if it was cancelled and lock wasn't + # taken already, will hit this again and wake up a new waiter. + if not fut.done(): + fut.set_result(True) + class Event: """Asynchronous equivalent to threading.Event. 
@@ -330,12 +349,16 @@ finally: # Must reacquire lock even if wait is cancelled + cancelled = False while True: try: yield from self.acquire() break except futures.CancelledError: - pass + cancelled = True + + if cancelled: + raise futures.CancelledError @coroutine def wait_for(self, predicate): diff --git a/lib-python/3/asyncio/proactor_events.py b/lib-python/3/asyncio/proactor_events.py --- a/lib-python/3/asyncio/proactor_events.py +++ b/lib-python/3/asyncio/proactor_events.py @@ -156,29 +156,29 @@ extra=None, server=None): super().__init__(loop, sock, protocol, waiter, extra, server) self._paused = False + self._reschedule_on_resume = False self._loop.call_soon(self._loop_reading) def pause_reading(self): - if self._closing: - raise RuntimeError('Cannot pause_reading() when closing') - if self._paused: - raise RuntimeError('Already paused') + if self._closing or self._paused: + return self._paused = True if self._loop.get_debug(): logger.debug("%r pauses reading", self) def resume_reading(self): - if not self._paused: - raise RuntimeError('Not paused') + if self._closing or not self._paused: + return self._paused = False - if self._closing: - return - self._loop.call_soon(self._loop_reading, self._read_fut) + if self._reschedule_on_resume: + self._loop.call_soon(self._loop_reading, self._read_fut) + self._reschedule_on_resume = False if self._loop.get_debug(): logger.debug("%r resumes reading", self) def _loop_reading(self, fut=None): if self._paused: + self._reschedule_on_resume = True return data = None @@ -232,8 +232,9 @@ def write(self, data): if not isinstance(data, (bytes, bytearray, memoryview)): - raise TypeError('data argument must be byte-ish (%r)', - type(data)) + msg = ("data argument must be a bytes-like object, not '%s'" % + type(data).__name__) + raise TypeError(msg) if self._eof_written: raise RuntimeError('write_eof() already called') @@ -349,6 +350,11 @@ transports.Transport): """Transport for connected sockets.""" + def __init__(self, loop, sock, protocol, waiter=None, + extra=None, server=None): + super().__init__(loop, sock, protocol, waiter, extra, server) + base_events._set_nodelay(sock) + def _set_extra(self, sock): self._extra['socket'] = sock try: diff --git a/lib-python/3/asyncio/queues.py b/lib-python/3/asyncio/queues.py --- a/lib-python/3/asyncio/queues.py +++ b/lib-python/3/asyncio/queues.py @@ -167,6 +167,12 @@ yield from getter except: getter.cancel() # Just in case getter is not done yet. + + try: + self._getters.remove(getter) + except ValueError: + pass + if not self.empty() and not getter.cancelled(): # We were woken up by put_nowait(), but can't take # the call. Wake up the next in line. diff --git a/lib-python/3/asyncio/selector_events.py b/lib-python/3/asyncio/selector_events.py --- a/lib-python/3/asyncio/selector_events.py +++ b/lib-python/3/asyncio/selector_events.py @@ -40,17 +40,6 @@ return bool(key.events & event) -if hasattr(socket, 'TCP_NODELAY'): - def _set_nodelay(sock): - if (sock.family in {socket.AF_INET, socket.AF_INET6} and - sock.type == socket.SOCK_STREAM and - sock.proto == socket.IPPROTO_TCP): - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) -else: - def _set_nodelay(sock): - pass - - class BaseSelectorEventLoop(base_events.BaseEventLoop): """Selector event loop. 
@@ -363,25 +352,25 @@ if self._debug and sock.gettimeout() != 0: raise ValueError("the socket must be non-blocking") fut = self.create_future() - self._sock_recv(fut, False, sock, n) + self._sock_recv(fut, None, sock, n) return fut - def _sock_recv(self, fut, registered, sock, n): + def _sock_recv(self, fut, registered_fd, sock, n): # _sock_recv() can add itself as an I/O callback if the operation can't # be done immediately. Don't use it directly, call sock_recv(). - fd = sock.fileno() - if registered: + if registered_fd is not None: # Remove the callback early. It should be rare that the # selector says the fd is ready but the call still returns # EAGAIN, and I am willing to take a hit in that case in # order to simplify the common case. - self.remove_reader(fd) + self.remove_reader(registered_fd) if fut.cancelled(): return try: data = sock.recv(n) except (BlockingIOError, InterruptedError): - self.add_reader(fd, self._sock_recv, fut, True, sock, n) + fd = sock.fileno() + self.add_reader(fd, self._sock_recv, fut, fd, sock, n) except Exception as exc: fut.set_exception(exc) else: @@ -402,16 +391,14 @@ raise ValueError("the socket must be non-blocking") fut = self.create_future() if data: - self._sock_sendall(fut, False, sock, data) + self._sock_sendall(fut, None, sock, data) else: fut.set_result(None) return fut - def _sock_sendall(self, fut, registered, sock, data): - fd = sock.fileno() - - if registered: - self.remove_writer(fd) + def _sock_sendall(self, fut, registered_fd, sock, data): + if registered_fd is not None: + self.remove_writer(registered_fd) if fut.cancelled(): return @@ -428,7 +415,8 @@ else: if n: data = data[n:] - self.add_writer(fd, self._sock_sendall, fut, True, sock, data) + fd = sock.fileno() + self.add_writer(fd, self._sock_sendall, fut, fd, sock, data) @coroutine def sock_connect(self, sock, address): @@ -674,6 +662,12 @@ def get_write_buffer_size(self): return len(self._buffer) + def _add_reader(self, fd, callback, *args): + if self._closing: + return + + self._loop._add_reader(fd, callback, *args) + class _SelectorSocketTransport(_SelectorTransport): @@ -686,11 +680,11 @@ # Disable the Nagle algorithm -- small writes will be # sent without waiting for the TCP ACK. This generally # decreases the latency (in some cases significantly.) 
- _set_nodelay(self._sock) + base_events._set_nodelay(self._sock) self._loop.call_soon(self._protocol.connection_made, self) # only start reading when connection_made() has been called - self._loop.call_soon(self._loop._add_reader, + self._loop.call_soon(self._add_reader, self._sock_fd, self._read_ready) if waiter is not None: # only wake up the waiter when connection_made() has been called @@ -698,22 +692,18 @@ waiter, None) def pause_reading(self): - if self._closing: - raise RuntimeError('Cannot pause_reading() when closing') - if self._paused: - raise RuntimeError('Already paused') + if self._closing or self._paused: + return self._paused = True self._loop._remove_reader(self._sock_fd) if self._loop.get_debug(): logger.debug("%r pauses reading", self) def resume_reading(self): - if not self._paused: - raise RuntimeError('Not paused') + if self._closing or not self._paused: + return self._paused = False - if self._closing: - return - self._loop._add_reader(self._sock_fd, self._read_ready) + self._add_reader(self._sock_fd, self._read_ready) if self._loop.get_debug(): logger.debug("%r resumes reading", self) @@ -801,7 +791,7 @@ self._sock.shutdown(socket.SHUT_WR) def write_eof(self): - if self._eof: + if self._closing or self._eof: return self._eof = True if not self._buffer: @@ -1053,7 +1043,7 @@ self._address = address self._loop.call_soon(self._protocol.connection_made, self) # only start reading when connection_made() has been called - self._loop.call_soon(self._loop._add_reader, + self._loop.call_soon(self._add_reader, self._sock_fd, self._read_ready) if waiter is not None: # only wake up the waiter when connection_made() has been called diff --git a/lib-python/3/asyncio/sslproto.py b/lib-python/3/asyncio/sslproto.py --- a/lib-python/3/asyncio/sslproto.py +++ b/lib-python/3/asyncio/sslproto.py @@ -294,11 +294,10 @@ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): - def __init__(self, loop, ssl_protocol, app_protocol): + def __init__(self, loop, ssl_protocol): self._loop = loop # SSLProtocol instance self._ssl_protocol = ssl_protocol - self._app_protocol = app_protocol self._closed = False def get_extra_info(self, name, default=None): @@ -306,10 +305,10 @@ return self._ssl_protocol._get_extra_info(name, default) def set_protocol(self, protocol): - self._app_protocol = protocol + self._ssl_protocol._app_protocol = protocol def get_protocol(self): - return self._app_protocol + return self._ssl_protocol._app_protocol def is_closing(self): return self._closed @@ -436,8 +435,7 @@ self._waiter = waiter self._loop = loop self._app_protocol = app_protocol - self._app_transport = _SSLProtocolTransport(self._loop, - self, self._app_protocol) + self._app_transport = _SSLProtocolTransport(self._loop, self) # _SSLPipe instance (None until the connection is made) self._sslpipe = None self._session_established = False @@ -499,6 +497,10 @@ The argument is a bytes object. 
""" + if self._sslpipe is None: + # transport closing, sslpipe is destroyed + return + try: ssldata, appdata = self._sslpipe.feed_ssldata(data) except ssl.SSLError as e: @@ -543,14 +545,19 @@ def _get_extra_info(self, name, default=None): if name in self._extra: return self._extra[name] + elif self._transport is not None: + return self._transport.get_extra_info(name, default) else: - return self._transport.get_extra_info(name, default) + return default def _start_shutdown(self): if self._in_shutdown: return - self._in_shutdown = True - self._write_appdata(b'') + if self._in_handshake: + self._abort() + else: + self._in_shutdown = True + self._write_appdata(b'') def _write_appdata(self, data): self._write_backlog.append((data, 0)) @@ -567,7 +574,7 @@ # (b'', 1) is a special value in _process_write_backlog() to do # the SSL handshake self._write_backlog.append((b'', 1)) - self._loop.call_soon(self._process_write_backlog) + self._process_write_backlog() def _on_handshake_complete(self, handshake_exc): self._in_handshake = False @@ -623,7 +630,7 @@ def _process_write_backlog(self): # Try to make progress on the write backlog. - if self._transport is None: + if self._transport is None or self._sslpipe is None: return try: @@ -681,12 +688,14 @@ self._transport._force_close(exc) def _finalize(self): + self._sslpipe = None + if self._transport is not None: self._transport.close() def _abort(self): - if self._transport is not None: - try: + try: + if self._transport is not None: self._transport.abort() - finally: - self._finalize() + finally: + self._finalize() diff --git a/lib-python/3/asyncio/streams.py b/lib-python/3/asyncio/streams.py --- a/lib-python/3/asyncio/streams.py +++ b/lib-python/3/asyncio/streams.py @@ -35,6 +35,9 @@ self.partial = partial self.expected = expected + def __reduce__(self): + return type(self), (self.partial, self.expected) + class LimitOverrunError(Exception): """Reached the buffer limit while looking for a separator. @@ -46,6 +49,9 @@ super().__init__(message) self.consumed = consumed + def __reduce__(self): + return type(self), (self.args[0], self.consumed) + @coroutine def open_connection(host=None, port=None, *, diff --git a/lib-python/3/asyncio/tasks.py b/lib-python/3/asyncio/tasks.py --- a/lib-python/3/asyncio/tasks.py +++ b/lib-python/3/asyncio/tasks.py @@ -148,6 +148,7 @@ terminates with a CancelledError exception (even if cancel() was not called). """ + self._log_traceback = False if self.done(): return False if self._fut_waiter is not None: @@ -180,7 +181,12 @@ else: result = coro.throw(exc) except StopIteration as exc: - self.set_result(exc.value) + if self._must_cancel: + # Task is cancelled right before coro stops. + self._must_cancel = False + self.set_exception(futures.CancelledError()) + else: + self.set_result(exc.value) except futures.CancelledError: super().cancel() # I.e., Future.cancel(self). except Exception as exc: @@ -227,7 +233,7 @@ self._step, RuntimeError( 'yield was used instead of yield from for ' - 'generator in task {!r} with {}'.format( + 'generator in task {!r} with {!r}'.format( self, result))) else: # Yielding something else is an error. 
@@ -517,7 +523,8 @@ elif compat.PY35 and inspect.isawaitable(coro_or_future): return ensure_future(_wrap_awaitable(coro_or_future), loop=loop) else: - raise TypeError('A Future, a coroutine or an awaitable is required') + raise TypeError('An asyncio.Future, a coroutine or an awaitable is ' + 'required') @coroutine @@ -541,6 +548,7 @@ def __init__(self, children, *, loop=None): super().__init__(loop=loop) self._children = children + self._cancel_requested = False def cancel(self): if self.done(): @@ -549,6 +557,11 @@ for child in self._children: if child.cancel(): ret = True + if ret: + # If any child tasks were actually cancelled, we should + # propagate the cancellation request regardless of + # *return_exceptions* argument. See issue 32684. + self._cancel_requested = True return ret @@ -629,7 +642,10 @@ results[i] = res nfinished += 1 if nfinished == nchildren: - outer.set_result(results) + if outer._cancel_requested: + outer.set_exception(futures.CancelledError()) + else: + outer.set_result(results) for i, fut in enumerate(children): fut.add_done_callback(functools.partial(_done_callback, i)) diff --git a/lib-python/3/asyncio/test_utils.py b/lib-python/3/asyncio/test_utils.py --- a/lib-python/3/asyncio/test_utils.py +++ b/lib-python/3/asyncio/test_utils.py @@ -33,6 +33,7 @@ from . import tasks from .coroutines import coroutine from .log import logger +from test import support if sys.platform == 'win32': # pragma: no cover @@ -41,6 +42,21 @@ from socket import socketpair # pragma: no cover +def data_file(filename): + if hasattr(support, 'TEST_HOME_DIR'): + fullname = os.path.join(support.TEST_HOME_DIR, filename) + if os.path.isfile(fullname): + return fullname + fullname = os.path.join(os.path.dirname(os.__file__), 'test', filename) + if os.path.isfile(fullname): + return fullname + raise FileNotFoundError(filename) + + +ONLYCERT = data_file('ssl_cert.pem') +ONLYKEY = data_file('ssl_key.pem') + + def dummy_ssl_context(): if ssl is None: return None @@ -113,12 +129,8 @@ # contains the ssl key and certificate files) differs # between the stdlib and stand-alone asyncio. # Prefer our own if we can find it. 
- here = os.path.join(os.path.dirname(__file__), '..', 'tests') - if not os.path.isdir(here): - here = os.path.join(os.path.dirname(os.__file__), - 'test', 'test_asyncio') - keyfile = os.path.join(here, 'ssl_key.pem') - certfile = os.path.join(here, 'ssl_cert.pem') + keyfile = ONLYKEY + certfile = ONLYCERT context = ssl.SSLContext() context.load_cert_chain(certfile, keyfile) @@ -334,12 +346,19 @@ return False def assert_reader(self, fd, callback, *args): - assert fd in self.readers, 'fd {} is not registered'.format(fd) + if fd not in self.readers: + raise AssertionError(f'fd {fd} is not registered') handle = self.readers[fd] - assert handle._callback == callback, '{!r} != {!r}'.format( - handle._callback, callback) - assert handle._args == args, '{!r} != {!r}'.format( - handle._args, args) + if handle._callback != callback: + raise AssertionError( + f'unexpected callback: {handle._callback} != {callback}') + if handle._args != args: + raise AssertionError( + f'unexpected callback args: {handle._args} != {args}') + + def assert_no_reader(self, fd): + if fd in self.readers: + raise AssertionError(f'fd {fd} is registered') def _add_writer(self, fd, callback, *args): self.writers[fd] = events.Handle(callback, args, self) @@ -437,12 +456,19 @@ class TestCase(unittest.TestCase): + @staticmethod + def close_loop(loop): + executor = loop._default_executor + if executor is not None: + executor.shutdown(wait=True) + loop.close() + def set_event_loop(self, loop, *, cleanup=True): assert loop is not None # ensure that the event loop is passed explicitly in asyncio events.set_event_loop(None) if cleanup: - self.addCleanup(loop.close) + self.addCleanup(self.close_loop, loop) def new_test_loop(self, gen=None): loop = TestLoop(gen) @@ -455,6 +481,7 @@ def setUp(self): self._get_running_loop = events._get_running_loop events._get_running_loop = lambda: None + self._thread_cleanup = support.threading_setup() def tearDown(self): self.unpatch_get_running_loop() @@ -465,6 +492,10 @@ # in an except block of a generator self.assertEqual(sys.exc_info(), (None, None, None)) + self.doCleanups() + support.threading_cleanup(*self._thread_cleanup) + support.reap_children() + if not compat.PY34: # Python 3.3 compatibility def subTest(self, *args, **kwargs): diff --git a/lib-python/3/asyncio/unix_events.py b/lib-python/3/asyncio/unix_events.py --- a/lib-python/3/asyncio/unix_events.py +++ b/lib-python/3/asyncio/unix_events.py @@ -61,8 +61,17 @@ def close(self): super().close() - for sig in list(self._signal_handlers): - self.remove_signal_handler(sig) + if not sys.is_finalizing(): + for sig in list(self._signal_handlers): + self.remove_signal_handler(sig) + else: + if self._signal_handlers: + warnings.warn(f"Closing the loop {self!r} " + f"on interpreter shutdown " + f"stage, skipping signal handlers removal", + ResourceWarning, + source=self) + self._signal_handlers.clear() def _process_self_data(self, data): for signum in data: @@ -242,7 +251,7 @@ if sock is None: raise ValueError('no path and sock were specified') if (sock.family != socket.AF_UNIX or - not base_events._is_stream_socket(sock)): + not base_events._is_stream_socket(sock.type)): raise ValueError( 'A UNIX Domain Stream Socket was expected, got {!r}' .format(sock)) @@ -297,7 +306,7 @@ 'path was not specified, and no sock specified') if (sock.family != socket.AF_UNIX or - not base_events._is_stream_socket(sock)): + not base_events._is_stream_socket(sock.type)): raise ValueError( 'A UNIX Domain Stream Socket was expected, got {!r}' .format(sock)) diff --git 
a/lib-python/3/asyncore.py b/lib-python/3/asyncore.py --- a/lib-python/3/asyncore.py +++ b/lib-python/3/asyncore.py @@ -619,8 +619,9 @@ def close(self): if self.fd < 0: return - os.close(self.fd) + fd = self.fd self.fd = -1 + os.close(fd) def fileno(self): return self.fd diff --git a/lib-python/3/base64.py b/lib-python/3/base64.py --- a/lib-python/3/base64.py +++ b/lib-python/3/base64.py @@ -231,23 +231,16 @@ raise binascii.Error('Non-base32 digit found') from None decoded += acc.to_bytes(5, 'big') # Process the last, partial quanta - if padchars: + if l % 8 or padchars not in {0, 1, 3, 4, 6}: + raise binascii.Error('Incorrect padding') + if padchars and decoded: acc <<= 5 * padchars last = acc.to_bytes(5, 'big') - if padchars == 1: - decoded[-5:] = last[:-1] - elif padchars == 3: - decoded[-5:] = last[:-2] - elif padchars == 4: - decoded[-5:] = last[:-3] - elif padchars == 6: - decoded[-5:] = last[:-4] - else: - raise binascii.Error('Incorrect padding') + leftover = (43 - 5 * padchars) // 8 # 1: 4, 3: 3, 4: 2, 6: 1 + decoded[-5:] = last[:leftover] return bytes(decoded) - # RFC 3548, Base 16 Alphabet specifies uppercase, but hexlify() returns # lowercase. The RFC also recommends against accepting input case # insensitively. diff --git a/lib-python/3/bdb.py b/lib-python/3/bdb.py --- a/lib-python/3/bdb.py +++ b/lib-python/3/bdb.py @@ -3,10 +3,13 @@ import fnmatch import sys import os -from inspect import CO_GENERATOR +from inspect import CO_GENERATOR, CO_COROUTINE, CO_ASYNC_GENERATOR __all__ = ["BdbQuit", "Bdb", "Breakpoint"] +GENERATOR_AND_COROUTINE_FLAGS = CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR + + class BdbQuit(Exception): """Exception to give up completely.""" @@ -77,7 +80,7 @@ # No need to trace this function return # None # Ignore call events in generator except when stepping. - if self.stopframe and frame.f_code.co_flags & CO_GENERATOR: + if self.stopframe and frame.f_code.co_flags & GENERATOR_AND_COROUTINE_FLAGS: return self.trace_dispatch self.user_call(frame, arg) if self.quitting: raise BdbQuit @@ -86,7 +89,7 @@ def dispatch_return(self, frame, arg): if self.stop_here(frame) or frame == self.returnframe: # Ignore return events in generator except when stepping. - if self.stopframe and frame.f_code.co_flags & CO_GENERATOR: + if self.stopframe and frame.f_code.co_flags & GENERATOR_AND_COROUTINE_FLAGS: return self.trace_dispatch try: self.frame_returning = frame @@ -104,7 +107,7 @@ # When stepping with next/until/return in a generator frame, skip # the internal StopIteration exception (with no traceback) # triggered by a subiterator run with the 'yield from' statement. - if not (frame.f_code.co_flags & CO_GENERATOR + if not (frame.f_code.co_flags & GENERATOR_AND_COROUTINE_FLAGS and arg[0] is StopIteration and arg[2] is None): self.user_exception(frame, arg) if self.quitting: raise BdbQuit @@ -113,7 +116,7 @@ # next/until command at the last statement in the generator before the # exception. 
elif (self.stopframe and frame is not self.stopframe - and self.stopframe.f_code.co_flags & CO_GENERATOR + and self.stopframe.f_code.co_flags & GENERATOR_AND_COROUTINE_FLAGS and arg[0] in (StopIteration, GeneratorExit)): self.user_exception(frame, arg) if self.quitting: raise BdbQuit @@ -230,7 +233,7 @@ def set_return(self, frame): """Stop when returning from the given frame.""" - if frame.f_code.co_flags & CO_GENERATOR: + if frame.f_code.co_flags & GENERATOR_AND_COROUTINE_FLAGS: self._set_stopinfo(frame, None, -1) else: self._set_stopinfo(frame.f_back, frame) diff --git a/lib-python/3/cProfile.py b/lib-python/3/cProfile.py --- a/lib-python/3/cProfile.py +++ b/lib-python/3/cProfile.py @@ -25,11 +25,11 @@ # ____________________________________________________________ class Profile(_lsprof.Profiler): - """Profile(custom_timer=None, time_unit=None, subcalls=True, builtins=True) + """Profile(timer=None, timeunit=None, subcalls=True, builtins=True) Builds a profiler object using the specified timer function. The default timer is a fast built-in one based on real time. - For custom timer functions returning integers, time_unit can + For custom timer functions returning integers, timeunit can be a float specifying a scale (i.e. how long each integer unit is, in seconds). """ @@ -121,7 +121,7 @@ # ____________________________________________________________ def main(): - import os, sys + import os, sys, pstats from optparse import OptionParser usage = "cProfile.py [-o output_file_path] [-s sort] scriptfile [arg] ..." parser = OptionParser(usage=usage) @@ -130,7 +130,8 @@ help="Save stats to ", default=None) parser.add_option('-s', '--sort', dest="sort", help="Sort order when printing to stdout, based on pstats.Stats class", - default=-1) + default=-1, + choices=sorted(pstats.Stats.sort_arg_dict_default)) if not sys.argv[1:]: parser.print_usage() diff --git a/lib-python/3/cgi.py b/lib-python/3/cgi.py --- a/lib-python/3/cgi.py +++ b/lib-python/3/cgi.py @@ -404,7 +404,8 @@ """ def __init__(self, fp=None, headers=None, outerboundary=b'', environ=os.environ, keep_blank_values=0, strict_parsing=0, - limit=None, encoding='utf-8', errors='replace'): + limit=None, encoding='utf-8', errors='replace', + max_num_fields=None): """Constructor. Read multipart/* until last part. Arguments, all optional: @@ -444,10 +445,14 @@ for the page sending the form (content-type : meta http-equiv or header) + max_num_fields: int. If set, then __init__ throws a ValueError + if there are more than n fields read by parse_qsl(). 
+ """ method = 'GET' self.keep_blank_values = keep_blank_values self.strict_parsing = strict_parsing + self.max_num_fields = max_num_fields if 'REQUEST_METHOD' in environ: method = environ['REQUEST_METHOD'].upper() self.qs_on_post = None @@ -670,12 +675,11 @@ qs = qs.decode(self.encoding, self.errors) if self.qs_on_post: qs += '&' + self.qs_on_post - self.list = [] query = urllib.parse.parse_qsl( qs, self.keep_blank_values, self.strict_parsing, - encoding=self.encoding, errors=self.errors) - for key, value in query: - self.list.append(MiniFieldStorage(key, value)) + encoding=self.encoding, errors=self.errors, + max_num_fields=self.max_num_fields) + self.list = [MiniFieldStorage(key, value) for key, value in query] self.skip_lines() FieldStorageClass = None @@ -689,9 +693,9 @@ if self.qs_on_post: query = urllib.parse.parse_qsl( self.qs_on_post, self.keep_blank_values, self.strict_parsing, - encoding=self.encoding, errors=self.errors) - for key, value in query: - self.list.append(MiniFieldStorage(key, value)) + encoding=self.encoding, errors=self.errors, + max_num_fields=self.max_num_fields) + self.list.extend(MiniFieldStorage(key, value) for key, value in query) klass = self.FieldStorageClass or self.__class__ first_line = self.fp.readline() # bytes @@ -706,6 +710,11 @@ first_line = self.fp.readline() self.bytes_read += len(first_line) + # Propagate max_num_fields into the sub class appropriately + max_num_fields = self.max_num_fields + if max_num_fields is not None: + max_num_fields -= len(self.list) + while True: parser = FeedParser() hdr_text = b"" @@ -727,7 +736,15 @@ part = klass(self.fp, headers, ib, environ, keep_blank_values, strict_parsing,self.limit-self.bytes_read, - self.encoding, self.errors) + self.encoding, self.errors, max_num_fields) + + if max_num_fields is not None: + max_num_fields -= 1 + if part.list: + max_num_fields -= len(part.list) + if max_num_fields < 0: + raise ValueError('Max number of fields exceeded') + self.bytes_read += part.bytes_read self.list.append(part) if part.done or self.bytes_read >= self.length > 0: diff --git a/lib-python/3/cgitb.py b/lib-python/3/cgitb.py --- a/lib-python/3/cgitb.py +++ b/lib-python/3/cgitb.py @@ -124,7 +124,7 @@ args, varargs, varkw, locals = inspect.getargvalues(frame) call = '' if func != '?': - call = 'in ' + strong(func) + \ + call = 'in ' + strong(pydoc.html.escape(func)) + \ inspect.formatargvalues(args, varargs, varkw, locals, formatvalue=lambda value: '=' + pydoc.html.repr(value)) @@ -282,7 +282,7 @@ if self.display: if plain: - doc = doc.replace('&', '&').replace('<', '<') + doc = pydoc.html.escape(doc) self.file.write('
<pre>' + doc + '</pre>
\n') else: self.file.write(doc + '\n') diff --git a/lib-python/3/codecs.py b/lib-python/3/codecs.py --- a/lib-python/3/codecs.py +++ b/lib-python/3/codecs.py @@ -479,15 +479,17 @@ self.charbuffer = self._empty_charbuffer.join(self.linebuffer) self.linebuffer = None + if chars < 0: + # For compatibility with other read() methods that take a + # single argument + chars = size + # read until we get the required number of characters (if available) while True: # can the request be satisfied from the character buffer? if chars >= 0: if len(self.charbuffer) >= chars: break - elif size >= 0: - if len(self.charbuffer) >= size: - break # we need more data if size < 0: newdata = self.stream.read() diff --git a/lib-python/3/collections/__init__.py b/lib-python/3/collections/__init__.py --- a/lib-python/3/collections/__init__.py +++ b/lib-python/3/collections/__init__.py @@ -85,9 +85,7 @@ def __init__(*args, **kwds): '''Initialize an ordered dictionary. The signature is the same as - regular dictionaries, but keyword arguments are not recommended because - their insertion order is arbitrary. - + regular dictionaries. Keyword argument order is preserved. ''' if not args: raise TypeError("descriptor '__init__' of 'OrderedDict' object " @@ -157,9 +155,9 @@ dict.clear(self) def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. + '''Remove and return a (key, value) pair from the dictionary. + Pairs are returned in LIFO order if last is true or FIFO order if false. - ''' if not self: raise KeyError('dictionary is empty') diff --git a/lib-python/3/compileall.py b/lib-python/3/compileall.py --- a/lib-python/3/compileall.py +++ b/lib-python/3/compileall.py @@ -16,10 +16,6 @@ import py_compile import struct -try: - from concurrent.futures import ProcessPoolExecutor -except ImportError: - ProcessPoolExecutor = None from functools import partial __all__ = ["compile_dir","compile_file","compile_path"] @@ -68,9 +64,17 @@ optimize: optimization level or -1 for level of the interpreter workers: maximum number of parallel workers """ - if workers is not None and workers < 0: - raise ValueError('workers must be greater or equal to 0') - + ProcessPoolExecutor = None + if workers is not None: + if workers < 0: + raise ValueError('workers must be greater or equal to 0') + elif workers != 1: + try: + # Only import when needed, as low resource platforms may + # fail to import it + from concurrent.futures import ProcessPoolExecutor + except ImportError: + workers = 1 files = _walk_dir(dir, quiet=quiet, maxlevels=maxlevels, ddir=ddir) success = True diff --git a/lib-python/3/concurrent/futures/_base.py b/lib-python/3/concurrent/futures/_base.py --- a/lib-python/3/concurrent/futures/_base.py +++ b/lib-python/3/concurrent/futures/_base.py @@ -170,6 +170,29 @@ return waiter + +def _yield_finished_futures(fs, waiter, ref_collect): + """ + Iterate on the list *fs*, yielding finished futures one by one in + reverse order. + Before yielding a future, *waiter* is removed from its waiters + and the future is removed from each set in the collection of sets + *ref_collect*. + + The aim of this function is to avoid keeping stale references after + the future is yielded and before the iterator resumes. 
+ """ + while fs: + f = fs[-1] + for futures_set in ref_collect: + futures_set.remove(f) + with f._condition: + f._waiters.remove(waiter) + del f + # Careful not to keep a reference to the popped value + yield fs.pop() + + def as_completed(fs, timeout=None): """An iterator over the given futures that yields each as it completes. @@ -189,28 +212,30 @@ before the given timeout. """ if timeout is not None: - end_time = timeout + time.time() + end_time = timeout + time.monotonic() fs = set(fs) + total_futures = len(fs) with _AcquireFutures(fs): finished = set( f for f in fs if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]) pending = fs - finished waiter = _create_and_install_waiters(fs, _AS_COMPLETED) - + finished = list(finished) try: - yield from finished + yield from _yield_finished_futures(finished, waiter, + ref_collect=(fs,)) while pending: if timeout is None: wait_timeout = None else: - wait_timeout = end_time - time.time() + wait_timeout = end_time - time.monotonic() if wait_timeout < 0: raise TimeoutError( '%d (of %d) futures unfinished' % ( - len(pending), len(fs))) + len(pending), total_futures)) waiter.event.wait(wait_timeout) @@ -219,11 +244,13 @@ waiter.finished_futures = [] waiter.event.clear() - for future in finished: - yield future - pending.remove(future) + # reverse to keep finishing order + finished.reverse() + yield from _yield_finished_futures(finished, waiter, + ref_collect=(fs, pending)) finally: + # Remove waiter from unfinished futures for f in fs: with f._condition: f._waiters.remove(waiter) @@ -543,7 +570,7 @@ Exception: If fn(*args) raises for any values. """ if timeout is not None: - end_time = timeout + time.time() + end_time = timeout + time.monotonic() fs = [self.submit(fn, *args) for args in zip(*iterables)] @@ -551,11 +578,14 @@ # before the first iterator value is required. def result_iterator(): try: - for future in fs: + # reverse to keep finishing order + fs.reverse() + while fs: + # Careful not to keep a reference to the popped future if timeout is None: - yield future.result() + yield fs.pop().result() else: - yield future.result(end_time - time.time()) + yield fs.pop().result(end_time - time.monotonic()) finally: for future in fs: future.cancel() diff --git a/lib-python/3/concurrent/futures/process.py b/lib-python/3/concurrent/futures/process.py --- a/lib-python/3/concurrent/futures/process.py +++ b/lib-python/3/concurrent/futures/process.py @@ -357,6 +357,18 @@ raise NotImplementedError(_system_limited) +def _chain_from_iterable_of_lists(iterable): + """ + Specialized implementation of itertools.chain.from_iterable. + Each item in *iterable* should be a list. This function is + careful not to keep references to yielded objects. 
+ """ + for element in iterable: + element.reverse() + while element: + yield element.pop() + + class BrokenProcessPool(RuntimeError): """ Raised when a process in a ProcessPoolExecutor terminated abruptly @@ -482,7 +494,7 @@ results = super().map(partial(_process_chunk, fn), _get_chunks(*iterables, chunksize=chunksize), timeout=timeout) - return itertools.chain.from_iterable(results) + return _chain_from_iterable_of_lists(results) def shutdown(self, wait=True): with self._shutdown_lock: diff --git a/lib-python/3/concurrent/futures/thread.py b/lib-python/3/concurrent/futures/thread.py --- a/lib-python/3/concurrent/futures/thread.py +++ b/lib-python/3/concurrent/futures/thread.py @@ -7,6 +7,7 @@ import atexit from concurrent.futures import _base +import itertools import queue import threading import weakref @@ -53,8 +54,10 @@ try: result = self.fn(*self.args, **self.kwargs) - except BaseException as e: - self.future.set_exception(e) + except BaseException as exc: + self.future.set_exception(exc) + # Break a reference cycle with the exception 'exc' + self = None else: self.future.set_result(result) @@ -81,6 +84,10 @@ _base.LOGGER.critical('Exception in worker', exc_info=True) class ThreadPoolExecutor(_base.Executor): + + # Used to assign unique thread names when thread_name_prefix is not supplied. + _counter = itertools.count().__next__ + def __init__(self, max_workers=None, thread_name_prefix=''): """Initializes a new ThreadPoolExecutor instance. @@ -101,7 +108,8 @@ self._threads = set() self._shutdown = False self._shutdown_lock = threading.Lock() - self._thread_name_prefix = thread_name_prefix + self._thread_name_prefix = (thread_name_prefix or + ("ThreadPoolExecutor-%d" % self._counter())) def submit(self, fn, *args, **kwargs): with self._shutdown_lock: diff --git a/lib-python/3/configparser.py b/lib-python/3/configparser.py --- a/lib-python/3/configparser.py +++ b/lib-python/3/configparser.py @@ -80,7 +80,7 @@ Return list of configuration options for the named section. read(filenames, encoding=None) - Read and parse the list of named configuration files, given by + Read and parse the iterable of named configuration files, given by name. A single filename is also allowed. Non-existing files are ignored. Return list of successfully read files. @@ -677,13 +677,13 @@ return list(opts.keys()) def read(self, filenames, encoding=None): - """Read and parse a filename or a list of filenames. + """Read and parse a filename or an iterable of filenames. Files that cannot be opened are silently ignored; this is - designed so that you can specify a list of potential + designed so that you can specify an iterable of potential configuration file locations (e.g. current directory, user's home directory, systemwide directory), and all existing - configuration files in the list will be read. A single + configuration files in the iterable will be read. A single filename may also be given. Return list of successfully read files. diff --git a/lib-python/3/contextlib.py b/lib-python/3/contextlib.py --- a/lib-python/3/contextlib.py +++ b/lib-python/3/contextlib.py @@ -1,6 +1,7 @@ """Utilities for with-statement contexts. 
See PEP 343.""" import abc import sys +import _collections_abc from collections import deque from functools import wraps @@ -25,9 +26,7 @@ @classmethod def __subclasshook__(cls, C): if cls is AbstractContextManager: - if (any("__enter__" in B.__dict__ for B in C.__mro__) and - any("__exit__" in B.__dict__ for B in C.__mro__)): - return True + return _collections_abc._check_methods(C, "__enter__", "__exit__") return NotImplemented @@ -88,7 +87,7 @@ try: next(self.gen) except StopIteration: - return + return False else: raise RuntimeError("generator didn't stop") else: @@ -98,7 +97,6 @@ value = type() try: self.gen.throw(type, value, traceback) - raise RuntimeError("generator didn't stop after throw()") except StopIteration as exc: # Suppress StopIteration *unless* it's the same exception that # was passed to throw(). This prevents a StopIteration @@ -111,7 +109,7 @@ # Likewise, avoid suppressing if a StopIteration exception # was passed to throw() and later wrapped into a RuntimeError # (see PEP 479). - if exc.__cause__ is value: + if type is StopIteration and exc.__cause__ is value: return False raise except: @@ -122,8 +120,10 @@ # fixes the impedance mismatch between the throw() protocol # and the __exit__() protocol. # - if sys.exc_info()[1] is not value: - raise + if sys.exc_info()[1] is value: + return False + raise + raise RuntimeError("generator didn't stop after throw()") def contextmanager(func): diff --git a/lib-python/3/copyreg.py b/lib-python/3/copyreg.py --- a/lib-python/3/copyreg.py +++ b/lib-python/3/copyreg.py @@ -128,7 +128,11 @@ continue # mangled names elif name.startswith('__') and not name.endswith('__'): - names.append('_%s%s' % (c.__name__, name)) + stripped = c.__name__.lstrip('_') + if stripped: + names.append('_%s%s' % (stripped, name)) + else: + names.append(name) else: names.append(name) diff --git a/lib-python/3/csv.py b/lib-python/3/csv.py --- a/lib-python/3/csv.py +++ b/lib-python/3/csv.py @@ -217,7 +217,7 @@ matches = [] for restr in (r'(?P[^\w\n"\'])(?P ?)(?P["\']).*?(?P=quote)(?P=delim)', # ,".*?", r'(?:^|\n)(?P["\']).*?(?P=quote)(?P[^\w\n"\'])(?P ?)', # ".*?", - r'(?P>[^\w\n"\'])(?P ?)(?P["\']).*?(?P=quote)(?:$|\n)', # ,".*?" + r'(?P[^\w\n"\'])(?P ?)(?P["\']).*?(?P=quote)(?:$|\n)', # ,".*?" r'(?:^|\n)(?P["\']).*?(?P=quote)(?:$|\n)'): # ".*?" (no delim, no space) regexp = re.compile(restr, re.DOTALL | re.MULTILINE) matches = regexp.findall(data) diff --git a/lib-python/3/ctypes/test/test_anon.py b/lib-python/3/ctypes/test/test_anon.py --- a/lib-python/3/ctypes/test/test_anon.py +++ b/lib-python/3/ctypes/test/test_anon.py @@ -1,4 +1,5 @@ import unittest +import test.support from ctypes import * class AnonTest(unittest.TestCase): @@ -35,6 +36,18 @@ From pypy.commits at gmail.com Mon Aug 5 11:49:31 2019 From: pypy.commits at gmail.com (rlamy) Date: Mon, 05 Aug 2019 08:49:31 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: update whatsnew Message-ID: <5d48500b.1c69fb81.14e56.7011@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97064:02f897916463 Date: 2019-08-05 16:48 +0100 http://bitbucket.org/pypy/pypy/changeset/02f897916463/ Log: update whatsnew diff --git a/pypy/doc/whatsnew-pypy3-head.rst b/pypy/doc/whatsnew-pypy3-head.rst --- a/pypy/doc/whatsnew-pypy3-head.rst +++ b/pypy/doc/whatsnew-pypy3-head.rst @@ -26,3 +26,11 @@ .. branch: Ram-Rachum/faulthandleris_enabled-should-return-fal-1563636614875 .. branch: Anthony-Sottile/fix-leak-of-file-descriptor-with-_iofile-1559687440863 +.. 
branch: py3tests + +Add handling of application-level test files and -D flag to test runner + +.. branch: vendor/stdlib-3.6 +.. branch: stdlib-3.6.9 + +Update standard library to version 3.6.9 From pypy.commits at gmail.com Mon Aug 5 12:11:23 2019 From: pypy.commits at gmail.com (rlamy) Date: Mon, 05 Aug 2019 09:11:23 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: fix merge Message-ID: <5d48552b.1c69fb81.59e8f.4f9e@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97065:05f51882e7bd Date: 2019-08-05 17:10 +0100 http://bitbucket.org/pypy/pypy/changeset/05f51882e7bd/ Log: fix merge diff --git a/lib-python/3/test/test_compile.py b/lib-python/3/test/test_compile.py --- a/lib-python/3/test/test_compile.py +++ b/lib-python/3/test/test_compile.py @@ -513,7 +513,7 @@ self.assertInvalidSingle('1\n2') if check_impl_detail(): # it's a single statment in PyPy - self.assertInvalidSingle('def f(): pass') + self.assertInvalidSingle('def f(): pass') self.assertInvalidSingle('a = 13\nb = 187') self.assertInvalidSingle('del x\ndel y') self.assertInvalidSingle('f()\ng()') From pypy.commits at gmail.com Mon Aug 5 12:53:40 2019 From: pypy.commits at gmail.com (rlamy) Date: Mon, 05 Aug 2019 09:53:40 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: convert test_compile.py to new-style apptest Message-ID: <5d485f14.1c69fb81.fc0d4.c1a9@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97067:7c9b554a3d09 Date: 2019-08-05 17:50 +0100 http://bitbucket.org/pypy/pypy/changeset/7c9b554a3d09/ Log: convert test_compile.py to new-style apptest diff --git a/pypy/module/__builtin__/test/test_compile.py b/pypy/module/__builtin__/test/apptest_compile.py copy from pypy/module/__builtin__/test/test_compile.py copy to pypy/module/__builtin__/test/apptest_compile.py --- a/pypy/module/__builtin__/test/test_compile.py +++ b/pypy/module/__builtin__/test/apptest_compile.py @@ -1,161 +1,159 @@ -# coding: utf-8 -class AppTestCompile: +from pytest import raises, skip +import sys - def test_simple(self): - import sys - co = compile('1+2', '?', 'eval') - assert eval(co) == 3 - co = compile(memoryview(b'1+2'), '?', 'eval') - assert eval(co) == 3 - exc = raises(ValueError, compile, chr(0), '?', 'eval') - assert str(exc.value) == "source code string cannot contain null bytes" - compile("from __future__ import with_statement", "", "exec") - raises(SyntaxError, compile, '-', '?', 'eval') - raises(SyntaxError, compile, '"\\xt"', '?', 'eval') - raises(ValueError, compile, '1+2', '?', 'maybenot') - raises(ValueError, compile, "\n", "", "exec", 0xff) - raises(TypeError, compile, '1+2', 12, 34) +def test_simple(): + co = compile('1+2', '?', 'eval') + assert eval(co) == 3 + co = compile(memoryview(b'1+2'), '?', 'eval') + assert eval(co) == 3 + exc = raises(ValueError, compile, chr(0), '?', 'eval') + assert str(exc.value) == "source code string cannot contain null bytes" + compile("from __future__ import with_statement", "", "exec") + raises(SyntaxError, compile, '-', '?', 'eval') + raises(SyntaxError, compile, '"\\xt"', '?', 'eval') + raises(ValueError, compile, '1+2', '?', 'maybenot') + raises(ValueError, compile, "\n", "", "exec", 0xff) + raises(TypeError, compile, '1+2', 12, 34) - def test_error_message(self): - import re - compile('# -*- coding: iso-8859-15 -*-\n', 'dummy', 'exec') - compile(b'\xef\xbb\xbf\n', 'dummy', 'exec') - compile(b'\xef\xbb\xbf# -*- coding: utf-8 -*-\n', 'dummy', 'exec') - exc = raises(SyntaxError, compile, - b'# -*- coding: fake -*-\n', 'dummy', 'exec') - assert 'fake' in str(exc.value) - exc = 
raises(SyntaxError, compile, - b'\xef\xbb\xbf# -*- coding: iso-8859-15 -*-\n', 'dummy', 'exec') - assert 'iso-8859-15' in str(exc.value) - assert 'BOM' in str(exc.value) - exc = raises(SyntaxError, compile, - b'\xef\xbb\xbf# -*- coding: fake -*-\n', 'dummy', 'exec') - assert 'fake' in str(exc.value) - assert 'BOM' in str(exc.value) +def test_error_message(): + compile('# -*- coding: iso-8859-15 -*-\n', 'dummy', 'exec') + compile(b'\xef\xbb\xbf\n', 'dummy', 'exec') + compile(b'\xef\xbb\xbf# -*- coding: utf-8 -*-\n', 'dummy', 'exec') + exc = raises(SyntaxError, compile, + b'# -*- coding: fake -*-\n', 'dummy', 'exec') + assert 'fake' in str(exc.value) + exc = raises(SyntaxError, compile, + b'\xef\xbb\xbf# -*- coding: iso-8859-15 -*-\n', 'dummy', 'exec') + assert 'iso-8859-15' in str(exc.value) + assert 'BOM' in str(exc.value) + exc = raises(SyntaxError, compile, + b'\xef\xbb\xbf# -*- coding: fake -*-\n', 'dummy', 'exec') + assert 'fake' in str(exc.value) + assert 'BOM' in str(exc.value) - def test_unicode(self): - try: - compile(u'-', '?', 'eval') - except SyntaxError as e: - assert e.lineno == 1 +def test_unicode(): + try: + compile(u'-', '?', 'eval') + except SyntaxError as e: + assert e.lineno == 1 - def test_incorrect_escape_deprecation_bytes(self): - import warnings - with warnings.catch_warnings(record=True) as l: - warnings.simplefilter('always', category=DeprecationWarning) - compile(r"b'\}'", '', 'exec') - assert len(l) == 1 +def test_incorrect_escape_deprecation_bytes(): + import warnings + with warnings.catch_warnings(record=True) as l: + warnings.simplefilter('always', category=DeprecationWarning) + compile(r"b'\}'", '', 'exec') + assert len(l) == 1 - def test_unicode_encoding(self): - code = "# -*- coding: utf-8 -*-\npass\n" - compile(code, "tmp", "exec") +def test_unicode_encoding(): + code = "# -*- coding: utf-8 -*-\npass\n" + compile(code, "tmp", "exec") - def test_bytes(self): - code = b"# -*- coding: utf-8 -*-\npass\n" - compile(code, "tmp", "exec") - c = compile(b"# coding: latin1\nfoo = 'caf\xe9'\n", "", "exec") - ns = {} - exec(c, ns) - assert ns['foo'] == 'café' - assert eval(b"# coding: latin1\n'caf\xe9'\n") == 'café' +def test_bytes(): + code = b"# -*- coding: utf-8 -*-\npass\n" + compile(code, "tmp", "exec") + c = compile(b"# coding: latin1\nfoo = 'caf\xe9'\n", "", "exec") + ns = {} + exec(c, ns) + assert ns['foo'] == 'café' + assert eval(b"# coding: latin1\n'caf\xe9'\n") == 'café' - def test_memoryview(self): - m = memoryview(b'2 + 1') - co = compile(m, 'baz', 'eval') - assert eval(co) == 3 - assert eval(m) == 3 - ns = {} - exec(memoryview(b'r = 2 + 1'), ns) - assert ns['r'] == 3 +def test_memoryview(): + m = memoryview(b'2 + 1') + co = compile(m, 'baz', 'eval') + assert eval(co) == 3 + assert eval(m) == 3 + ns = {} + exec(memoryview(b'r = 2 + 1'), ns) + assert ns['r'] == 3 - def test_recompile_ast(self): - import _ast - # raise exception when node type doesn't match with compile mode - co1 = compile('print(1)', '', 'exec', _ast.PyCF_ONLY_AST) - raises(TypeError, compile, co1, '', 'eval') - co2 = compile('1+1', '', 'eval', _ast.PyCF_ONLY_AST) - tree = compile(co2, '', 'eval') - assert compile(co2, '', 'eval', _ast.PyCF_ONLY_AST) is co2 +def test_recompile_ast(): + import _ast + # raise exception when node type doesn't match with compile mode + co1 = compile('print(1)', '', 'exec', _ast.PyCF_ONLY_AST) + raises(TypeError, compile, co1, '', 'eval') + co2 = compile('1+1', '', 'eval', _ast.PyCF_ONLY_AST) + tree = compile(co2, '', 'eval') + assert compile(co2, '', 'eval', 
_ast.PyCF_ONLY_AST) is co2 - def test_leading_newlines(self): - src = """ +def test_leading_newlines(): + src = """ def fn(): pass """ - co = compile(src, 'mymod', 'exec') - firstlineno = co.co_firstlineno - assert firstlineno == 2 + co = compile(src, 'mymod', 'exec') + firstlineno = co.co_firstlineno + assert firstlineno == 2 - def test_null_bytes(self): - raises(ValueError, compile, '\x00', 'mymod', 'exec', 0) - src = "#abc\x00def\n" - raises(ValueError, compile, src, 'mymod', 'exec') - raises(ValueError, compile, src, 'mymod', 'exec', 0) +def test_null_bytes(): + raises(ValueError, compile, '\x00', 'mymod', 'exec', 0) + src = "#abc\x00def\n" + raises(ValueError, compile, src, 'mymod', 'exec') + raises(ValueError, compile, src, 'mymod', 'exec', 0) - def test_null_bytes_flag(self): - try: - from _ast import PyCF_ACCEPT_NULL_BYTES - except ImportError: - skip('PyPy only (requires _ast.PyCF_ACCEPT_NULL_BYTES)') - raises(SyntaxError, compile, '\x00', 'mymod', 'exec', - PyCF_ACCEPT_NULL_BYTES) - src = "#abc\x00def\n" - compile(src, 'mymod', 'exec', PyCF_ACCEPT_NULL_BYTES) # works +def test_null_bytes_flag(): + try: + from _ast import PyCF_ACCEPT_NULL_BYTES + except ImportError: + skip('PyPy only (requires _ast.PyCF_ACCEPT_NULL_BYTES)') + raises(SyntaxError, compile, '\x00', 'mymod', 'exec', + PyCF_ACCEPT_NULL_BYTES) + src = "#abc\x00def\n" + compile(src, 'mymod', 'exec', PyCF_ACCEPT_NULL_BYTES) # works - def test_compile_regression(self): - """Clone of the part of the original test that was failing.""" - import ast +def test_compile_regression(): + """Clone of the part of the original test that was failing.""" + import ast - codestr = '''def f(): - """doc""" - try: - assert False - except AssertionError: - return (True, f.__doc__) - else: - return (False, f.__doc__) - ''' + codestr = '''def f(): + """doc""" + try: + assert False + except AssertionError: + return (True, f.__doc__) + else: + return (False, f.__doc__) + ''' - def f(): """doc""" - values = [(-1, __debug__, f.__doc__), - (0, True, 'doc'), - (1, False, 'doc'), - (2, False, None)] + def f(): """doc""" + values = [(-1, __debug__, f.__doc__), + (0, True, 'doc'), + (1, False, 'doc'), + (2, False, None)] - for optval, debugval, docstring in values: - # test both direct compilation and compilation via AST - codeobjs = [] - codeobjs.append( - compile(codestr, "", "exec", optimize=optval)) - tree = ast.parse(codestr) - codeobjs.append(compile(tree, "", "exec", optimize=optval)) + for optval, debugval, docstring in values: + # test both direct compilation and compilation via AST + codeobjs = [] + codeobjs.append( + compile(codestr, "", "exec", optimize=optval)) + tree = ast.parse(codestr) + codeobjs.append(compile(tree, "", "exec", optimize=optval)) - for i, code in enumerate(codeobjs): - print(optval, debugval, docstring, i) - ns = {} - exec(code, ns) - rv = ns['f']() - assert rv == (debugval, docstring) + for i, code in enumerate(codeobjs): + print(optval, debugval, docstring, i) + ns = {} + exec(code, ns) + rv = ns['f']() + assert rv == (debugval, docstring) - def test_assert_remove(self): - """Test removal of the asserts with optimize=1.""" - import ast +def test_assert_remove(): + """Test removal of the asserts with optimize=1.""" + import ast - code = """def f(): - assert False - """ - tree = ast.parse(code) - for to_compile in [code, tree]: - compiled = compile(to_compile, "", "exec", optimize=1) - ns = {} - exec(compiled, ns) - ns['f']() + code = """def f(): + assert False + """ + tree = ast.parse(code) + for to_compile in [code, 
tree]: + compiled = compile(to_compile, "", "exec", optimize=1) + ns = {} + exec(compiled, ns) + ns['f']() - def test_docstring_remove(self): - """Test removal of docstrings with optimize=2.""" - import ast - import marshal +def test_docstring_remove(): + """Test removal of docstrings with optimize=2.""" + import ast + import marshal - code = """ + code = """ 'module_doc' def f(): @@ -164,59 +162,19 @@ class C: 'class_doc' """ - tree = ast.parse(code) - for to_compile in [code, tree]: - compiled = compile(to_compile, "", "exec", optimize=2) + tree = ast.parse(code) + for to_compile in [code, tree]: + compiled = compile(to_compile, "", "exec", optimize=2) - ns = {} - exec(compiled, ns) - assert '__doc__' not in ns - assert ns['f'].__doc__ is None - assert ns['C'].__doc__ is None + ns = {} + exec(compiled, ns) + assert '__doc__' not in ns + assert ns['f'].__doc__ is None + assert ns['C'].__doc__ is None - # Check that the docstrings are gone from the bytecode and not just - # inaccessible. - marshalled = str(marshal.dumps(compiled)) - assert 'module_doc' not in marshalled - assert 'func_doc' not in marshalled - assert 'class_doc' not in marshalled - - -class TestOptimizeO: - """Test interaction of -O flag and optimize parameter of compile.""" - - def setup_method(self, method): - space = self.space - self._sys_debug = space.sys.debug - # imitate -O - space.sys.debug = False - - def teardown_method(self, method): - self.space.sys.debug = self._sys_debug - - def test_O_optmize_0(self): - """Test that assert is not ignored if -O flag is set but optimize=0.""" - space = self.space - w_res = space.appexec([], """(): - assert False # check that our -O imitation hack works - try: - exec(compile('assert False', '', 'exec', optimize=0)) - except AssertionError: - return True - else: - return False - """) - assert space.unwrap(w_res) - - def test_O_optimize__1(self): - """Test that assert is ignored with -O and optimize=-1.""" - space = self.space - space.appexec([], """(): - exec(compile('assert False', '', 'exec', optimize=-1)) - """) - - -# TODO: Check the value of __debug__ inside of the compiled block! -# According to the documentation, it should follow the optimize flag. -# However, cpython3.5.0a0 behaves the same way as PyPy (__debug__ follows -# -O, -OO flags of the interpreter). + # Check that the docstrings are gone from the bytecode and not just + # inaccessible. 
+ marshalled = str(marshal.dumps(compiled)) + assert 'module_doc' not in marshalled + assert 'func_doc' not in marshalled + assert 'class_doc' not in marshalled diff --git a/pypy/module/__builtin__/test/test_compile.py b/pypy/module/__builtin__/test/test_compile.py --- a/pypy/module/__builtin__/test/test_compile.py +++ b/pypy/module/__builtin__/test/test_compile.py @@ -1,187 +1,3 @@ -# coding: utf-8 -class AppTestCompile: - - def test_simple(self): - import sys - co = compile('1+2', '?', 'eval') - assert eval(co) == 3 - co = compile(memoryview(b'1+2'), '?', 'eval') - assert eval(co) == 3 - exc = raises(ValueError, compile, chr(0), '?', 'eval') - assert str(exc.value) == "source code string cannot contain null bytes" - compile("from __future__ import with_statement", "", "exec") - raises(SyntaxError, compile, '-', '?', 'eval') - raises(SyntaxError, compile, '"\\xt"', '?', 'eval') - raises(ValueError, compile, '1+2', '?', 'maybenot') - raises(ValueError, compile, "\n", "", "exec", 0xff) - raises(TypeError, compile, '1+2', 12, 34) - - def test_error_message(self): - import re - compile('# -*- coding: iso-8859-15 -*-\n', 'dummy', 'exec') - compile(b'\xef\xbb\xbf\n', 'dummy', 'exec') - compile(b'\xef\xbb\xbf# -*- coding: utf-8 -*-\n', 'dummy', 'exec') - exc = raises(SyntaxError, compile, - b'# -*- coding: fake -*-\n', 'dummy', 'exec') - assert 'fake' in str(exc.value) - exc = raises(SyntaxError, compile, - b'\xef\xbb\xbf# -*- coding: iso-8859-15 -*-\n', 'dummy', 'exec') - assert 'iso-8859-15' in str(exc.value) - assert 'BOM' in str(exc.value) - exc = raises(SyntaxError, compile, - b'\xef\xbb\xbf# -*- coding: fake -*-\n', 'dummy', 'exec') - assert 'fake' in str(exc.value) - assert 'BOM' in str(exc.value) - - def test_unicode(self): - try: - compile(u'-', '?', 'eval') - except SyntaxError as e: - assert e.lineno == 1 - - def test_incorrect_escape_deprecation_bytes(self): - import warnings - with warnings.catch_warnings(record=True) as l: - warnings.simplefilter('always', category=DeprecationWarning) - compile(r"b'\}'", '', 'exec') - assert len(l) == 1 - - def test_unicode_encoding(self): - code = "# -*- coding: utf-8 -*-\npass\n" - compile(code, "tmp", "exec") - - def test_bytes(self): - code = b"# -*- coding: utf-8 -*-\npass\n" - compile(code, "tmp", "exec") - c = compile(b"# coding: latin1\nfoo = 'caf\xe9'\n", "", "exec") - ns = {} - exec(c, ns) - assert ns['foo'] == 'café' - assert eval(b"# coding: latin1\n'caf\xe9'\n") == 'café' - - def test_memoryview(self): - m = memoryview(b'2 + 1') - co = compile(m, 'baz', 'eval') - assert eval(co) == 3 - assert eval(m) == 3 - ns = {} - exec(memoryview(b'r = 2 + 1'), ns) - assert ns['r'] == 3 - - def test_recompile_ast(self): - import _ast - # raise exception when node type doesn't match with compile mode - co1 = compile('print(1)', '', 'exec', _ast.PyCF_ONLY_AST) - raises(TypeError, compile, co1, '', 'eval') - co2 = compile('1+1', '', 'eval', _ast.PyCF_ONLY_AST) - tree = compile(co2, '', 'eval') - assert compile(co2, '', 'eval', _ast.PyCF_ONLY_AST) is co2 - - def test_leading_newlines(self): - src = """ -def fn(): pass -""" - co = compile(src, 'mymod', 'exec') - firstlineno = co.co_firstlineno - assert firstlineno == 2 - - def test_null_bytes(self): - raises(ValueError, compile, '\x00', 'mymod', 'exec', 0) - src = "#abc\x00def\n" - raises(ValueError, compile, src, 'mymod', 'exec') - raises(ValueError, compile, src, 'mymod', 'exec', 0) - - def test_null_bytes_flag(self): - try: - from _ast import PyCF_ACCEPT_NULL_BYTES - except ImportError: - skip('PyPy 
only (requires _ast.PyCF_ACCEPT_NULL_BYTES)') - raises(SyntaxError, compile, '\x00', 'mymod', 'exec', - PyCF_ACCEPT_NULL_BYTES) - src = "#abc\x00def\n" - compile(src, 'mymod', 'exec', PyCF_ACCEPT_NULL_BYTES) # works - - def test_compile_regression(self): - """Clone of the part of the original test that was failing.""" - import ast - - codestr = '''def f(): - """doc""" - try: - assert False - except AssertionError: - return (True, f.__doc__) - else: - return (False, f.__doc__) - ''' - - def f(): """doc""" - values = [(-1, __debug__, f.__doc__), - (0, True, 'doc'), - (1, False, 'doc'), - (2, False, None)] - - for optval, debugval, docstring in values: - # test both direct compilation and compilation via AST - codeobjs = [] - codeobjs.append( - compile(codestr, "", "exec", optimize=optval)) - tree = ast.parse(codestr) - codeobjs.append(compile(tree, "", "exec", optimize=optval)) - - for i, code in enumerate(codeobjs): - print(optval, debugval, docstring, i) - ns = {} - exec(code, ns) - rv = ns['f']() - assert rv == (debugval, docstring) - - def test_assert_remove(self): - """Test removal of the asserts with optimize=1.""" - import ast - - code = """def f(): - assert False - """ - tree = ast.parse(code) - for to_compile in [code, tree]: - compiled = compile(to_compile, "", "exec", optimize=1) - ns = {} - exec(compiled, ns) - ns['f']() - - def test_docstring_remove(self): - """Test removal of docstrings with optimize=2.""" - import ast - import marshal - - code = """ -'module_doc' - -def f(): - 'func_doc' - -class C: - 'class_doc' -""" - tree = ast.parse(code) - for to_compile in [code, tree]: - compiled = compile(to_compile, "", "exec", optimize=2) - - ns = {} - exec(compiled, ns) - assert '__doc__' not in ns - assert ns['f'].__doc__ is None - assert ns['C'].__doc__ is None - - # Check that the docstrings are gone from the bytecode and not just - # inaccessible. 
- marshalled = str(marshal.dumps(compiled)) - assert 'module_doc' not in marshalled - assert 'func_doc' not in marshalled - assert 'class_doc' not in marshalled - - class TestOptimizeO: """Test interaction of -O flag and optimize parameter of compile.""" From pypy.commits at gmail.com Tue Aug 6 08:18:18 2019 From: pypy.commits at gmail.com (arigo) Date: Tue, 06 Aug 2019 05:18:18 -0700 (PDT) Subject: [pypy-commit] pypy default: Issue #3050 Message-ID: <5d49700a.1c69fb81.324d6.96d6@mx.google.com> Author: Armin Rigo Branch: Changeset: r97068:6e42888fc78a Date: 2019-08-06 14:17 +0200 http://bitbucket.org/pypy/pypy/changeset/6e42888fc78a/ Log: Issue #3050 subprocess: accept arguments that are not directly subscriptable (like iterators) diff --git a/lib-python/2.7/subprocess.py b/lib-python/2.7/subprocess.py --- a/lib-python/2.7/subprocess.py +++ b/lib-python/2.7/subprocess.py @@ -337,7 +337,7 @@ # --- PyPy hack, see _pypy_install_libs_after_virtualenv() --- # match arguments passed by different versions of virtualenv - if args[1:] in ( + if isinstance(args, (list, tuple)) and args[1:] in ( ['-c', 'import sys; print(sys.prefix)'], # 1.6 10ba3f3c ['-c', "\nimport sys\nprefix = sys.prefix\n" # 1.7 0e9342ce "if sys.version_info[0] == 3:\n" From pypy.commits at gmail.com Tue Aug 6 08:18:20 2019 From: pypy.commits at gmail.com (arigo) Date: Tue, 06 Aug 2019 05:18:20 -0700 (PDT) Subject: [pypy-commit] pypy default: merge heads Message-ID: <5d49700c.1c69fb81.02bd.6070@mx.google.com> Author: Armin Rigo Branch: Changeset: r97069:6b6a9bdd8658 Date: 2019-08-06 14:17 +0200 http://bitbucket.org/pypy/pypy/changeset/6b6a9bdd8658/ Log: merge heads diff --git a/pypy/module/__builtin__/test/test_compile.py b/pypy/module/__builtin__/test/apptest_compile.py rename from pypy/module/__builtin__/test/test_compile.py rename to pypy/module/__builtin__/test/apptest_compile.py --- a/pypy/module/__builtin__/test/test_compile.py +++ b/pypy/module/__builtin__/test/apptest_compile.py @@ -1,81 +1,82 @@ -class AppTestCompile: - def test_simple(self): - import sys - co = compile('1+2', '?', 'eval') +from pytest import raises +import sys + +def test_simple(): + co = compile('1+2', '?', 'eval') + assert eval(co) == 3 + co = compile(buffer('1+2'), '?', 'eval') + assert eval(co) == 3 + exc = raises(TypeError, compile, chr(0), '?', 'eval') + assert str(exc.value) == "compile() expected string without null bytes" + exc = raises(TypeError, compile, unichr(0), '?', 'eval') + assert str(exc.value) == "compile() expected string without null bytes" + + if '__pypy__' in sys.modules: + co = compile(memoryview('1+2'), '?', 'eval') assert eval(co) == 3 - co = compile(buffer('1+2'), '?', 'eval') - assert eval(co) == 3 - exc = raises(TypeError, compile, chr(0), '?', 'eval') - assert str(exc.value) == "compile() expected string without null bytes" - exc = raises(TypeError, compile, unichr(0), '?', 'eval') - assert str(exc.value) == "compile() expected string without null bytes" + compile("from __future__ import with_statement", "", "exec") + raises(SyntaxError, compile, '-', '?', 'eval') + raises(ValueError, compile, '"\\xt"', '?', 'eval') + raises(ValueError, compile, '1+2', '?', 'maybenot') + raises(ValueError, compile, "\n", "", "exec", 0xff) + raises(TypeError, compile, '1+2', 12, 34) - if '__pypy__' in sys.modules: - co = compile(memoryview('1+2'), '?', 'eval') - assert eval(co) == 3 - compile("from __future__ import with_statement", "", "exec") - raises(SyntaxError, compile, '-', '?', 'eval') - raises(ValueError, compile, '"\\xt"', '?', 
'eval') - raises(ValueError, compile, '1+2', '?', 'maybenot') - raises(ValueError, compile, "\n", "", "exec", 0xff) - raises(TypeError, compile, '1+2', 12, 34) +def test_error_message(): + import re + compile('# -*- coding: iso-8859-15 -*-\n', 'dummy', 'exec') + compile(b'\xef\xbb\xbf\n', 'dummy', 'exec') + compile(b'\xef\xbb\xbf# -*- coding: utf-8 -*-\n', 'dummy', 'exec') + exc = raises(SyntaxError, compile, + b'# -*- coding: fake -*-\n', 'dummy', 'exec') + assert 'fake' in str(exc.value) + exc = raises(SyntaxError, compile, + b'\xef\xbb\xbf# -*- coding: iso-8859-15 -*-\n', 'dummy', 'exec') + assert 'iso-8859-15' in str(exc.value) + assert 'BOM' in str(exc.value) + exc = raises(SyntaxError, compile, + b'\xef\xbb\xbf# -*- coding: fake -*-\n', 'dummy', 'exec') + assert 'fake' in str(exc.value) + assert 'BOM' in str(exc.value) - def test_error_message(self): - import re - compile('# -*- coding: iso-8859-15 -*-\n', 'dummy', 'exec') - compile(b'\xef\xbb\xbf\n', 'dummy', 'exec') - compile(b'\xef\xbb\xbf# -*- coding: utf-8 -*-\n', 'dummy', 'exec') - exc = raises(SyntaxError, compile, - b'# -*- coding: fake -*-\n', 'dummy', 'exec') - assert 'fake' in str(exc.value) - exc = raises(SyntaxError, compile, - b'\xef\xbb\xbf# -*- coding: iso-8859-15 -*-\n', 'dummy', 'exec') - assert 'iso-8859-15' in str(exc.value) - assert 'BOM' in str(exc.value) - exc = raises(SyntaxError, compile, - b'\xef\xbb\xbf# -*- coding: fake -*-\n', 'dummy', 'exec') - assert 'fake' in str(exc.value) - assert 'BOM' in str(exc.value) +def test_unicode(): + try: + compile(u'-', '?', 'eval') + except SyntaxError as e: + assert e.lineno == 1 - def test_unicode(self): - try: - compile(u'-', '?', 'eval') - except SyntaxError as e: - assert e.lineno == 1 +def test_unicode_encoding(): + code = u"# -*- coding: utf-8 -*-\npass\n" + raises(SyntaxError, compile, code, "tmp", "exec") - def test_unicode_encoding(self): - code = u"# -*- coding: utf-8 -*-\npass\n" - raises(SyntaxError, compile, code, "tmp", "exec") +def test_recompile_ast(): + import _ast + # raise exception when node type doesn't match with compile mode + co1 = compile('print 1', '', 'exec', _ast.PyCF_ONLY_AST) + raises(TypeError, compile, co1, '', 'eval') + co2 = compile('1+1', '', 'eval', _ast.PyCF_ONLY_AST) + tree = compile(co2, '', 'eval') + assert compile(co2, '', 'eval', _ast.PyCF_ONLY_AST) is co2 - def test_recompile_ast(self): - import _ast - # raise exception when node type doesn't match with compile mode - co1 = compile('print 1', '', 'exec', _ast.PyCF_ONLY_AST) - raises(TypeError, compile, co1, '', 'eval') - co2 = compile('1+1', '', 'eval', _ast.PyCF_ONLY_AST) - tree = compile(co2, '', 'eval') - assert compile(co2, '', 'eval', _ast.PyCF_ONLY_AST) is co2 - - def test_leading_newlines(self): - src = """ +def test_leading_newlines(): + src = """ def fn(): pass """ - co = compile(src, 'mymod', 'exec') - firstlineno = co.co_firstlineno - assert firstlineno == 2 + co = compile(src, 'mymod', 'exec') + firstlineno = co.co_firstlineno + assert firstlineno == 2 - def test_null_bytes(self): - raises(TypeError, compile, '\x00', 'mymod', 'exec', 0) - src = "#abc\x00def\n" - raises(TypeError, compile, src, 'mymod', 'exec') - raises(TypeError, compile, src, 'mymod', 'exec', 0) +def test_null_bytes(): + raises(TypeError, compile, '\x00', 'mymod', 'exec', 0) + src = "#abc\x00def\n" + raises(TypeError, compile, src, 'mymod', 'exec') + raises(TypeError, compile, src, 'mymod', 'exec', 0) - def test_null_bytes_flag(self): - try: - from _ast import PyCF_ACCEPT_NULL_BYTES - except 
ImportError: - skip('PyPy only (requires _ast.PyCF_ACCEPT_NULL_BYTES)') - raises(SyntaxError, compile, '\x00', 'mymod', 'exec', - PyCF_ACCEPT_NULL_BYTES) - src = "#abc\x00def\n" - compile(src, 'mymod', 'exec', PyCF_ACCEPT_NULL_BYTES) # works +def test_null_bytes_flag(): + try: + from _ast import PyCF_ACCEPT_NULL_BYTES + except ImportError: + skip('PyPy only (requires _ast.PyCF_ACCEPT_NULL_BYTES)') + raises(SyntaxError, compile, '\x00', 'mymod', 'exec', + PyCF_ACCEPT_NULL_BYTES) + src = "#abc\x00def\n" + compile(src, 'mymod', 'exec', PyCF_ACCEPT_NULL_BYTES) # works From pypy.commits at gmail.com Tue Aug 6 08:31:22 2019 From: pypy.commits at gmail.com (arigo) Date: Tue, 06 Aug 2019 05:31:22 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: A branch to revive sandboxing for Anvil, with a new idea that should move Message-ID: <5d49731a.1c69fb81.8ac23.0238@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97070:7658f5db8ad6 Date: 2019-08-06 14:30 +0200 http://bitbucket.org/pypy/pypy/changeset/7658f5db8ad6/ Log: A branch to revive sandboxing for Anvil, with a new idea that should move to burden of keeping it up-to-date out of the pypy-c From pypy.commits at gmail.com Tue Aug 6 09:42:05 2019 From: pypy.commits at gmail.com (arigo) Date: Tue, 06 Aug 2019 06:42:05 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: Remove support for the outdated register_external(sandboxsafe=False) Message-ID: <5d4983ad.1c69fb81.4e1ae.c08b@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97071:2b1d31cf3e3e Date: 2019-08-06 15:34 +0200 http://bitbucket.org/pypy/pypy/changeset/2b1d31cf3e3e/ Log: Remove support for the outdated register_external(sandboxsafe=False) diff --git a/rpython/annotator/policy.py b/rpython/annotator/policy.py --- a/rpython/annotator/policy.py +++ b/rpython/annotator/policy.py @@ -72,29 +72,3 @@ for callback in bk.pending_specializations: callback() del bk.pending_specializations[:] - if annotator.added_blocks is not None: - all_blocks = annotator.added_blocks - else: - all_blocks = annotator.annotated - for block in list(all_blocks): - for i, instr in enumerate(block.operations): - if not isinstance(instr, (op.simple_call, op.call_args)): - continue - v_func = instr.args[0] - s_func = annotator.annotation(v_func) - if not hasattr(s_func, 'needs_sandboxing'): - continue - key = ('sandboxing', s_func.const) - if key not in bk.emulated_pbc_calls: - params_s = s_func.args_s - s_result = s_func.s_result - from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline - sandbox_trampoline = make_sandbox_trampoline( - s_func.name, params_s, s_result) - sandbox_trampoline._signature_ = [SomeTuple(items=params_s)], s_result - bk.emulate_pbc_call(key, bk.immutablevalue(sandbox_trampoline), params_s) - else: - s_trampoline = bk.emulated_pbc_calls[key][0] - sandbox_trampoline = s_trampoline.const - new = instr.replace({instr.args[0]: Constant(sandbox_trampoline)}) - block.operations[i] = new diff --git a/rpython/rlib/debug.py b/rpython/rlib/debug.py --- a/rpython/rlib/debug.py +++ b/rpython/rlib/debug.py @@ -6,7 +6,6 @@ from rpython.rtyper.extregistry import ExtRegistryEntry from rpython.rlib.objectmodel import we_are_translated, always_inline from rpython.rlib.rarithmetic import is_valid_int, r_longlong -from rpython.rtyper.extfunc import register_external from rpython.rtyper.lltypesystem import lltype from rpython.rtyper.lltypesystem import rffi from rpython.translator.tool.cbuild import ExternalCompilationInfo @@ -460,7 +459,10 @@ def attach_gdb(): - import pdb; 
pdb.set_trace() + if not we_are_translated(): + import pdb; pdb.set_trace() + else: + impl_attach_gdb() if not sys.platform.startswith('win'): if sys.platform.startswith('linux'): @@ -586,11 +588,8 @@ d['separate_module_files'] = [cppfile] return ExternalCompilationInfo(**d) - ll_attach = rffi.llexternal("AttachToVS", [], lltype.Void, - compilation_info=make_vs_attach_eci()) + #ll_attach = rffi.llexternal("AttachToVS", [], lltype.Void, + # compilation_info=make_vs_attach_eci()) def impl_attach_gdb(): #ll_attach() print "AttachToVS is disabled at the moment (compilation failure)" - -register_external(attach_gdb, [], result=None, - export_name="impl_attach_gdb", llimpl=impl_attach_gdb) diff --git a/rpython/rlib/rfloat.py b/rpython/rlib/rfloat.py --- a/rpython/rlib/rfloat.py +++ b/rpython/rlib/rfloat.py @@ -5,7 +5,6 @@ from rpython.annotator.model import SomeString, SomeChar from rpython.rlib import objectmodel, unroll -from rpython.rtyper.extfunc import register_external from rpython.rtyper.tool import rffi_platform from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.rlib.objectmodel import not_rpython diff --git a/rpython/rlib/rposix_environ.py b/rpython/rlib/rposix_environ.py --- a/rpython/rlib/rposix_environ.py +++ b/rpython/rlib/rposix_environ.py @@ -5,7 +5,6 @@ from rpython.rlib.objectmodel import enforceargs # importing rposix here creates a cycle on Windows from rpython.rtyper.controllerentry import Controller -from rpython.rtyper.extfunc import register_external from rpython.rtyper.lltypesystem import rffi, lltype from rpython.translator.tool.cbuild import ExternalCompilationInfo @@ -97,9 +96,6 @@ # Lower-level interface: dummy placeholders and external registations def r_envkeys(): - just_a_placeholder - -def envkeys_llimpl(): environ = os_get_environ() result = [] i = 0 @@ -111,10 +107,6 @@ i += 1 return result -register_external(r_envkeys, [], [str0], # returns a list of strings - export_name='ll_os.ll_os_envkeys', - llimpl=envkeys_llimpl) - # ____________________________________________________________ def r_envitems(): @@ -190,18 +182,7 @@ return envitems_llimpl, getenv_llimpl, putenv_llimpl -envitems_llimpl, getenv_llimpl, putenv_llimpl = make_env_impls() - -register_external(r_envitems, [], [(str0, str0)], - export_name='ll_os.ll_os_envitems', - llimpl=envitems_llimpl) -register_external(r_getenv, [str0], - annmodel.SomeString(can_be_None=True, no_nul=True), - export_name='ll_os.ll_os_getenv', - llimpl=getenv_llimpl) -register_external(r_putenv, [str0, str0], annmodel.s_None, - export_name='ll_os.ll_os_putenv', - llimpl=putenv_llimpl) +r_envitems, r_getenv, r_putenv = make_env_impls() # ____________________________________________________________ @@ -215,7 +196,7 @@ os_unsetenv = llexternal('unsetenv', [rffi.CCHARP], rffi.INT, save_err=rffi.RFFI_SAVE_ERRNO) - def unsetenv_llimpl(name): + def r_unsetenv(name): with rffi.scoped_str2charp(name) as l_name: error = rffi.cast(lltype.Signed, os_unsetenv(l_name)) if error: @@ -229,7 +210,4 @@ del envkeepalive.byname[name] rffi.free_charp(l_oldstring) - register_external(r_unsetenv, [str0], annmodel.s_None, - export_name='ll_os.ll_os_unsetenv', - llimpl=unsetenv_llimpl) REAL_UNSETENV = True diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -95,9 +95,7 @@ def compute_annotation(self): s_result = SomeExternalFunction( self.name, self.signature_args, self.signature_result) - if 
(self.bookkeeper.annotator.translator.config.translation.sandbox - and not self.safe_not_sandboxed): - s_result.needs_sandboxing = True + assert self.safe_not_sandboxed return s_result @@ -113,6 +111,12 @@ sandboxsafe: use True if the function performs no I/O (safe for --sandbox) """ + if not sandboxsafe: + raise Exception("Don't use the outdated register_external() protocol " + "to invoke external function; use instead " + "rffi.llexternal(). The old register_external() is " + "now only supported with safeboxsafe=True.") + if export_name is None: export_name = function.__name__ params_s = [annotation(arg) for arg in args] diff --git a/rpython/rtyper/test/test_extfunc.py b/rpython/rtyper/test/test_extfunc.py --- a/rpython/rtyper/test/test_extfunc.py +++ b/rpython/rtyper/test/test_extfunc.py @@ -18,7 +18,7 @@ "NOT_RPYTHON" return eval("x+40") - register_external(b, [int], result=int) + register_external(b, [int], result=int, sandboxsafe=True) def f(): return b(2) @@ -42,7 +42,7 @@ return y + x register_external(c, [int, int], result=int, llimpl=llimpl, - export_name='ccc') + export_name='ccc', sandboxsafe=True) def f(): return c(3, 4) @@ -62,7 +62,8 @@ tuple as an argument so that register_external's behavior for tuple-taking functions can be verified. """ - register_external(function_with_tuple_arg, [(int,)], int) + register_external(function_with_tuple_arg, [(int,)], int, + sandboxsafe=True) def f(): return function_with_tuple_arg((1,)) @@ -82,11 +83,11 @@ """ def function_with_list(): pass - register_external(function_with_list, [[int]], int) + register_external(function_with_list, [[int]], int, sandboxsafe=True) def function_returning_list(): pass - register_external(function_returning_list, [], [int]) + register_external(function_returning_list, [], [int], sandboxsafe=True) def f(): return function_with_list(function_returning_list()) @@ -100,7 +101,7 @@ str0 = SomeString(no_nul=True) def os_open(s): pass - register_external(os_open, [str0], None) + register_external(os_open, [str0], None, sandboxsafe=True) def f(s): return os_open(s) policy = AnnotatorPolicy() @@ -121,7 +122,7 @@ def os_execve(l): pass - register_external(os_execve, [[str0]], None) + register_external(os_execve, [[str0]], None, sandboxsafe=True) def f(l): return os_execve(l) @@ -149,7 +150,7 @@ def a_llfakeimpl(i): return i * 3 register_external(a, [int], int, llimpl=a_llimpl, - llfakeimpl=a_llfakeimpl) + llfakeimpl=a_llfakeimpl, sandboxsafe=True) def f(i): return a(i) diff --git a/rpython/rtyper/test/test_llinterp.py b/rpython/rtyper/test/test_llinterp.py --- a/rpython/rtyper/test/test_llinterp.py +++ b/rpython/rtyper/test/test_llinterp.py @@ -584,7 +584,8 @@ def raising(): raise OSError(15, "abcd") - ext = register_external(external, [], llimpl=raising, llfakeimpl=raising) + ext = register_external(external, [], llimpl=raising, llfakeimpl=raising, + sandboxsafe=True) def f(): # this is a useful llfakeimpl that raises an exception From pypy.commits at gmail.com Tue Aug 6 09:42:07 2019 From: pypy.commits at gmail.com (arigo) Date: Tue, 06 Aug 2019 06:42:07 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: Remove support for register_replacement_for(sandboxed_name=...) Message-ID: <5d4983af.1c69fb81.d9648.46b5@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97072:e1fde97711a7 Date: 2019-08-06 15:41 +0200 http://bitbucket.org/pypy/pypy/changeset/e1fde97711a7/ Log: Remove support for register_replacement_for(sandboxed_name=...) 
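     (Illustrative sketch, not part of this changeset: with the sandboxed_name
     argument gone, a replacement is registered by passing only the function
     being replaced. The decorated function name below is hypothetical;
     rposix's replace_os_function() in the diff that follows uses the same
     pattern, simply returning register_replacement_for(func).)

         from rpython.rlib.objectmodel import register_replacement_for
         import os

         @register_replacement_for(os.getpid)
         def getpid_replacement():
             # Registered as the RPython-level implementation used wherever
             # translated code calls os.getpid(); the body here is only an
             # example, not the real replacement.
             return 0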
diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py --- a/rpython/rlib/objectmodel.py +++ b/rpython/rlib/objectmodel.py @@ -311,19 +311,12 @@ def sc_we_are_translated(ctx): return Constant(True) -def register_replacement_for(replaced_function, sandboxed_name=None): +def register_replacement_for(replaced_function): def wrap(func): from rpython.rtyper.extregistry import ExtRegistryEntry - # to support calling func directly - func._sandbox_external_name = sandboxed_name class ExtRegistry(ExtRegistryEntry): _about_ = replaced_function def compute_annotation(self): - if sandboxed_name: - config = self.bookkeeper.annotator.translator.config - if config.translation.sandbox: - func._sandbox_external_name = sandboxed_name - func._dont_inline_ = True return self.bookkeeper.immutablevalue(func) return func return wrap diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py --- a/rpython/rlib/rposix.py +++ b/rpython/rlib/rposix.py @@ -461,9 +461,7 @@ func = getattr(os, name, None) if func is None: return lambda f: f - return register_replacement_for( - func, - sandboxed_name='ll_os.ll_os_%s' % name) + return register_replacement_for(func) @specialize.arg(0) def handle_posix_error(name, result): diff --git a/rpython/rlib/rtime.py b/rpython/rlib/rtime.py --- a/rpython/rlib/rtime.py +++ b/rpython/rlib/rtime.py @@ -103,9 +103,7 @@ func = getattr(pytime, name, None) if func is None: return lambda f: f - return register_replacement_for( - func, - sandboxed_name='ll_time.ll_time_%s' % name) + return register_replacement_for(func) config = rffi_platform.configure(CConfig) globals().update(config) diff --git a/rpython/rtyper/rtyper.py b/rpython/rtyper/rtyper.py --- a/rpython/rtyper/rtyper.py +++ b/rpython/rtyper/rtyper.py @@ -569,17 +569,6 @@ def getcallable(self, graph): def getconcretetype(v): return self.bindingrepr(v).lowleveltype - if self.annotator.translator.config.translation.sandbox: - try: - name = graph.func._sandbox_external_name - except AttributeError: - pass - else: - args_s = [v.annotation for v in graph.getargs()] - s_result = graph.getreturnvar().annotation - sandboxed = make_sandbox_trampoline(name, args_s, s_result) - return self.getannmixlevel().delayedfunction( - sandboxed, args_s, s_result) return getfunctionptr(graph, getconcretetype) From pypy.commits at gmail.com Tue Aug 6 11:05:10 2019 From: pypy.commits at gmail.com (rlamy) Date: Tue, 06 Aug 2019 08:05:10 -0700 (PDT) Subject: [pypy-commit] pypy __debug__-optimize: update test to match lib-python Message-ID: <5d499726.1c69fb81.b691b.c8b4@mx.google.com> Author: Ronan Lamy Branch: __debug__-optimize Changeset: r97075:e3c0c0d858a7 Date: 2019-08-06 15:21 +0100 http://bitbucket.org/pypy/pypy/changeset/e3c0c0d858a7/ Log: update test to match lib-python diff --git a/pypy/module/__builtin__/test/apptest_compile.py b/pypy/module/__builtin__/test/apptest_compile.py --- a/pypy/module/__builtin__/test/apptest_compile.py +++ b/pypy/module/__builtin__/test/apptest_compile.py @@ -108,31 +108,32 @@ try: assert False except AssertionError: - return (True, f.__doc__) + return (True, f.__doc__, __debug__) else: - return (False, f.__doc__) + return (False, f.__doc__, __debug__) ''' - def f(): """doc""" - values = [(-1, __debug__, f.__doc__), - (0, True, 'doc'), - (1, False, 'doc'), - (2, False, None)] + def f(): + """doc""" - for optval, debugval, docstring in values: + values = [(-1, __debug__, f.__doc__, __debug__), + (0, True, 'doc', True), + (1, False, 'doc', False), + (2, False, None, False)] + + for optval, *expected in 
values: # test both direct compilation and compilation via AST codeobjs = [] - codeobjs.append( - compile(codestr, "", "exec", optimize=optval)) + codeobjs.append(compile(codestr, "", "exec", optimize=optval)) tree = ast.parse(codestr) codeobjs.append(compile(tree, "", "exec", optimize=optval)) - for i, code in enumerate(codeobjs): - print(optval, debugval, docstring, i) + print(optval, *expected, i) ns = {} exec(code, ns) rv = ns['f']() - assert rv == (debugval, docstring) + print(rv) + assert rv == tuple(expected) def test_assert_remove(): """Test removal of the asserts with optimize=1.""" From pypy.commits at gmail.com Tue Aug 6 11:05:12 2019 From: pypy.commits at gmail.com (rlamy) Date: Tue, 06 Aug 2019 08:05:12 -0700 (PDT) Subject: [pypy-commit] pypy __debug__-optimize: add space and compile_info parameters to as_constant() Message-ID: <5d499728.1c69fb81.16b9e.8016@mx.google.com> Author: Ronan Lamy Branch: __debug__-optimize Changeset: r97076:25c246923b60 Date: 2019-08-06 16:02 +0100 http://bitbucket.org/pypy/pypy/changeset/25c246923b60/ Log: add space and compile_info parameters to as_constant() diff --git a/pypy/interpreter/astcompiler/codegen.py b/pypy/interpreter/astcompiler/codegen.py --- a/pypy/interpreter/astcompiler/codegen.py +++ b/pypy/interpreter/astcompiler/codegen.py @@ -542,7 +542,8 @@ def visit_If(self, if_): self.update_position(if_.lineno, True) end = self.new_block() - test_constant = if_.test.as_constant_truth(self.space) + test_constant = if_.test.as_constant_truth( + self.space, self.compile_info) if test_constant == optimize.CONST_FALSE: self.visit_sequence(if_.orelse) elif test_constant == optimize.CONST_TRUE: @@ -686,7 +687,7 @@ def visit_While(self, wh): self.update_position(wh.lineno, True) - test_constant = wh.test.as_constant_truth(self.space) + test_constant = wh.test.as_constant_truth(self.space, self.compile_info) if test_constant == optimize.CONST_FALSE: self.visit_sequence(wh.orelse) else: @@ -1207,7 +1208,7 @@ count = len(elts) if elts is not None else 0 consts_w = [None] * count for i in range(count): - w_value = elts[i].as_constant() + w_value = elts[i].as_constant(self.space, self.compile_info) if w_value is None: # Not all constants return None @@ -1342,11 +1343,16 @@ if len(d.keys) < 0xffff: all_constant_keys_w = [] for key in d.keys: - if key is None or key.as_constant() is None: + if key is None: + constant_key = None + else: + constant_key = key.as_constant( + self.space, self.compile_info) + if constant_key is None: all_constant_keys_w = None break else: - all_constant_keys_w.append(key.as_constant()) + all_constant_keys_w.append(constant_key) for i in range(len(d.values)): key = d.keys[i] is_unpacking = key is None diff --git a/pypy/interpreter/astcompiler/optimize.py b/pypy/interpreter/astcompiler/optimize.py --- a/pypy/interpreter/astcompiler/optimize.py +++ b/pypy/interpreter/astcompiler/optimize.py @@ -20,14 +20,14 @@ class __extend__(ast.AST): - def as_constant_truth(self, space): + def as_constant_truth(self, space, compile_info): """Return the truth of this node if known.""" - const = self.as_constant() + const = self.as_constant(space, compile_info) if const is None: return CONST_NOT_CONST return int(space.is_true(const)) - def as_constant(self): + def as_constant(self, space, compile_info): """Return the value of this node as a wrapped constant if possible.""" return None @@ -47,46 +47,46 @@ class __extend__(ast.Num): - def as_constant(self): + def as_constant(self, space, compile_info): return self.n class __extend__(ast.Str): - 
def as_constant(self): + def as_constant(self, space, compile_info): return self.s class __extend__(ast.Bytes): - def as_constant(self): + def as_constant(self, space, compile_info): return self.s class __extend__(ast.Ellipsis): - def as_constant_truth(self, space): + def as_constant_truth(self, space, compile_info): return True class __extend__(ast.Constant): - def as_constant(self): + def as_constant(self, space, compile_info): return self.value class __extend__(ast.NameConstant): - def as_constant(self): + def as_constant(self, space, compile_info): return self.value class __extend__(ast.Index): - def as_constant(self): - return self.value.as_constant() + def as_constant(self, space, compile_info): + return self.value.as_constant(space, compile_info) class __extend__(ast.Slice): - def as_constant(self): + def as_constant(self, space, compile_info): # XXX: this ought to return a slice object if all the indices are - # constants, but we don't have a space here. + # constants return None class __extend__(ast.UnaryOp): @@ -189,9 +189,9 @@ return node def visit_BinOp(self, binop): - left = binop.left.as_constant() + left = binop.left.as_constant(self.space, self.compile_info) if left is not None: - right = binop.right.as_constant() + right = binop.right.as_constant(self.space, self.compile_info) if right is not None: op = binop.op try: @@ -218,7 +218,7 @@ return binop def visit_UnaryOp(self, unary): - w_operand = unary.operand.as_constant() + w_operand = unary.operand.as_constant(self.space, self.compile_info) op = unary.op if w_operand is not None: try: @@ -254,7 +254,7 @@ we_are_and = bop.op == ast.And i = 0 while i < len(values) - 1: - truth = values[i].as_constant_truth(self.space) + truth = values[i].as_constant_truth(self.space, self.compile_info) if truth != CONST_NOT_CONST: if (truth != CONST_TRUE) == we_are_and: del values[i + 1:] @@ -268,7 +268,7 @@ return bop def visit_Repr(self, rep): - w_const = rep.value.as_constant() + w_const = rep.value.as_constant(self.space, self.compile_info) if w_const is not None: w_repr = self.space.repr(w_const) return ast.Constant(w_repr, rep.lineno, rep.col_offset) @@ -300,7 +300,7 @@ consts_w = [None]*len(tup.elts) for i in range(len(tup.elts)): node = tup.elts[i] - w_const = node.as_constant() + w_const = node.as_constant(self.space, self.compile_info) if w_const is None: new_elts = self._optimize_constant_star_unpacks(tup.elts) if new_elts is not None: @@ -350,7 +350,7 @@ after_last_star_index = i + 1 new_elts.append(elt) elif const_since_last_star_w is not None: - w_const = elt.as_constant() + w_const = elt.as_constant(self.space, self.compile_info) if w_const is None: new_elts.extend(elts[after_last_star_index:i + 1]) const_since_last_star_w = None @@ -375,9 +375,9 @@ def visit_Subscript(self, subs): if subs.ctx == ast.Load: - w_obj = subs.value.as_constant() + w_obj = subs.value.as_constant(self.space, self.compile_info) if w_obj is not None: - w_idx = subs.slice.as_constant() + w_idx = subs.slice.as_constant(self.space, self.compile_info) if w_idx is not None: try: w_const = self.space.getitem(w_obj, w_idx) From pypy.commits at gmail.com Tue Aug 6 12:09:14 2019 From: pypy.commits at gmail.com (arigo) Date: Tue, 06 Aug 2019 09:09:14 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: in-progress: implementing another approach for sandboxing Message-ID: <5d49a62a.1c69fb81.89c00.921f@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97077:f2a2ec0e2a42 Date: 2019-08-06 18:08 +0200 
http://bitbucket.org/pypy/pypy/changeset/f2a2ec0e2a42/ Log: in-progress: implementing another approach for sandboxing diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -311,9 +311,6 @@ config.translation.jit = True if config.translation.sandbox: - assert 0, ("--sandbox is not tested nor maintained. If you " - "really want to try it anyway, remove this line in " - "pypy/goal/targetpypystandalone.py.") config.objspace.lonepycfiles = False if config.objspace.usemodules.cpyext: diff --git a/rpython/config/translationoption.py b/rpython/config/translationoption.py --- a/rpython/config/translationoption.py +++ b/rpython/config/translationoption.py @@ -115,8 +115,7 @@ BoolOption("sandbox", "Produce a fully-sandboxed executable", default=False, cmdline="--sandbox", requires=[("translation.thread", False)], - suggests=[("translation.gc", "generation"), - ("translation.gcrootfinder", "shadowstack")]), + suggests=[]), BoolOption("rweakref", "The backend supports RPython-level weakrefs", default=True), diff --git a/rpython/rtyper/rtyper.py b/rpython/rtyper/rtyper.py --- a/rpython/rtyper/rtyper.py +++ b/rpython/rtyper/rtyper.py @@ -29,7 +29,6 @@ from rpython.rtyper.rclass import RootClassRepr from rpython.tool.pairtype import pair from rpython.translator.unsimplify import insert_empty_block -from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline class RTyperBackend(object): diff --git a/rpython/translator/sandbox/_marshal.py b/rpython/translator/sandbox/_marshal.py deleted file mode 100644 --- a/rpython/translator/sandbox/_marshal.py +++ /dev/null @@ -1,695 +0,0 @@ -# Copy of lib_pypy/_marshal.py needed by sandlib -"""Internal Python object serialization - -This module contains functions that can read and write Python values in a binary format. The format is specific to Python, but independent of machine architecture issues (e.g., you can write a Python value to a file on a PC, transport the file to a Sun, and read it back there). Details of the format may change between Python versions. -""" - -# NOTE: This module is used in the Python3 interpreter, but also by -# the "sandboxed" process. It must work for Python2 as well. - -import types -from _codecs import utf_8_decode, utf_8_encode - -try: - intern -except NameError: - from sys import intern - -try: from __pypy__ import builtinify -except ImportError: builtinify = lambda f: f - - -TYPE_NULL = '0' -TYPE_NONE = 'N' -TYPE_FALSE = 'F' -TYPE_TRUE = 'T' -TYPE_STOPITER = 'S' -TYPE_ELLIPSIS = '.' -TYPE_INT = 'i' -TYPE_INT64 = 'I' -TYPE_FLOAT = 'f' -TYPE_COMPLEX = 'x' -TYPE_LONG = 'l' -TYPE_STRING = 's' -TYPE_INTERNED = 't' -TYPE_STRINGREF= 'R' -TYPE_TUPLE = '(' -TYPE_LIST = '[' -TYPE_DICT = '{' -TYPE_CODE = 'c' -TYPE_UNICODE = 'u' -TYPE_UNKNOWN = '?' 
-TYPE_SET = '<' -TYPE_FROZENSET= '>' - -class _Marshaller: - - dispatch = {} - - def __init__(self, writefunc): - self._write = writefunc - - def dump(self, x): - try: - self.dispatch[type(x)](self, x) - except KeyError: - for tp in type(x).mro(): - func = self.dispatch.get(tp) - if func: - break - else: - raise ValueError("unmarshallable object") - func(self, x) - - def w_long64(self, x): - self.w_long(x) - self.w_long(x>>32) - - def w_long(self, x): - a = chr(x & 0xff) - x >>= 8 - b = chr(x & 0xff) - x >>= 8 - c = chr(x & 0xff) - x >>= 8 - d = chr(x & 0xff) - self._write(a + b + c + d) - - def w_short(self, x): - self._write(chr((x) & 0xff)) - self._write(chr((x>> 8) & 0xff)) - - def dump_none(self, x): - self._write(TYPE_NONE) - dispatch[type(None)] = dump_none - - def dump_bool(self, x): - if x: - self._write(TYPE_TRUE) - else: - self._write(TYPE_FALSE) - dispatch[bool] = dump_bool - - def dump_stopiter(self, x): - if x is not StopIteration: - raise ValueError("unmarshallable object") - self._write(TYPE_STOPITER) - dispatch[type(StopIteration)] = dump_stopiter - - def dump_ellipsis(self, x): - self._write(TYPE_ELLIPSIS) - - try: - dispatch[type(Ellipsis)] = dump_ellipsis - except NameError: - pass - - # In Python3, this function is not used; see dump_long() below. - def dump_int(self, x): - y = x>>31 - if y and y != -1: - self._write(TYPE_INT64) - self.w_long64(x) - else: - self._write(TYPE_INT) - self.w_long(x) - dispatch[int] = dump_int - - def dump_long(self, x): - self._write(TYPE_LONG) - sign = 1 - if x < 0: - sign = -1 - x = -x - digits = [] - while x: - digits.append(x & 0x7FFF) - x = x>>15 - self.w_long(len(digits) * sign) - for d in digits: - self.w_short(d) - try: - long - except NameError: - dispatch[int] = dump_long - else: - dispatch[long] = dump_long - - def dump_float(self, x): - write = self._write - write(TYPE_FLOAT) - s = repr(x) - write(chr(len(s))) - write(s) - dispatch[float] = dump_float - - def dump_complex(self, x): - write = self._write - write(TYPE_COMPLEX) - s = repr(x.real) - write(chr(len(s))) - write(s) - s = repr(x.imag) - write(chr(len(s))) - write(s) - try: - dispatch[complex] = dump_complex - except NameError: - pass - - def dump_string(self, x): - # XXX we can't check for interned strings, yet, - # so we (for now) never create TYPE_INTERNED or TYPE_STRINGREF - self._write(TYPE_STRING) - self.w_long(len(x)) - self._write(x) - dispatch[bytes] = dump_string - - def dump_unicode(self, x): - self._write(TYPE_UNICODE) - #s = x.encode('utf8') - s, len_s = utf_8_encode(x) - self.w_long(len_s) - self._write(s) - try: - unicode - except NameError: - dispatch[str] = dump_unicode - else: - dispatch[unicode] = dump_unicode - - def dump_tuple(self, x): - self._write(TYPE_TUPLE) - self.w_long(len(x)) - for item in x: - self.dump(item) - dispatch[tuple] = dump_tuple - - def dump_list(self, x): - self._write(TYPE_LIST) - self.w_long(len(x)) - for item in x: - self.dump(item) - dispatch[list] = dump_list - - def dump_dict(self, x): - self._write(TYPE_DICT) - for key, value in x.items(): - self.dump(key) - self.dump(value) - self._write(TYPE_NULL) - dispatch[dict] = dump_dict - - def dump_code(self, x): - self._write(TYPE_CODE) - self.w_long(x.co_argcount) - self.w_long(x.co_nlocals) - self.w_long(x.co_stacksize) - self.w_long(x.co_flags) - self.dump(x.co_code) - self.dump(x.co_consts) - self.dump(x.co_names) - self.dump(x.co_varnames) - self.dump(x.co_freevars) - self.dump(x.co_cellvars) - self.dump(x.co_filename) - self.dump(x.co_name) - self.w_long(x.co_firstlineno) 
- self.dump(x.co_lnotab) - try: - dispatch[types.CodeType] = dump_code - except NameError: - pass - - def dump_set(self, x): - self._write(TYPE_SET) - self.w_long(len(x)) - for each in x: - self.dump(each) - try: - dispatch[set] = dump_set - except NameError: - pass - - def dump_frozenset(self, x): - self._write(TYPE_FROZENSET) - self.w_long(len(x)) - for each in x: - self.dump(each) - try: - dispatch[frozenset] = dump_frozenset - except NameError: - pass - -class _NULL: - pass - -class _StringBuffer: - def __init__(self, value): - self.bufstr = value - self.bufpos = 0 - - def read(self, n): - pos = self.bufpos - newpos = pos + n - ret = self.bufstr[pos : newpos] - self.bufpos = newpos - return ret - - -class _Unmarshaller: - - dispatch = {} - - def __init__(self, readfunc): - self._read = readfunc - self._stringtable = [] - - def load(self): - c = self._read(1) - if not c: - raise EOFError - try: - return self.dispatch[c](self) - except KeyError: - raise ValueError("bad marshal code: %c (%d)" % (c, ord(c))) - - def r_short(self): - lo = ord(self._read(1)) - hi = ord(self._read(1)) - x = lo | (hi<<8) - if x & 0x8000: - x = x - 0x10000 - return x - - def r_long(self): - s = self._read(4) - a = ord(s[0]) - b = ord(s[1]) - c = ord(s[2]) - d = ord(s[3]) - x = a | (b<<8) | (c<<16) | (d<<24) - if d & 0x80 and x > 0: - x = -((1<<32) - x) - return int(x) - else: - return x - - def r_long64(self): - a = ord(self._read(1)) - b = ord(self._read(1)) - c = ord(self._read(1)) - d = ord(self._read(1)) - e = ord(self._read(1)) - f = ord(self._read(1)) - g = ord(self._read(1)) - h = ord(self._read(1)) - x = a | (b<<8) | (c<<16) | (d<<24) - x = x | (e<<32) | (f<<40) | (g<<48) | (h<<56) - if h & 0x80 and x > 0: - x = -((1<<64) - x) - return x - - def load_null(self): - return _NULL - dispatch[TYPE_NULL] = load_null - - def load_none(self): - return None - dispatch[TYPE_NONE] = load_none - - def load_true(self): - return True - dispatch[TYPE_TRUE] = load_true - - def load_false(self): - return False - dispatch[TYPE_FALSE] = load_false - - def load_stopiter(self): - return StopIteration - dispatch[TYPE_STOPITER] = load_stopiter - - def load_ellipsis(self): - return Ellipsis - dispatch[TYPE_ELLIPSIS] = load_ellipsis - - dispatch[TYPE_INT] = r_long - - dispatch[TYPE_INT64] = r_long64 - - def load_long(self): - size = self.r_long() - sign = 1 - if size < 0: - sign = -1 - size = -size - x = 0 - for i in range(size): - d = self.r_short() - x = x | (d<<(i*15)) - return x * sign - dispatch[TYPE_LONG] = load_long - - def load_float(self): - n = ord(self._read(1)) - s = self._read(n) - return float(s) - dispatch[TYPE_FLOAT] = load_float - - def load_complex(self): - n = ord(self._read(1)) - s = self._read(n) - real = float(s) - n = ord(self._read(1)) - s = self._read(n) - imag = float(s) - return complex(real, imag) - dispatch[TYPE_COMPLEX] = load_complex - - def load_string(self): - n = self.r_long() - return self._read(n) - dispatch[TYPE_STRING] = load_string - - def load_interned(self): - n = self.r_long() - ret = intern(self._read(n)) - self._stringtable.append(ret) - return ret - dispatch[TYPE_INTERNED] = load_interned - - def load_stringref(self): - n = self.r_long() - return self._stringtable[n] - dispatch[TYPE_STRINGREF] = load_stringref - - def load_unicode(self): - n = self.r_long() - s = self._read(n) - #ret = s.decode('utf8') - ret, len_ret = utf_8_decode(s) - return ret - dispatch[TYPE_UNICODE] = load_unicode - - def load_tuple(self): - return tuple(self.load_list()) - dispatch[TYPE_TUPLE] = load_tuple - - 
def load_list(self): - n = self.r_long() - list = [self.load() for i in range(n)] - return list - dispatch[TYPE_LIST] = load_list - - def load_dict(self): - d = {} - while 1: - key = self.load() - if key is _NULL: - break - value = self.load() - d[key] = value - return d - dispatch[TYPE_DICT] = load_dict - - def load_code(self): - argcount = self.r_long() - nlocals = self.r_long() - stacksize = self.r_long() - flags = self.r_long() - code = self.load() - consts = self.load() - names = self.load() - varnames = self.load() - freevars = self.load() - cellvars = self.load() - filename = self.load() - name = self.load() - firstlineno = self.r_long() - lnotab = self.load() - return types.CodeType(argcount, nlocals, stacksize, flags, code, consts, - names, varnames, filename, name, firstlineno, - lnotab, freevars, cellvars) - dispatch[TYPE_CODE] = load_code - - def load_set(self): - n = self.r_long() - args = [self.load() for i in range(n)] - return set(args) - dispatch[TYPE_SET] = load_set - - def load_frozenset(self): - n = self.r_long() - args = [self.load() for i in range(n)] - return frozenset(args) - dispatch[TYPE_FROZENSET] = load_frozenset - -# ________________________________________________________________ - -def _read(self, n): - pos = self.bufpos - newpos = pos + n - if newpos > len(self.bufstr): raise EOFError - ret = self.bufstr[pos : newpos] - self.bufpos = newpos - return ret - -def _read1(self): - ret = self.bufstr[self.bufpos] - self.bufpos += 1 - return ret - -def _r_short(self): - lo = ord(_read1(self)) - hi = ord(_read1(self)) - x = lo | (hi<<8) - if x & 0x8000: - x = x - 0x10000 - return x - -def _r_long(self): - # inlined this most common case - p = self.bufpos - s = self.bufstr - a = ord(s[p]) - b = ord(s[p+1]) - c = ord(s[p+2]) - d = ord(s[p+3]) - self.bufpos += 4 - x = a | (b<<8) | (c<<16) | (d<<24) - if d & 0x80 and x > 0: - x = -((1<<32) - x) - return int(x) - else: - return x - -def _r_long64(self): - a = ord(_read1(self)) - b = ord(_read1(self)) - c = ord(_read1(self)) - d = ord(_read1(self)) - e = ord(_read1(self)) - f = ord(_read1(self)) - g = ord(_read1(self)) - h = ord(_read1(self)) - x = a | (b<<8) | (c<<16) | (d<<24) - x = x | (e<<32) | (f<<40) | (g<<48) | (h<<56) - if h & 0x80 and x > 0: - x = -((1<<64) - x) - return x - -_load_dispatch = {} - -class _FastUnmarshaller: - - dispatch = {} - - def __init__(self, buffer): - self.bufstr = buffer - self.bufpos = 0 - self._stringtable = [] - - def load(self): - # make flow space happy - c = '?' 
- try: - c = self.bufstr[self.bufpos] - self.bufpos += 1 - return _load_dispatch[c](self) - except KeyError: - raise ValueError("bad marshal code: %c (%d)" % (c, ord(c))) - except IndexError: - raise EOFError - - def load_null(self): - return _NULL - dispatch[TYPE_NULL] = load_null - - def load_none(self): - return None - dispatch[TYPE_NONE] = load_none - - def load_true(self): - return True - dispatch[TYPE_TRUE] = load_true - - def load_false(self): - return False - dispatch[TYPE_FALSE] = load_false - - def load_stopiter(self): - return StopIteration - dispatch[TYPE_STOPITER] = load_stopiter - - def load_ellipsis(self): - return Ellipsis - dispatch[TYPE_ELLIPSIS] = load_ellipsis - - def load_int(self): - return _r_long(self) - dispatch[TYPE_INT] = load_int - - def load_int64(self): - return _r_long64(self) - dispatch[TYPE_INT64] = load_int64 - - def load_long(self): - size = _r_long(self) - sign = 1 - if size < 0: - sign = -1 - size = -size - x = 0 - for i in range(size): - d = _r_short(self) - x = x | (d<<(i*15)) - return x * sign - dispatch[TYPE_LONG] = load_long - - def load_float(self): - n = ord(_read1(self)) - s = _read(self, n) - return float(s) - dispatch[TYPE_FLOAT] = load_float - - def load_complex(self): - n = ord(_read1(self)) - s = _read(self, n) - real = float(s) - n = ord(_read1(self)) - s = _read(self, n) - imag = float(s) - return complex(real, imag) - dispatch[TYPE_COMPLEX] = load_complex - - def load_string(self): - n = _r_long(self) - return _read(self, n) - dispatch[TYPE_STRING] = load_string - - def load_interned(self): - n = _r_long(self) - ret = intern(_read(self, n)) - self._stringtable.append(ret) - return ret - dispatch[TYPE_INTERNED] = load_interned - - def load_stringref(self): - n = _r_long(self) - return self._stringtable[n] - dispatch[TYPE_STRINGREF] = load_stringref - - def load_unicode(self): - n = _r_long(self) - s = _read(self, n) - ret = s.decode('utf8') - return ret - dispatch[TYPE_UNICODE] = load_unicode - - def load_tuple(self): - return tuple(self.load_list()) - dispatch[TYPE_TUPLE] = load_tuple - - def load_list(self): - n = _r_long(self) - list = [] - for i in range(n): - list.append(self.load()) - return list - dispatch[TYPE_LIST] = load_list - - def load_dict(self): - d = {} - while 1: - key = self.load() - if key is _NULL: - break - value = self.load() - d[key] = value - return d - dispatch[TYPE_DICT] = load_dict - - def load_code(self): - argcount = _r_long(self) - nlocals = _r_long(self) - stacksize = _r_long(self) - flags = _r_long(self) - code = self.load() - consts = self.load() - names = self.load() - varnames = self.load() - freevars = self.load() - cellvars = self.load() - filename = self.load() - name = self.load() - firstlineno = _r_long(self) - lnotab = self.load() - return types.CodeType(argcount, nlocals, stacksize, flags, code, consts, - names, varnames, filename, name, firstlineno, - lnotab, freevars, cellvars) - dispatch[TYPE_CODE] = load_code - - def load_set(self): - n = _r_long(self) - args = [self.load() for i in range(n)] - return set(args) - dispatch[TYPE_SET] = load_set - - def load_frozenset(self): - n = _r_long(self) - args = [self.load() for i in range(n)] - return frozenset(args) - dispatch[TYPE_FROZENSET] = load_frozenset - -_load_dispatch = _FastUnmarshaller.dispatch - -# _________________________________________________________________ -# -# user interface - -version = 1 - - at builtinify -def dump(x, f, version=version): - # XXX 'version' is ignored, we always dump in a version-0-compatible format - m = 
_Marshaller(f.write) - m.dump(x) - - at builtinify -def load(f): - um = _Unmarshaller(f.read) - return um.load() - - at builtinify -def dumps(x, version=version): - # XXX 'version' is ignored, we always dump in a version-0-compatible format - buffer = [] - m = _Marshaller(buffer.append) - m.dump(x) - return ''.join(buffer) - - at builtinify -def loads(s): - um = _FastUnmarshaller(s) - return um.load() diff --git a/rpython/translator/sandbox/rsandbox.py b/rpython/translator/sandbox/rsandbox.py --- a/rpython/translator/sandbox/rsandbox.py +++ b/rpython/translator/sandbox/rsandbox.py @@ -5,16 +5,17 @@ """ import py -from rpython.rlib import rmarshal, types +from rpython.rlib import types +from rpython.rlib.objectmodel import specialize from rpython.rlib.signature import signature +from rpython.rlib.unroll import unrolling_iterable # ____________________________________________________________ # # Sandboxing code generator for external functions # -from rpython.rlib import rposix -from rpython.rtyper.lltypesystem import lltype, rffi +from rpython.rtyper.lltypesystem import lltype, llmemory, rffi from rpython.rtyper.llannotation import lltype_to_annotation from rpython.rtyper.annlowlevel import MixLevelHelperAnnotator from rpython.tool.ansi_print import AnsiLogger @@ -22,71 +23,6 @@ log = AnsiLogger("sandbox") -# a version of os.read() and os.write() that are not mangled -# by the sandboxing mechanism -ll_read_not_sandboxed = rposix.external('read', - [rffi.INT, rffi.CCHARP, rffi.SIZE_T], - rffi.SIZE_T, - sandboxsafe=True, - _nowrapper=True) - -ll_write_not_sandboxed = rposix.external('write', - [rffi.INT, rffi.CCHARP, rffi.SIZE_T], - rffi.SIZE_T, - sandboxsafe=True, - _nowrapper=True) - - - at signature(types.int(), types.ptr(rffi.CCHARP.TO), types.int(), - returns=types.none()) -def writeall_not_sandboxed(fd, buf, length): - fd = rffi.cast(rffi.INT, fd) - while length > 0: - size = rffi.cast(rffi.SIZE_T, length) - count = rffi.cast(lltype.Signed, ll_write_not_sandboxed(fd, buf, size)) - if count <= 0: - raise IOError - length -= count - buf = lltype.direct_ptradd(lltype.direct_arrayitems(buf), count) - buf = rffi.cast(rffi.CCHARP, buf) - - -class FdLoader(rmarshal.Loader): - def __init__(self, fd): - rmarshal.Loader.__init__(self, "") - self.fd = fd - self.buflen = 4096 - - def need_more_data(self): - buflen = self.buflen - with lltype.scoped_alloc(rffi.CCHARP.TO, buflen) as buf: - buflen = rffi.cast(rffi.SIZE_T, buflen) - fd = rffi.cast(rffi.INT, self.fd) - count = ll_read_not_sandboxed(fd, buf, buflen) - count = rffi.cast(lltype.Signed, count) - if count <= 0: - raise IOError - self.buf += ''.join([buf[i] for i in range(count)]) - self.buflen *= 2 - -def sandboxed_io(buf): - STDIN = 0 - STDOUT = 1 - # send the buffer with the marshalled fnname and input arguments to STDOUT - with lltype.scoped_alloc(rffi.CCHARP.TO, len(buf)) as p: - for i in range(len(buf)): - p[i] = buf[i] - writeall_not_sandboxed(STDOUT, p, len(buf)) - # build a Loader that will get the answer from STDIN - loader = FdLoader(STDIN) - # check for errors - error = load_int(loader) - if error != 0: - reraise_error(error, loader) - else: - # no exception; the caller will decode the actual result - return loader - def reraise_error(error, loader): if error == 1: raise OSError(load_int(loader), "external error") @@ -108,21 +44,51 @@ raise RuntimeError - at signature(types.str(), returns=types.impossible()) -def not_implemented_stub(msg): - STDERR = 2 - with rffi.scoped_str2charp(msg + '\n') as buf: - 
writeall_not_sandboxed(STDERR, buf, len(msg) + 1) - raise RuntimeError(msg) # XXX in RPython, the msg is ignored +def getkind(TYPE, parent_function): + if TYPE is lltype.Void: + return 'v' + elif isinstance(TYPE, lltype.Primitive): + if TYPE is lltype.Float or TYPE is lltype.SingleFloat: + return 'f' + if TYPE is lltype.LongFloat: + log.WARNING("%r uses a 'long double' argument or return value; " + "sandboxing will export it only as 'double'" % + (parent_function,)) + return 'f' + if TYPE == llmemory.Address: + return 'p' + return 'i' + elif isinstance(TYPE, lltype.Ptr): + return 'p' + else: + log.WARNING("%r: sandboxing does not support argument " + "or return type %r" % (parent_function, TYPE)) + return 'v' -def make_stub(fnname, msg): - """Build always-raising stub function to replace unsupported external.""" - log.WARNING(msg) - def execute(*args): - not_implemented_stub(msg) - execute.__name__ = 'sandboxed_%s' % (fnname,) - return execute +eci = rffi.ExternalCompilationInfo(separate_module_sources=[ + py.path.local(__file__).join('..', 'src', 'rsandbox.c').read(), + ], + post_include_bits=[ + py.path.local(__file__).join('..', 'src', 'rsandbox.h').read(), + ]) +def external(funcname, ARGS, RESULT): + return rffi.llexternal(funcname, ARGS, RESULT, + compilation_info=eci, sandboxsafe=True, + _nowrapper=True) + +rpy_sandbox_arg = { + 'i': external('rpy_sandbox_arg_i', [lltype.UnsignedLongLong], lltype.Void), + 'f': external('rpy_sandbox_arg_f', [lltype.Float], lltype.Void), + 'p': external('rpy_sandbox_arg_p', [llmemory.Address], lltype.Void), +} +rpy_sandbox_res = { + 'v': external('rpy_sandbox_res_v', [rffi.CCHARP], lltype.Void), + 'i': external('rpy_sandbox_res_i', [rffi.CCHARP], lltype.UnsignedLongLong), + 'f': external('rpy_sandbox_res_f', [rffi.CCHARP], lltype.Float), + 'p': external('rpy_sandbox_res_p', [rffi.CCHARP], llmemory.Address), +} + def sig_ll(fnobj): FUNCTYPE = lltype.typeOf(fnobj) @@ -130,47 +96,48 @@ s_result = lltype_to_annotation(FUNCTYPE.RESULT) return args_s, s_result -dump_string = rmarshal.get_marshaller(str) -load_int = rmarshal.get_loader(int) - def get_sandbox_stub(fnobj, rtyper): fnname = fnobj._name + FUNCTYPE = lltype.typeOf(fnobj) + arg_kinds = [getkind(ARG, fnname) for ARG in FUNCTYPE.ARGS] + result_kind = getkind(FUNCTYPE.RESULT, fnname) + + unroll_args = unrolling_iterable([ + (arg_kind, rpy_sandbox_arg[arg_kind], + lltype.typeOf(rpy_sandbox_arg[arg_kind]).TO.ARGS[0]) + for arg_kind in arg_kinds]) + + result_func = rpy_sandbox_res[result_kind] + RESTYPE = FUNCTYPE.RESULT + + name_and_sig = '%s(%s)%s' % (fnname, ''.join(arg_kinds), result_kind) + log(name_and_sig) + name_and_sig = rffi.str2charp(name_and_sig, track_allocation=False) + + def execute(*args): + # + # serialize the arguments + i = 0 + for arg_kind, func, ARGTYPE in unroll_args: + if arg_kind == 'v': + continue + func(rffi.cast(ARGTYPE, args[i])) + i = i + 1 + # + # send the function name and the arguments and wait for an answer + result = result_func(name_and_sig) + # + # result the answer, if any + if RESTYPE is not lltype.Void: + return rffi.cast(RESTYPE, result) + execute.__name__ = 'sandboxed_%s' % (fnname,) + # args_s, s_result = sig_ll(fnobj) - msg = "Not implemented: sandboxing for external function '%s'" % (fnname,) - execute = make_stub(fnname, msg) return _annotate(rtyper, execute, args_s, s_result) -def make_sandbox_trampoline(fnname, args_s, s_result): - """Create a trampoline function with the specified signature. 
- - The trampoline is meant to be used in place of real calls to the external - function named 'fnname'. It marshals its input arguments, dumps them to - STDOUT, and waits for an answer on STDIN. - """ - try: - dump_arguments = rmarshal.get_marshaller(tuple(args_s)) - load_result = rmarshal.get_loader(s_result) - except (rmarshal.CannotMarshal, rmarshal.CannotUnmarshall) as e: - msg = "Cannot sandbox function '%s': %s" % (fnname, e) - execute = make_stub(fnname, msg) - else: - def execute(*args): - # marshal the function name and input arguments - buf = [] - dump_string(buf, fnname) - dump_arguments(buf, args) - # send the buffer and wait for the answer - loader = sandboxed_io(buf) - # decode the answer - result = load_result(loader) - loader.check_finished() - return result - execute.__name__ = 'sandboxed_%s' % (fnname,) - return execute - - def _annotate(rtyper, f, args_s, s_result): ann = MixLevelHelperAnnotator(rtyper) graph = ann.getgraph(f, args_s, s_result) ann.finish() + ann.backend_optimize() return graph diff --git a/rpython/translator/sandbox/src/rsandbox.c b/rpython/translator/sandbox/src/rsandbox.c new file mode 100644 --- /dev/null +++ b/rpython/translator/sandbox/src/rsandbox.c @@ -0,0 +1,176 @@ +#include +#include + + +#define RPY_SANDBOX_ARGBUF 512 +#define RPY_SANDBOX_NAMEMAX 256 + +#define RPY_FD_STDIN 0 +#define RPY_FD_STDOUT 1 + +static char sand_argbuf[RPY_SANDBOX_ARGBUF]; +static size_t sand_nextarg = RPY_SANDBOX_NAMEMAX; + + +static void sand_writeall(const char *buf, size_t count) +{ + while (count > 0) { + ssize_t result = write(RPY_FD_STDOUT, buf, count); + if (result <= 0) { + if (result == 0) { + fprintf(stderr, "sandbox: write(stdout) gives the result 0, " + "which is not expected\n"); + } + else { + perror("sandbox: write(stdout)"); + } + abort(); + } + if (result > count) { + fprintf(stderr, "sandbox: write(stdout) wrote more data than " + "request, which is not expected\n"); + abort(); + } + buf += result; + count -= result; + } +} + +static void sand_readall(char *buf, size_t count) +{ + while (count > 0) { + ssize_t result = read(RPY_FD_STDIN, buf, count); + if (result <= 0) { + if (result == 0) { + fprintf(stderr, "sandbox: stdin was closed\n"); + } + else { + perror("sandbox: read(stdin)"); + } + abort(); + } + if (result > count) { + fprintf(stderr, "sandbox: read(stdin) returned more data than " + "expected\n"); + abort(); + } + buf += result; + count -= result; + } +} + + +static char *sand_arg_output(size_t size) +{ + char *p = sand_argbuf + sand_nextarg; + sand_nextarg += size; + if (sand_nextarg > RPY_SANDBOX_ARGBUF) { + fprintf(stderr, + "sandbox: argument buffer overflow (RPY_SANDBOX_ARGBUF)\n"); + abort(); + } + return p; +} + +void rpy_sandbox_arg_i(unsigned long long i) +{ + *(unsigned long long *)sand_arg_output(sizeof(unsigned long long)) = i; +} + +void rpy_sandbox_arg_f(double f) +{ + *(double *)sand_arg_output(sizeof(double)) = f; +} + +void rpy_sandbox_arg_p(void *p) +{ + *(void **)sand_arg_output(sizeof(void *)) = p; +} + +struct sand_data_s { + void *data; + size_t size; +}; + +static void sand_interact(const char *name_and_sig, char expected_result, + void *result, size_t result_size) +{ + size_t name_len = strlen(name_and_sig); + assert(name_len > 0); + if (name_len > RPY_SANDBOX_NAMEMAX - 1) { + fprintf(stderr, + "sandbox: function name buffer overflow (RPY_SANDBOX_NAMEMAX)\n"); + abort(); + } + char *p = sand_argbuf + RPY_SANDBOX_NAMEMAX - name_len - 1; + *p = name_len; + memcpy(p + 1, name_and_sig, name_len); + + 
assert(sand_nextarg >= RPY_SANDBOX_NAMEMAX); + assert(sand_nextarg <= RPY_SANDBOX_ARGBUF); + + sand_writeall(p, sand_nextarg - (p - sand_argbuf)); + sand_nextarg = RPY_SANDBOX_NAMEMAX; + + while (1) { + struct sand_data_s data_hdr; + char command = 0; + sand_readall(&command, 1); + switch (command) { + + case 'v': + case 'i': + case 'f': + case 'p': + if (expected_result != command) { + fprintf(stderr, "sandbox: %s: waiting for a result of type " + "%c but got %c instead\n", name_and_sig, + expected_result, command); + abort(); + } + sand_readall((char *)result, result_size); + return; + + case 'R': + sand_readall((char *)&data_hdr, sizeof(data_hdr)); + sand_writeall(data_hdr.data, data_hdr.size); + break; + + case 'W': + sand_readall((char *)&data_hdr, sizeof(data_hdr)); + sand_readall(data_hdr.data, data_hdr.size); + break; + + default: + fprintf(stderr, "sandbox: protocol error: unexpected byte %d\n", + (int)command); + abort(); + } + } +} + +void rpy_sandbox_res_v(const char *name_and_sig) +{ + sand_interact(name_and_sig, 'v', NULL, 0); +} + +unsigned long long rpy_sandbox_res_i(const char *name_and_sig) +{ + unsigned long long result; + sand_interact(name_and_sig, 'i', &result, sizeof(result)); + return result; +} + +double rpy_sandbox_res_f(const char *name_and_sig) +{ + double result; + sand_interact(name_and_sig, 'f', &result, sizeof(result)); + return result; +} + +void *rpy_sandbox_res_p(const char *name_and_sig) +{ + void *result; + sand_interact(name_and_sig, 'p', &result, sizeof(result)); + return result; +} diff --git a/rpython/translator/sandbox/src/rsandbox.h b/rpython/translator/sandbox/src/rsandbox.h new file mode 100644 --- /dev/null +++ b/rpython/translator/sandbox/src/rsandbox.h @@ -0,0 +1,9 @@ + +void rpy_sandbox_arg_i(unsigned long long i); +void rpy_sandbox_arg_f(double f); +void rpy_sandbox_arg_p(void *p); + +void rpy_sandbox_res_v(const char *name_and_sig); +unsigned long long rpy_sandbox_res_i(const char *name_and_sig); +double rpy_sandbox_res_f(const char *name_and_sig); +void *rpy_sandbox_res_p(const char *name_and_sig); From pypy.commits at gmail.com Tue Aug 6 12:24:27 2019 From: pypy.commits at gmail.com (rlamy) Date: Tue, 06 Aug 2019 09:24:27 -0700 (PDT) Subject: [pypy-commit] pypy __debug__-optimize: remove dead code Message-ID: <5d49a9bb.1c69fb81.feef2.9fcb@mx.google.com> Author: Ronan Lamy Branch: __debug__-optimize Changeset: r97078:2c5d479229ac Date: 2019-08-06 16:17 +0100 http://bitbucket.org/pypy/pypy/changeset/2c5d479229ac/ Log: remove dead code diff --git a/pypy/interpreter/astcompiler/optimize.py b/pypy/interpreter/astcompiler/optimize.py --- a/pypy/interpreter/astcompiler/optimize.py +++ b/pypy/interpreter/astcompiler/optimize.py @@ -267,13 +267,6 @@ return values[0] return bop - def visit_Repr(self, rep): - w_const = rep.value.as_constant(self.space, self.compile_info) - if w_const is not None: - w_repr = self.space.repr(w_const) - return ast.Constant(w_repr, rep.lineno, rep.col_offset) - return rep - def visit_Name(self, name): """Turn loading None, True, and False into a constant lookup.""" if name.ctx == ast.Del: From pypy.commits at gmail.com Tue Aug 6 12:24:29 2019 From: pypy.commits at gmail.com (rlamy) Date: Tue, 06 Aug 2019 09:24:29 -0700 (PDT) Subject: [pypy-commit] pypy __debug__-optimize: fix Ellipsis.as_constant() Message-ID: <5d49a9bd.1c69fb81.97ed0.11f4@mx.google.com> Author: Ronan Lamy Branch: __debug__-optimize Changeset: r97079:e6ebcd68aa27 Date: 2019-08-06 16:32 +0100 http://bitbucket.org/pypy/pypy/changeset/e6ebcd68aa27/ Log: 
fix Ellipsis.as_constant() diff --git a/pypy/interpreter/astcompiler/optimize.py b/pypy/interpreter/astcompiler/optimize.py --- a/pypy/interpreter/astcompiler/optimize.py +++ b/pypy/interpreter/astcompiler/optimize.py @@ -64,9 +64,8 @@ class __extend__(ast.Ellipsis): - - def as_constant_truth(self, space, compile_info): - return True + def as_constant(self, space, compile_info): + return space.w_Ellipsis class __extend__(ast.Constant): From pypy.commits at gmail.com Tue Aug 6 12:24:31 2019 From: pypy.commits at gmail.com (rlamy) Date: Tue, 06 Aug 2019 09:24:31 -0700 (PDT) Subject: [pypy-commit] pypy __debug__-optimize: 'True', 'False' and 'None' are proper keywords now, not reserved names Message-ID: <5d49a9bf.1c69fb81.dcdd0.b532@mx.google.com> Author: Ronan Lamy Branch: __debug__-optimize Changeset: r97080:d2c9896c2574 Date: 2019-08-06 16:55 +0100 http://bitbucket.org/pypy/pypy/changeset/d2c9896c2574/ Log: 'True', 'False' and 'None' are proper keywords now, not reserved names diff --git a/pypy/interpreter/astcompiler/optimize.py b/pypy/interpreter/astcompiler/optimize.py --- a/pypy/interpreter/astcompiler/optimize.py +++ b/pypy/interpreter/astcompiler/optimize.py @@ -273,12 +273,6 @@ space = self.space iden = name.id w_const = None - if iden == "None": - w_const = space.w_None - elif iden == "True": - w_const = space.w_True - elif iden == "False": - w_const = space.w_False if w_const is not None: return ast.NameConstant(w_const, name.lineno, name.col_offset) return name From pypy.commits at gmail.com Tue Aug 6 12:24:32 2019 From: pypy.commits at gmail.com (rlamy) Date: Tue, 06 Aug 2019 09:24:32 -0700 (PDT) Subject: [pypy-commit] pypy __debug__-optimize: Constant-fold __debug__ at compile time Message-ID: <5d49a9c0.1c69fb81.676e2.4aff@mx.google.com> Author: Ronan Lamy Branch: __debug__-optimize Changeset: r97081:b9c4efabfa18 Date: 2019-08-06 17:23 +0100 http://bitbucket.org/pypy/pypy/changeset/b9c4efabfa18/ Log: Constant-fold __debug__ at compile time diff --git a/pypy/interpreter/astcompiler/optimize.py b/pypy/interpreter/astcompiler/optimize.py --- a/pypy/interpreter/astcompiler/optimize.py +++ b/pypy/interpreter/astcompiler/optimize.py @@ -73,8 +73,15 @@ def as_constant(self, space, compile_info): return self.value +class __extend__(ast.Name): + def as_constant(self, space, compile_info): + if self.id == '__debug__': + return space.newbool(compile_info.optimize == 0) + else: + return None + + class __extend__(ast.NameConstant): - def as_constant(self, space, compile_info): return self.value @@ -271,8 +278,9 @@ if name.ctx == ast.Del: return name space = self.space - iden = name.id w_const = None + if name.id == '__debug__': + w_const = space.newbool(self.compile_info.optimize == 0) if w_const is not None: return ast.NameConstant(w_const, name.lineno, name.col_offset) return name From pypy.commits at gmail.com Tue Aug 6 15:38:24 2019 From: pypy.commits at gmail.com (rlamy) Date: Tue, 06 Aug 2019 12:38:24 -0700 (PDT) Subject: [pypy-commit] pypy __debug__-optimize: remove obsolete comment Message-ID: <5d49d730.1c69fb81.39205.d936@mx.google.com> Author: Ronan Lamy Branch: __debug__-optimize Changeset: r97083:04f2e33aa9a7 Date: 2019-08-06 20:37 +0100 http://bitbucket.org/pypy/pypy/changeset/04f2e33aa9a7/ Log: remove obsolete comment diff --git a/pypy/module/__builtin__/test/test_compile.py b/pypy/module/__builtin__/test/test_compile.py --- a/pypy/module/__builtin__/test/test_compile.py +++ b/pypy/module/__builtin__/test/test_compile.py @@ -30,9 +30,3 @@ space.appexec([], """(): 
exec(compile('assert False', '', 'exec', optimize=-1)) """) - - -# TODO: Check the value of __debug__ inside of the compiled block! -# According to the documentation, it should follow the optimize flag. -# However, cpython3.5.0a0 behaves the same way as PyPy (__debug__ follows -# -O, -OO flags of the interpreter). From pypy.commits at gmail.com Tue Aug 6 17:11:44 2019 From: pypy.commits at gmail.com (rlamy) Date: Tue, 06 Aug 2019 14:11:44 -0700 (PDT) Subject: [pypy-commit] pypy __debug__-optimize: Make handling of sys.flags.optimize closer to CPython and fix compile(..., optimize=-1) Message-ID: <5d49ed10.1c69fb81.f6d50.740b@mx.google.com> Author: Ronan Lamy Branch: __debug__-optimize Changeset: r97084:5580f03f4baa Date: 2019-08-06 22:10 +0100 http://bitbucket.org/pypy/pypy/changeset/5580f03f4baa/ Log: Make handling of sys.flags.optimize closer to CPython and fix compile(..., optimize=-1) diff --git a/pypy/bin/pyinteractive.py b/pypy/bin/pyinteractive.py --- a/pypy/bin/pyinteractive.py +++ b/pypy/bin/pyinteractive.py @@ -42,7 +42,7 @@ StrOption("warn", "warning control (arg is action:message:category:module:lineno)", default=None, cmdline="-W"), - + ]) pypy_init = gateway.applevel(''' @@ -102,10 +102,9 @@ space.appexec([], """(): import sys flags = list(sys.flags) - flags[6] = 2 + flags[3] = 2 sys.flags = type(sys.flags)(flags) - import __pypy__ - __pypy__.set_debug(False) + __builtins__.__dict__['__debug__'] = False """) # call pypy_find_stdlib: the side-effect is that it sets sys.prefix and @@ -119,7 +118,7 @@ # set warning control options (if any) warn_arg = interactiveconfig.warn if warn_arg is not None: - space.appexec([space.wrap(warn_arg)], """(arg): + space.appexec([space.wrap(warn_arg)], """(arg): import sys sys.warnoptions.append(arg)""") @@ -202,6 +201,6 @@ if __name__ == '__main__': if hasattr(sys, 'setrecursionlimit'): - # for running "python -i pyinteractive.py -Si -- py.py -Si" + # for running "python -i pyinteractive.py -Si -- py.py -Si" sys.setrecursionlimit(3000) sys.exit(main_(sys.argv)) diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py --- a/pypy/interpreter/app_main.py +++ b/pypy/interpreter/app_main.py @@ -540,10 +540,6 @@ sys.flags = type(sys.flags)(flags) sys.dont_write_bytecode = bool(sys.flags.dont_write_bytecode) - if sys.flags.optimize >= 1: - import __pypy__ - __pypy__.set_debug(False) - sys._xoptions = dict(x.split('=', 1) if '=' in x else (x, True) for x in options['_xoptions']) diff --git a/pypy/interpreter/astcompiler/codegen.py b/pypy/interpreter/astcompiler/codegen.py --- a/pypy/interpreter/astcompiler/codegen.py +++ b/pypy/interpreter/astcompiler/codegen.py @@ -507,6 +507,7 @@ def visit_Assert(self, asrt): if self.compile_info.optimize >= 1: return + assert self.compile_info.optimize == 0 self.update_position(asrt.lineno) end = self.new_block() self.emit_jump(ops.JUMP_IF_NOT_DEBUG, end) diff --git a/pypy/interpreter/pycompiler.py b/pypy/interpreter/pycompiler.py --- a/pypy/interpreter/pycompiler.py +++ b/pypy/interpreter/pycompiler.py @@ -117,6 +117,8 @@ check = True if not check: raise oefmt(self.space.w_TypeError, "invalid node type") + if optimize == -1: + optimize = self.space.sys.get_optimize() fut = misc.parse_future(node, self.future_flags.compiler_features) f_flags, f_lineno, f_col = fut @@ -166,6 +168,9 @@ def compile(self, source, filename, mode, flags, hidden_applevel=False, optimize=-1): + if optimize == -1: + optimize = self.space.sys.get_optimize() + assert optimize >= 0 info = pyparse.CompileInfo(filename, mode, 
flags, hidden_applevel=hidden_applevel, optimize=optimize) mod = self._compile_to_ast(source, info) diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py --- a/pypy/interpreter/pyopcode.py +++ b/pypy/interpreter/pyopcode.py @@ -1181,7 +1181,7 @@ return next_instr def JUMP_IF_NOT_DEBUG(self, jumpby, next_instr): - if not self.space.sys.debug: + if self.space.sys.get_optimize(): next_instr += jumpby return next_instr diff --git a/pypy/interpreter/pyparser/pyparse.py b/pypy/interpreter/pyparser/pyparse.py --- a/pypy/interpreter/pyparser/pyparse.py +++ b/pypy/interpreter/pyparser/pyparse.py @@ -69,14 +69,14 @@ * hidden_applevel: Will this code unit and sub units be hidden at the applevel? * optimize: optimization level: - -1 = same as interpreter, 0 = no optmiziation, 1 = remove asserts, 2 = remove docstrings. """ def __init__(self, filename, mode="exec", flags=0, future_pos=(0, 0), - hidden_applevel=False, optimize=-1): + hidden_applevel=False, optimize=0): + assert optimize >= 0 rstring.check_str0(filename) self.filename = filename self.mode = mode diff --git a/pypy/module/__builtin__/compiling.py b/pypy/module/__builtin__/compiling.py --- a/pypy/module/__builtin__/compiling.py +++ b/pypy/module/__builtin__/compiling.py @@ -42,6 +42,10 @@ raise oefmt(space.w_ValueError, "compile() arg 3 must be 'exec', 'eval' or 'single'") + if optimize < -1 or optimize > 2: + raise oefmt(space.w_ValueError, + "compile(): invalid optimize value") + if space.isinstance_w(w_source, space.gettypeobject(ast.W_AST.typedef)): if flags & consts.PyCF_ONLY_AST: return w_source diff --git a/pypy/module/__builtin__/test/test_compile.py b/pypy/module/__builtin__/test/test_compile.py --- a/pypy/module/__builtin__/test/test_compile.py +++ b/pypy/module/__builtin__/test/test_compile.py @@ -3,12 +3,18 @@ def setup_method(self, method): space = self.space - self._sys_debug = space.sys.debug + self._w_flags = space.sys.get('flags') # imitate -O - space.sys.debug = False + space.appexec([], """(): + import sys + flags = list(sys.flags) + flags[3] = 1 + sys.flags = type(sys.flags)(flags) + """) def teardown_method(self, method): - self.space.sys.debug = self._sys_debug + space = self.space + space.setitem(space.sys.w_dict, space.newtext('flags'), self._w_flags) def test_O_optmize_0(self): """Test that assert is not ignored if -O flag is set but optimize=0.""" diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py --- a/pypy/module/__pypy__/interp_magic.py +++ b/pypy/module/__pypy__/interp_magic.py @@ -117,14 +117,6 @@ """ Create a new empty list that has an underlying storage of length sizehint """ return space.newlist_hint(sizehint) - at unwrap_spec(debug=int) -def set_debug(space, debug): - debug = bool(debug) - space.sys.debug = debug - space.setitem(space.builtin.w_dict, - space.newtext('__debug__'), - space.newbool(debug)) - @unwrap_spec(estimate=int) def add_memory_pressure(space, estimate): """ Add memory pressure of estimate bytes. Useful when calling a C function diff --git a/pypy/module/__pypy__/moduledef.py b/pypy/module/__pypy__/moduledef.py --- a/pypy/module/__pypy__/moduledef.py +++ b/pypy/module/__pypy__/moduledef.py @@ -72,7 +72,7 @@ interpleveldefs = { 'bufferable': 'interp_buffer.W_Bufferable', } - + class Module(MixedModule): """ PyPy specific "magic" functions. 
A lot of them are experimental and @@ -106,7 +106,6 @@ 'delitem_if_value_is' : 'interp_dict.delitem_if_value_is', 'move_to_end' : 'interp_dict.move_to_end', 'strategy' : 'interp_magic.strategy', # dict,set,list - 'set_debug' : 'interp_magic.set_debug', 'locals_to_fast' : 'interp_magic.locals_to_fast', 'set_code_callback' : 'interp_magic.set_code_callback', 'decode_long' : 'interp_magic.decode_long', diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -402,11 +402,7 @@ raise oefmt(space.w_ImportError, "Bad magic number in %s", cpathname) #print "loading pyc file:", cpathname code_w = read_compiled_module(space, cpathname, source) - try: - optimize = space.sys.get_flag('optimize') - except RuntimeError: - # during bootstrapping - optimize = 0 + optimize = space.sys.get_optimize() if optimize >= 2: code_w.remove_docstrings(space) diff --git a/pypy/module/sys/moduledef.py b/pypy/module/sys/moduledef.py --- a/pypy/module/sys/moduledef.py +++ b/pypy/module/sys/moduledef.py @@ -25,7 +25,6 @@ self.recursionlimit = 1000 self.defaultencoding = "utf-8" self.filesystemencoding = None - self.debug = True self.track_resources = False self.finalizing = False self.dlopenflags = rdynload._dlopen_default_mode() @@ -239,3 +238,9 @@ def get_state(self, space): from pypy.module.sys import state return state.get(space) + + def get_optimize(self): + try: + return self.get_flag('optimize') + except RuntimeError: # bootstrapping + return 0 From pypy.commits at gmail.com Wed Aug 7 09:38:34 2019 From: pypy.commits at gmail.com (arigo) Date: Wed, 07 Aug 2019 06:38:34 -0700 (PDT) Subject: [pypy-commit] extradoc extradoc: Blog post draft Message-ID: <5d4ad45a.1c69fb81.18a4f.030f@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r5957:e4443ebd55c3 Date: 2019-08-07 15:38 +0200 http://bitbucket.org/pypy/extradoc/changeset/e4443ebd55c3/ Log: Blog post draft diff --git a/blog/draft/2019-08-sandbox.rst b/blog/draft/2019-08-sandbox.rst new file mode 100644 --- /dev/null +++ b/blog/draft/2019-08-sandbox.rst @@ -0,0 +1,34 @@ +Hi all, + +Anvil_ is a UK-based company sponsoring one month of work to revive PyPy's +"sandbox" mode and upgrade it to PyPy3. Thanks to them, sandboxing will be +given a second life! + +Remember sandboxing? It is (or rather was) a special version of PyPy that runs +in a fully-isolated mode. It gives a safe way to execute arbitrary Python +scripts (*whole* scripts, not small bits of code inside your larger Python +program). Such scripts can be fully untrusted, and they can try to do +anything---there are no syntax-based restrictions, for example---but whatever +they do, any communication with the external world is not actually done but +delegated to the parent process. This is similar but much more flexible than +Linux's Seccomp approach, and it is more lightweight than setting up a full +virtual machine. It also works without operating system support. + +This sandbox mode of PyPy was deprecated long ago because of a lack of +interest, and because it took too much effort for us to maintain it. + +Now we have found that we have an actual user, Anvil_. The work starts now. +Part of my motivation for accepting this work is that I may have found a way to +tweak the protocol on the pipe between the sandboxed PyPy and the parent +controller process. 
This should make the sandboxed PyPy more resilient against +future developments; at most, in the future some tweaks will be needed in the +controller process but hopefully not deep inside the guts of the sandboxed +PyPy. Among the advantages, such a more robust solution should mean that we +can actually get a working sandboxed PyPy or sandboxed PyPy3 or sandboxed +version of any other interpreter written in RPython---with just an extra +argument when calling ``rpython`` to translate this interpreter. + +Armin Rigo + +.. _Anvil: https://anvil.works + From pypy.commits at gmail.com Wed Aug 7 09:53:32 2019 From: pypy.commits at gmail.com (arigo) Date: Wed, 07 Aug 2019 06:53:32 -0700 (PDT) Subject: [pypy-commit] extradoc extradoc: tweaks Message-ID: <5d4ad7dc.1c69fb81.cda74.0de7@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r5958:3cfd520fdfe4 Date: 2019-08-07 15:53 +0200 http://bitbucket.org/pypy/extradoc/changeset/3cfd520fdfe4/ Log: tweaks diff --git a/blog/draft/2019-08-sandbox.rst b/blog/draft/2019-08-sandbox.rst --- a/blog/draft/2019-08-sandbox.rst +++ b/blog/draft/2019-08-sandbox.rst @@ -4,7 +4,7 @@ "sandbox" mode and upgrade it to PyPy3. Thanks to them, sandboxing will be given a second life! -Remember sandboxing? It is (or rather was) a special version of PyPy that runs +Remember sandboxing_? It is (or rather was) a special version of PyPy that runs in a fully-isolated mode. It gives a safe way to execute arbitrary Python scripts (*whole* scripts, not small bits of code inside your larger Python program). Such scripts can be fully untrusted, and they can try to do @@ -17,7 +17,10 @@ This sandbox mode of PyPy was deprecated long ago because of a lack of interest, and because it took too much effort for us to maintain it. -Now we have found that we have an actual user, Anvil_. The work starts now. +Now we have found that we have an actual user, Anvil_. As far as I can tell +they are still using a very old version of PyPy, the last one that supported +sandboxing. Thus this contract to modernize it and port it to PyPy3. + Part of my motivation for accepting this work is that I may have found a way to tweak the protocol on the pipe between the sandboxed PyPy and the parent controller process. This should make the sandboxed PyPy more resilient against @@ -26,9 +29,10 @@ PyPy. Among the advantages, such a more robust solution should mean that we can actually get a working sandboxed PyPy or sandboxed PyPy3 or sandboxed version of any other interpreter written in RPython---with just an extra -argument when calling ``rpython`` to translate this interpreter. +argument when calling ``rpython`` to translate this interpreter. If everything +works as planned, sandboxing may be given a second life. Armin Rigo .. _Anvil: https://anvil.works - +.. _sandboxing: http://doc.pypy.org/en/latest/sandbox.html From pypy.commits at gmail.com Wed Aug 7 10:04:27 2019 From: pypy.commits at gmail.com (antocuni) Date: Wed, 07 Aug 2019 07:04:27 -0700 (PDT) Subject: [pypy-commit] extradoc extradoc: few tweaks Message-ID: <5d4ada6b.1c69fb81.2be33.80cd@mx.google.com> Author: Antonio Cuni Branch: extradoc Changeset: r5959:0aa490b09381 Date: 2019-08-07 16:04 +0200 http://bitbucket.org/pypy/extradoc/changeset/0aa490b09381/ Log: few tweaks diff --git a/blog/draft/2019-08-sandbox.rst b/blog/draft/2019-08-sandbox.rst --- a/blog/draft/2019-08-sandbox.rst +++ b/blog/draft/2019-08-sandbox.rst @@ -4,7 +4,7 @@ "sandbox" mode and upgrade it to PyPy3. Thanks to them, sandboxing will be given a second life! 
-Remember sandboxing_? It is (or rather was) a special version of PyPy that runs +Sandbox_ is a special version of PyPy that runs in a fully-isolated mode. It gives a safe way to execute arbitrary Python scripts (*whole* scripts, not small bits of code inside your larger Python program). Such scripts can be fully untrusted, and they can try to do @@ -14,8 +14,10 @@ Linux's Seccomp approach, and it is more lightweight than setting up a full virtual machine. It also works without operating system support. -This sandbox mode of PyPy was deprecated long ago because of a lack of -interest, and because it took too much effort for us to maintain it. +However, during the course of the years the sandbox mode of PyPy has been +mostly unmaintained and unsupported by the core developers, mostly because of +a lack of interest by users and because it took too much effort to maintain +it. Now we have found that we have an actual user, Anvil_. As far as I can tell they are still using a very old version of PyPy, the last one that supported @@ -24,7 +26,7 @@ Part of my motivation for accepting this work is that I may have found a way to tweak the protocol on the pipe between the sandboxed PyPy and the parent controller process. This should make the sandboxed PyPy more resilient against -future developments; at most, in the future some tweaks will be needed in the +future developments and easier to maintain; at most, in the future some tweaks will be needed in the controller process but hopefully not deep inside the guts of the sandboxed PyPy. Among the advantages, such a more robust solution should mean that we can actually get a working sandboxed PyPy or sandboxed PyPy3 or sandboxed @@ -35,4 +37,4 @@ Armin Rigo .. _Anvil: https://anvil.works -.. _sandboxing: http://doc.pypy.org/en/latest/sandbox.html +.. _sandbox: http://doc.pypy.org/en/latest/sandbox.html From pypy.commits at gmail.com Wed Aug 7 10:06:57 2019 From: pypy.commits at gmail.com (arigo) Date: Wed, 07 Aug 2019 07:06:57 -0700 (PDT) Subject: [pypy-commit] extradoc extradoc: Link to https://rpython.readthedocs.io/en/latest/examples.html Message-ID: <5d4adb01.1c69fb81.59e8f.2883@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r5960:34919a72c45c Date: 2019-08-07 16:06 +0200 http://bitbucket.org/pypy/extradoc/changeset/34919a72c45c/ Log: Link to https://rpython.readthedocs.io/en/latest/examples.html (thanks matti) diff --git a/blog/draft/2019-08-sandbox.rst b/blog/draft/2019-08-sandbox.rst --- a/blog/draft/2019-08-sandbox.rst +++ b/blog/draft/2019-08-sandbox.rst @@ -30,7 +30,7 @@ controller process but hopefully not deep inside the guts of the sandboxed PyPy. Among the advantages, such a more robust solution should mean that we can actually get a working sandboxed PyPy or sandboxed PyPy3 or sandboxed -version of any other interpreter written in RPython---with just an extra +version of `any other interpreter`_ written in RPython---with just an extra argument when calling ``rpython`` to translate this interpreter. If everything works as planned, sandboxing may be given a second life. @@ -38,3 +38,4 @@ .. _Anvil: https://anvil.works .. _sandbox: http://doc.pypy.org/en/latest/sandbox.html +.. 
_`any other interpreter`: https://rpython.readthedocs.io/en/latest/examples.html From pypy.commits at gmail.com Wed Aug 7 10:10:23 2019 From: pypy.commits at gmail.com (arigo) Date: Wed, 07 Aug 2019 07:10:23 -0700 (PDT) Subject: [pypy-commit] extradoc extradoc: tweaks Message-ID: <5d4adbcf.1c69fb81.a0fa.0d2e@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r5961:cb251149f1ea Date: 2019-08-07 16:10 +0200 http://bitbucket.org/pypy/extradoc/changeset/cb251149f1ea/ Log: tweaks diff --git a/blog/draft/2019-08-sandbox.rst b/blog/draft/2019-08-sandbox.rst --- a/blog/draft/2019-08-sandbox.rst +++ b/blog/draft/2019-08-sandbox.rst @@ -4,9 +4,9 @@ "sandbox" mode and upgrade it to PyPy3. Thanks to them, sandboxing will be given a second life! -Sandbox_ is a special version of PyPy that runs -in a fully-isolated mode. It gives a safe way to execute arbitrary Python -scripts (*whole* scripts, not small bits of code inside your larger Python +The `sandboxed PyPy`_ is a special version of PyPy that runs +fully isolated. It gives a safe way to execute arbitrary Python +programs (*whole* programs, not small bits of code inside your larger Python program). Such scripts can be fully untrusted, and they can try to do anything---there are no syntax-based restrictions, for example---but whatever they do, any communication with the external world is not actually done but @@ -37,5 +37,5 @@ Armin Rigo .. _Anvil: https://anvil.works -.. _sandbox: http://doc.pypy.org/en/latest/sandbox.html +.. _`sandboxed PyPy`: http://doc.pypy.org/en/latest/sandbox.html .. _`any other interpreter`: https://rpython.readthedocs.io/en/latest/examples.html From pypy.commits at gmail.com Wed Aug 7 11:42:31 2019 From: pypy.commits at gmail.com (arigo) Date: Wed, 07 Aug 2019 08:42:31 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: Start to port the tests Message-ID: <5d4af167.1c69fb81.e4092.0f81@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97085:735745c7bd90 Date: 2019-08-07 17:41 +0200 http://bitbucket.org/pypy/pypy/changeset/735745c7bd90/ Log: Start to port the tests diff --git a/rpython/translator/sandbox/rsandbox.py b/rpython/translator/sandbox/rsandbox.py --- a/rpython/translator/sandbox/rsandbox.py +++ b/rpython/translator/sandbox/rsandbox.py @@ -23,27 +23,6 @@ log = AnsiLogger("sandbox") -def reraise_error(error, loader): - if error == 1: - raise OSError(load_int(loader), "external error") - elif error == 2: - raise IOError - elif error == 3: - raise OverflowError - elif error == 4: - raise ValueError - elif error == 5: - raise ZeroDivisionError - elif error == 6: - raise MemoryError - elif error == 7: - raise KeyError - elif error == 8: - raise IndexError - else: - raise RuntimeError - - def getkind(TYPE, parent_function): if TYPE is lltype.Void: return 'v' diff --git a/rpython/translator/sandbox/sandboxio.py b/rpython/translator/sandbox/sandboxio.py new file mode 100644 --- /dev/null +++ b/rpython/translator/sandbox/sandboxio.py @@ -0,0 +1,123 @@ +import struct + + +class SandboxError(Exception): + """The sandboxed process misbehaved""" + + +class Ptr(object): + def __init__(self, addr): + self.addr = addr + + def __repr__(self): + return 'Ptr(%s)' % (hex(self.addr),) + + +_ptr_size = struct.calcsize("P") +_ptr_code = 'q' if _ptr_size == 8 else 'i' +_pack_one_ptr = struct.Struct("=" + _ptr_code).pack +_pack_one_longlong = struct.Struct("=q").pack +_pack_one_double = struct.Struct("=d").pack +_pack_two_ptrs = struct.Struct("=" + _ptr_code + _ptr_code).pack +_unpack_one_ptr = struct.Struct("=" + 
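#
# ----------------------------------------------------------------------
# An illustrative aside (not itself part of this changeset): a minimal,
# self-contained sketch of the wire format used by the new approach, pieced
# together from the rsandbox.c and sandboxio.py code in these in-progress
# commits.  The function name "write(ipi)i" and the argument values are
# invented for the example only.
import struct

PTR = "q" if struct.calcsize("P") == 8 else "i"   # same trick as sandboxio.py

# child side: one length byte, the "name(argkinds)resultkind" string, then
# the arguments packed natively ('i' -> 64-bit int, 'f' -> double, 'p' -> ptr)
name_and_sig = "write(ipi)i"
args = struct.pack("=q" + PTR + "q", 1, 0x1000, 5)
message = struct.pack("B", len(name_and_sig)) + name_and_sig.encode() + args

# parent side: roughly what SandboxedIO.read_message() reconstructs
n = struct.unpack("B", message[:1])[0]
msg = message[1:1 + n].decode()
kinds = msg[msg.index("(") + 1:msg.index(")")]        # -> "ipi"
fmt = "=" + "".join({"i": "q", "f": "d", "p": PTR}[k] for k in kinds)
print(msg, struct.unpack(fmt, message[1 + n:]))       # write(ipi)i (1, 4096, 5)
# ----------------------------------------------------------------------
#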
_ptr_code).unpack + + +class SandboxedIO(object): + _message_decoders = {} + + + def __init__(self, popen): + self.popen = popen + self.child_stdin = popen.stdin + self.child_stdout = popen.stdout + + def close(self): + """Kill the subprocess and close the file descriptors to the pipe. + """ + self.popen.terminate() + self.child_stdin.close() + self.child_stdout.close() + + def _read(self, count): + result = self.child_stdout.read(count) + if len(result) != count: + raise SandboxError( + "connection interrupted with the sandboxed process") + return result + + @staticmethod + def _make_message_decoder(data): + i1 = data.find('(') + i2 = data.find(')') + if not (i1 > 0 and i1 < i2 and i2 == len(data) - 2): + raise SandboxError( + "badly formatted data received from the sandboxed process") + pack_args = ['='] + for c in data[i1+1:i2]: + if c == 'p': + pack_args.append(_ptr_code) + elif c == 'i': + pack_args.append('q') + elif c == 'f': + pack_args.append('d') + elif c == 'v': + pass + else: + raise SandboxError( + "unsupported format string in parentheses: %r" % (data,)) + unpacker = struct.Struct(''.join(pack_args)) + decoder = unpacker, data[i1+1:i2] + + SandboxedIO._message_decoders[data] = decoder + return decoder + + def read_message(self): + """Wait for the next message and returns it. Raises EOFError if the + subprocess finished. Raises SandboxError if there is another kind + of detected misbehaviour. + """ + ch = self.child_stdout.read(1) + if len(ch) == 0: + raise EOFError + n = ord(ch) + msg = self._read(n) + decoder = self._message_decoders.get(msg) + if decoder is None: + decoder = self._make_message_decoder(msg) + + unpacker, codes = decoder + raw_args = iter(unpacker.unpack(self._read(unpacker.size))) + args = [] + for c in codes: + if c == 'p': + args.append(Ptr(next(raw_args))) + elif c == 'v': + args.append(None) + else: + args.append(next(raw_args)) + return msg, args + + def read_buffer(self, ptr, length): + g = self.child_stdin + g.write("R" + _pack_two_ptrs(ptr.addr, length)) + g.flush() + return self._read(length) + + def read_charp(self, ptr, maxlen=-1): + g = self.child_stdin + g.write("Z" + _pack_two_ptrs(ptr.addr, maxlen)) + g.flush() + length = _unpack_one_ptr(self._read(_ptr_size))[0] + return self._read(length) + + def write_result(self, result): + g = self.child_stdin + if result is None: + g.write('v') + elif isinstance(result, Ptr): + g.write('p' + _pack_one_ptr(result.addr)) + elif isinstance(result, float): + g.write('f' + _pack_one_double(result)) + else: + g.write('i' + _pack_one_longlong(result)) + g.flush() diff --git a/rpython/translator/sandbox/sandlib.py b/rpython/translator/sandbox/sandlib.py --- a/rpython/translator/sandbox/sandlib.py +++ b/rpython/translator/sandbox/sandlib.py @@ -18,65 +18,6 @@ from rpython.tool.ansi_print import AnsiLogger return AnsiLogger("sandlib") -# Note: we use lib_pypy/marshal.py instead of the built-in marshal -# for two reasons. The built-in module could be made to segfault -# or be attackable in other ways by sending malicious input to -# load(). Also, marshal.load(f) blocks with the GIL held when -# f is a pipe with no data immediately avaialble, preventing the -# _waiting_thread to run. 
-from rpython.translator.sandbox import _marshal as marshal - -# Non-marshal result types -RESULTTYPE_STATRESULT = object() -RESULTTYPE_LONGLONG = object() - -def read_message(f): - return marshal.load(f) - -def write_message(g, msg, resulttype=None): - if resulttype is None: - if sys.version_info < (2, 4): - marshal.dump(msg, g) - else: - marshal.dump(msg, g, 0) - elif resulttype is RESULTTYPE_STATRESULT: - # Hand-coded marshal for stat results that mimics what rmarshal expects. - # marshal.dump(tuple(msg)) would have been too easy. rmarshal insists - # on 64-bit ints at places, even when the value fits in 32 bits. - import struct - st = tuple(msg) - fmt = "iIIiiiIfff" - buf = [] - buf.append(struct.pack(" Author: Armin Rigo Branch: sandbox-2 Changeset: r97086:18e96c1f9821 Date: 2019-08-07 17:53 +0200 http://bitbucket.org/pypy/pypy/changeset/18e96c1f9821/ Log: Avoid hacking by mutating the lltype function objects diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py --- a/rpython/translator/c/node.py +++ b/rpython/translator/c/node.py @@ -885,11 +885,18 @@ if db.sandbox: if (getattr(obj, 'external', None) is not None and not obj._safe_not_sandboxed): - from rpython.translator.sandbox import rsandbox - obj.__dict__['graph'] = rsandbox.get_sandbox_stub( - obj, db.translator.rtyper) - obj.__dict__.pop('_safe_not_sandboxed', None) - obj.__dict__.pop('external', None) + try: + sandbox_mapping = db.sandbox_mapping + except AttributeError: + sandbox_mapping = db.sandbox_mapping = {} + try: + obj = sandbox_mapping[obj] + except KeyError: + from rpython.translator.sandbox import rsandbox + llfunc = rsandbox.get_sandbox_stub( + obj, db.translator.rtyper) + sandbox_mapping[obj] = llfunc._obj + obj = llfunc._obj if forcename: name = forcename else: diff --git a/rpython/translator/sandbox/rsandbox.py b/rpython/translator/sandbox/rsandbox.py --- a/rpython/translator/sandbox/rsandbox.py +++ b/rpython/translator/sandbox/rsandbox.py @@ -116,7 +116,7 @@ def _annotate(rtyper, f, args_s, s_result): ann = MixLevelHelperAnnotator(rtyper) - graph = ann.getgraph(f, args_s, s_result) + llfunc = ann.delayedfunction(f, args_s, s_result, needtype=True) ann.finish() ann.backend_optimize() - return graph + return llfunc From pypy.commits at gmail.com Wed Aug 7 12:11:19 2019 From: pypy.commits at gmail.com (rlamy) Date: Wed, 07 Aug 2019 09:11:19 -0700 (PDT) Subject: [pypy-commit] pypy default: remove obsolete test file Message-ID: <5d4af827.1c69fb81.727f6.48b3@mx.google.com> Author: Ronan Lamy Branch: Changeset: r97087:1c261775559f Date: 2019-08-07 17:10 +0100 http://bitbucket.org/pypy/pypy/changeset/1c261775559f/ Log: remove obsolete test file diff --git a/pypy/interpreter/pyparser/test/unittest_samples.py b/pypy/interpreter/pyparser/test/unittest_samples.py deleted file mode 100644 --- a/pypy/interpreter/pyparser/test/unittest_samples.py +++ /dev/null @@ -1,95 +0,0 @@ -"""test module for CPython / PyPy nested tuples comparison""" - -import os, os.path as osp -import sys -from pypy.interpreter.pyparser.pythonutil import python_parse, pypy_parse -from pprint import pprint -from pypy.interpreter.pyparser import grammar -grammar.DEBUG = False -from symbol import sym_name - - -def name(elt): - return "%s[%s]"% (sym_name.get(elt,elt),elt) - -def read_samples_dir(): - return [osp.join('samples', fname) for fname in os.listdir('samples') if fname.endswith('.py')] - -def print_sym_tuple(nested, level=0, limit=15, names=False, trace=()): - buf = [] - if level <= limit: - buf.append("%s(" % (" "*level)) - 
else: - buf.append("(") - for index, elt in enumerate(nested): - # Test if debugging and if on last element of error path - if trace and not trace[1:] and index == trace[0]: - buf.append('\n----> ') - if type(elt) is int: - if names: - buf.append(name(elt)) - else: - buf.append(str(elt)) - buf.append(', ') - elif type(elt) is str: - buf.append(repr(elt)) - else: - if level < limit: - buf.append('\n') - buf.extend(print_sym_tuple(elt, level+1, limit, - names, trace[1:])) - buf.append(')') - return buf - -def assert_tuples_equal(tup1, tup2, curpos = ()): - for index, (elt1, elt2) in enumerate(zip(tup1, tup2)): - if elt1 != elt2: - if type(elt1) is tuple and type(elt2) is tuple: - assert_tuples_equal(elt1, elt2, curpos + (index,)) - raise AssertionError('Found difference at %s : %s != %s' % - (curpos, name(elt1), name(elt2) ), curpos) - -from time import time, clock -def test_samples( samples ): - time_reports = {} - for sample in samples: - print "testing", sample - tstart1, cstart1 = time(), clock() - pypy_tuples = pypy_parse(sample) - tstart2, cstart2 = time(), clock() - python_tuples = python_parse(sample) - time_reports[sample] = (time() - tstart2, tstart2-tstart1, clock() - cstart2, cstart2-cstart1 ) - #print "-"*10, "PyPy parse results", "-"*10 - #print ''.join(print_sym_tuple(pypy_tuples, names=True)) - #print "-"*10, "CPython parse results", "-"*10 - #print ''.join(print_sym_tuple(python_tuples, names=True)) - print - try: - assert_tuples_equal(pypy_tuples, python_tuples) - except AssertionError as e: - error_path = e.args[-1] - print "ERROR PATH =", error_path - print "="*80 - print file(sample).read() - print "="*80 - print "-"*10, "PyPy parse results", "-"*10 - print ''.join(print_sym_tuple(pypy_tuples, names=True, trace=error_path)) - print "-"*10, "CPython parse results", "-"*10 - print ''.join(print_sym_tuple(python_tuples, names=True, trace=error_path)) - print "Failed on (%s)" % sample - # raise - pprint(time_reports) - -if __name__=="__main__": - import getopt - opts, args = getopt.getopt( sys.argv[1:], "d:", [] ) - for opt, val in opts: - if opt == "-d": - pass -# set_debug(int(val)) - if args: - samples = args - else: - samples = read_samples_dir() - - test_samples( samples ) From pypy.commits at gmail.com Wed Aug 7 12:18:13 2019 From: pypy.commits at gmail.com (rlamy) Date: Wed, 07 Aug 2019 09:18:13 -0700 (PDT) Subject: [pypy-commit] pypy __debug__-optimize: remove test for deleted __pypy__.set_debug() Message-ID: <5d4af9c5.1c69fb81.de404.2ce0@mx.google.com> Author: Ronan Lamy Branch: __debug__-optimize Changeset: r97088:0b9e2d964855 Date: 2019-08-07 17:08 +0100 http://bitbucket.org/pypy/pypy/changeset/0b9e2d964855/ Log: remove test for deleted __pypy__.set_debug() diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py b/pypy/interpreter/astcompiler/test/test_compiler.py --- a/pypy/interpreter/astcompiler/test/test_compiler.py +++ b/pypy/interpreter/astcompiler/test/test_compiler.py @@ -1041,20 +1041,6 @@ code_w.exec_code(self.space, dict_w, dict_w) self.check(dict_w, expr, result) - def test_assert_skipping(self): - space = self.space - mod = space.getbuiltinmodule('__pypy__') - w_set_debug = space.getattr(mod, space.wrap('set_debug')) - space.call_function(w_set_debug, space.w_False) - - source = """if 1: - assert False - """ - try: - self.run(source) - finally: - space.call_function(w_set_debug, space.w_True) - def test_dont_fold_equal_code_objects(self): yield self.st, "f=lambda:1;g=lambda:1.0;x=g()", 'type(x)', float yield (self.st, "x=(lambda: (-0.0, 0.0), 
lambda: (0.0, -0.0))[1]()", From pypy.commits at gmail.com Wed Aug 7 12:24:57 2019 From: pypy.commits at gmail.com (arigo) Date: Wed, 07 Aug 2019 09:24:57 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: in-progress: fixing the tests Message-ID: <5d4afb59.1c69fb81.25aff.15dc@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97089:1296060ecad0 Date: 2019-08-07 18:24 +0200 http://bitbucket.org/pypy/pypy/changeset/1296060ecad0/ Log: in-progress: fixing the tests diff --git a/rpython/translator/sandbox/sandboxio.py b/rpython/translator/sandbox/sandboxio.py --- a/rpython/translator/sandbox/sandboxio.py +++ b/rpython/translator/sandbox/sandboxio.py @@ -110,6 +110,12 @@ length = _unpack_one_ptr(self._read(_ptr_size))[0] return self._read(length) + def write_buffer(self, ptr, bytes_data): + g = self.child_stdin + g.write("W" + _pack_two_ptrs(ptr.addr, len(bytes_data))) + g.write(bytes_data) + # g.flush() not necessary here + def write_result(self, result): g = self.child_stdin if result is None: diff --git a/rpython/translator/sandbox/test/test_sandbox.py b/rpython/translator/sandbox/test/test_sandboxio.py rename from rpython/translator/sandbox/test/test_sandbox.py rename to rpython/translator/sandbox/test/test_sandboxio.py --- a/rpython/translator/sandbox/test/test_sandbox.py +++ b/rpython/translator/sandbox/test/test_sandboxio.py @@ -23,6 +23,14 @@ signal.alarm(0) signal.signal(signal.SIGALRM, signal.SIG_DFL) +class OUT(object): + def __init__(self, raw): + self.raw = raw + +class RAW(object): + def __init__(self, raw): + self.raw = raw + _NO_RESULT = object() def expect(sandio, fnname, expected_args, result=_NO_RESULT): @@ -34,11 +42,23 @@ assert type(arg) is Ptr arg_str = sandio.read_charp(arg, len(expected_arg) + 100) assert arg_str == expected_arg + elif type(expected_arg) is OUT: + assert type(arg) is Ptr + sandio.write_buffer(arg, expected_arg.raw) + elif type(expected_arg) is RAW: + assert type(arg) is Ptr + arg_str = sandio.read_buffer(arg, len(expected_arg.raw)) + assert arg_str == expected_arg.raw else: assert arg == expected_arg if result is not _NO_RESULT: sandio.write_result(result) +def expect_done(sandio): + with py.test.raises(EOFError): + sandio.read_message() + sandio.close() + def compile(f, gc='ref', **kwds): t = Translation(f, backend='c', sandbox=True, gc=gc, check_str_without_nul=True, **kwds) @@ -62,9 +82,7 @@ sandio = run_in_subprocess(exe) expect(sandio, "open(pii)i", ("/tmp/foobar", os.O_RDONLY, 0777), 77) expect(sandio, "dup(i)i", (77,), 78) - with py.test.raises(EOFError): - sandio.read_message() - sandio.close() + expect_done(sandio) def test_open_dup_rposix(): from rpython.rlib import rposix @@ -78,10 +96,8 @@ exe = compile(entry_point) sandio = run_in_subprocess(exe) expect(sandio, "open(pii)i", ("/tmp/foobar", os.O_RDONLY, 0777), 77) - expect(sandio, "dup(i)i", (77, True), 78) - with py.test.raises(EOFError): - sandio.read_message() - sandio.close() + expect(sandio, "dup(i)i", (77,), 78) + expect_done(sandio) def test_read_write(): def entry_point(argv): @@ -95,15 +111,13 @@ return 0 exe = compile(entry_point) - g, f = run_in_subprocess(exe) - expect(f, g, "ll_os.ll_os_open", ("/tmp/foobar", os.O_RDONLY, 0777), 77) - expect(f, g, "ll_os.ll_os_read", (77, 123), "he\x00llo") - expect(f, g, "ll_os.ll_os_write", (77, "world\x00!\x00"), 42) - expect(f, g, "ll_os.ll_os_close", (77,), None) - g.close() - tail = f.read() - f.close() - assert tail == "" + sandio = run_in_subprocess(exe) + expect(sandio, "open(pii)i", ("/tmp/foobar", os.O_RDONLY, 0777), 77) 
+ expect(sandio, "read(ipi)i", (77, OUT("he\x00llo"), 123), len("he\x00llo")) + sz = len("world\x00!\x00") + expect(sandio, "write(ipi)i", (77, RAW("world\x00!\x00"), sz), 42) + expect(sandio, "close(i)i", (77,), 0) + expect_done(sandio) def test_dup2_access(): def entry_point(argv): From pypy.commits at gmail.com Wed Aug 7 12:32:43 2019 From: pypy.commits at gmail.com (arigo) Date: Wed, 07 Aug 2019 09:32:43 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: Next test Message-ID: <5d4afd2b.1c69fb81.3d537.1733@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97090:2b59f76840c5 Date: 2019-08-07 18:32 +0200 http://bitbucket.org/pypy/pypy/changeset/2b59f76840c5/ Log: Next test diff --git a/rpython/translator/sandbox/sandboxio.py b/rpython/translator/sandbox/sandboxio.py --- a/rpython/translator/sandbox/sandboxio.py +++ b/rpython/translator/sandbox/sandboxio.py @@ -34,7 +34,8 @@ def close(self): """Kill the subprocess and close the file descriptors to the pipe. """ - self.popen.terminate() + if self.popen.returncode is None: + self.popen.terminate() self.child_stdin.close() self.child_stdout.close() diff --git a/rpython/translator/sandbox/test/test_sandboxio.py b/rpython/translator/sandbox/test/test_sandboxio.py --- a/rpython/translator/sandbox/test/test_sandboxio.py +++ b/rpython/translator/sandbox/test/test_sandboxio.py @@ -57,6 +57,7 @@ def expect_done(sandio): with py.test.raises(EOFError): sandio.read_message() + assert sandio.popen.wait() == 0 # exit code 0 sandio.close() def compile(f, gc='ref', **kwds): @@ -126,13 +127,10 @@ return 1 - y exe = compile(entry_point) - g, f = run_in_subprocess(exe) - expect(f, g, "ll_os.ll_os_dup2", (34, 56, True), None) - expect(f, g, "ll_os.ll_os_access", ("spam", 77), True) - g.close() - tail = f.read() - f.close() - assert tail == "" + sandio = run_in_subprocess(exe) + expect(sandio, "dup2(ii)i", (34, 56), 0) + expect(sandio, "access(pi)i", ("spam", 77), 0) + expect_done(sandio) def test_stat_ftruncate(): from rpython.translator.sandbox.sandlib import RESULTTYPE_STATRESULT From pypy.commits at gmail.com Wed Aug 7 12:56:48 2019 From: pypy.commits at gmail.com (arigo) Date: Wed, 07 Aug 2019 09:56:48 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: More tests, add missing bits Message-ID: <5d4b02d0.1c69fb81.6eec6.aa60@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97091:864af487a9c3 Date: 2019-08-07 18:56 +0200 http://bitbucket.org/pypy/pypy/changeset/864af487a9c3/ Log: More tests, add missing bits diff --git a/rpython/translator/sandbox/sandboxio.py b/rpython/translator/sandbox/sandboxio.py --- a/rpython/translator/sandbox/sandboxio.py +++ b/rpython/translator/sandbox/sandboxio.py @@ -18,6 +18,7 @@ _pack_one_ptr = struct.Struct("=" + _ptr_code).pack _pack_one_longlong = struct.Struct("=q").pack _pack_one_double = struct.Struct("=d").pack +_pack_one_int = struct.Struct("=i").pack _pack_two_ptrs = struct.Struct("=" + _ptr_code + _ptr_code).pack _unpack_one_ptr = struct.Struct("=" + _ptr_code).unpack @@ -128,3 +129,21 @@ else: g.write('i' + _pack_one_longlong(result)) g.flush() + + def set_errno(self, err): + g = self.child_stdin + g.write("E" + _pack_one_int(err)) + # g.flush() not necessary here + + def malloc(self, bytes_data): + g = self.child_stdin + g.write("M" + _pack_one_ptr(len(bytes_data))) + g.write(bytes_data) + g.flush() + addr = _unpack_one_ptr(self._read(_ptr_size))[0] + return Ptr(addr) + + def free(self, ptr): + g = self.child_stdin + g.write("F" + _pack_one_ptr(ptr.addr)) + # g.flush() not necessary here 
diff --git a/rpython/translator/sandbox/test/test_sandboxio.py b/rpython/translator/sandbox/test/test_sandboxio.py --- a/rpython/translator/sandbox/test/test_sandboxio.py +++ b/rpython/translator/sandbox/test/test_sandboxio.py @@ -1,5 +1,5 @@ import py -import sys, os, time +import sys, os, time, errno import struct import subprocess import signal @@ -31,9 +31,19 @@ def __init__(self, raw): self.raw = raw +class ARG(object): + def __init__(self, index): + self.index = index + +class MALLOC_FREE(object): + def __init__(self, raw): + self.raw = raw + +ANY = object() +NULL = object() _NO_RESULT = object() -def expect(sandio, fnname, expected_args, result=_NO_RESULT): +def expect(sandio, fnname, expected_args, result=_NO_RESULT, errno=_NO_RESULT): msg, args = sandio.read_message() assert msg == fnname assert len(args) == len(expected_args) @@ -49,10 +59,24 @@ assert type(arg) is Ptr arg_str = sandio.read_buffer(arg, len(expected_arg.raw)) assert arg_str == expected_arg.raw + elif expected_arg is ANY: + pass + elif expected_arg is NULL: + assert type(arg) is Ptr + assert arg.addr == 0 else: assert arg == expected_arg + if errno is not _NO_RESULT: + sandio.set_errno(errno) if result is not _NO_RESULT: - sandio.write_result(result) + if type(result) is ARG: + result = args[result.index] + if type(result) is MALLOC_FREE: + ptr = sandio.malloc(result.raw) + sandio.write_result(ptr) + sandio.free(ptr) + else: + sandio.write_result(result) def expect_done(sandio): with py.test.raises(EOFError): @@ -132,6 +156,7 @@ expect(sandio, "access(pi)i", ("spam", 77), 0) expect_done(sandio) + at py.test.mark.skip() def test_stat_ftruncate(): from rpython.translator.sandbox.sandlib import RESULTTYPE_STATRESULT from rpython.rlib.rarithmetic import r_longlong @@ -159,32 +184,27 @@ def test_time(): def entry_point(argv): t = time.time() - os.dup(int(t*1000)) + os.dup(int(t)) return 0 exe = compile(entry_point) - g, f = run_in_subprocess(exe) - expect(f, g, "ll_time.ll_time_time", (), 3.141592) - expect(f, g, "ll_os.ll_os_dup", (3141, True), 3) - g.close() - tail = f.read() - f.close() - assert tail == "" + sandio = run_in_subprocess(exe) + expect(sandio, "gettimeofday(pp)i", (ANY, ANY), -1, errno=errno.ENOSYS) + expect(sandio, "time(p)i", (NULL,), 314159) + expect(sandio, "dup(i)i", (314159,), 3) + expect_done(sandio) def test_getcwd(): def entry_point(argv): t = os.getcwd() - os.dup(len(t)) + os.open(t, os.O_RDONLY, 0777) return 0 exe = compile(entry_point) - g, f = run_in_subprocess(exe) - expect(f, g, "ll_os.ll_os_getcwd", (), "/tmp/foo/bar") - expect(f, g, "ll_os.ll_os_dup", (len("/tmp/foo/bar"), True), 3) - g.close() - tail = f.read() - f.close() - assert tail == "" + sandio = run_in_subprocess(exe) + expect(sandio, "getcwd(pi)p", (OUT("/tmp/foo/bar"), ANY), ARG(0)) + expect(sandio, "open(pii)i", ("/tmp/foo/bar", os.O_RDONLY, 0777), 77) + expect_done(sandio) def test_oserror(): def entry_point(argv): @@ -195,37 +215,22 @@ return 0 exe = compile(entry_point) - g, f = run_in_subprocess(exe) - expect(f, g, "ll_os.ll_os_stat", ("somewhere",), OSError(6321, "egg")) - expect(f, g, "ll_os.ll_os_close", (6321,), None) - g.close() - tail = f.read() - f.close() - assert tail == "" + sandio = run_in_subprocess(exe) + expect(sandio, "stat64(pp)i", ("somewhere", ANY), -1, errno=6321) + expect(sandio, "close(i)i", (6321,), 0) + expect_done(sandio) -def test_hybrid_gc(): +def test_getenv(): def entry_point(argv): - l = [] - for i in range(int(argv[1])): - l.append("x" * int(argv[2])) - return int(len(l) > 1000) + s = 
os.environ["FOOBAR"] + os.open(s, 0, 0) + return 0 - exe = compile(entry_point, gc='hybrid', lldebug=True) - pipe = subprocess.Popen([exe, '10', '10000'], stdout=subprocess.PIPE, - stdin=subprocess.PIPE) - g = pipe.stdin - f = pipe.stdout - expect(f, g, "ll_os.ll_os_getenv", ("PYPY_GENERATIONGC_NURSERY",), None) - #if sys.platform.startswith('linux'): - # expect(f, g, "ll_os.ll_os_open", ("/proc/cpuinfo", 0, 420), - # OSError(5232, "xyz")) - expect(f, g, "ll_os.ll_os_getenv", ("PYPY_GC_DEBUG",), None) - g.close() - tail = f.read() - f.close() - assert tail == "" - rescode = pipe.wait() - assert rescode == 0 + exe = compile(entry_point) + sandio = run_in_subprocess(exe) + expect(sandio, "getenv(p)p", ("FOOBAR",), MALLOC_FREE("tmp_foo_bar")) + expect(sandio, "open(pii)i", ("tmp_foo_bar", 0, 0), 0) + expect_done(sandio) def test_segfault_1(): class A: From pypy.commits at gmail.com Wed Aug 7 13:50:34 2019 From: pypy.commits at gmail.com (rlamy) Date: Wed, 07 Aug 2019 10:50:34 -0700 (PDT) Subject: [pypy-commit] pypy __debug__-optimize: Set default value for 'optimize' param to compile() to -1 Message-ID: <5d4b0f6a.1c69fb81.90253.da5a@mx.google.com> Author: Ronan Lamy Branch: __debug__-optimize Changeset: r97095:fca8f3eb36c1 Date: 2019-08-07 18:49 +0100 http://bitbucket.org/pypy/pypy/changeset/fca8f3eb36c1/ Log: Set default value for 'optimize' param to compile() to -1 diff --git a/pypy/module/__builtin__/compiling.py b/pypy/module/__builtin__/compiling.py --- a/pypy/module/__builtin__/compiling.py +++ b/pypy/module/__builtin__/compiling.py @@ -13,7 +13,7 @@ @unwrap_spec(filename='fsencode', mode='text', flags=int, dont_inherit=int, optimize=int) def compile(space, w_source, filename, mode, flags=0, dont_inherit=0, - optimize=0): + optimize=-1): """Compile the source string (a Python module, statement or expression) into a code object that can be executed by the exec statement or eval(). The filename will be used for run-time error messages. From pypy.commits at gmail.com Wed Aug 7 23:25:10 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 07 Aug 2019 20:25:10 -0700 (PDT) Subject: [pypy-commit] buildbot default: add aarch64 slave Message-ID: <5d4b9616.1c69fb81.ae704.3dbf@mx.google.com> Author: Matti Picus Branch: Changeset: r1085:6a9387c1c1ad Date: 2019-08-08 06:24 +0300 http://bitbucket.org/pypy/buildbot/changeset/6a9387c1c1ad/ Log: add aarch64 slave diff --git a/bot2/pypybuildbot/builds.py b/bot2/pypybuildbot/builds.py --- a/bot2/pypybuildbot/builds.py +++ b/bot2/pypybuildbot/builds.py @@ -30,6 +30,7 @@ #SpeedOldLock = locks.MasterLock('speed_old_lock', maxCount=2) # bencher4 has 8 cores, 32 GB RAM Bencher4Lock = locks.MasterLock('bencher4_lock', maxCount=4) +AARCH64Lock = locks.MasterLock('aarch64_lock', maxCount=2) # The cross translation machine can accomodate 2 jobs at the same time ARMCrossLock = locks.MasterLock('arm_cpu', maxCount=2) diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py --- a/bot2/pypybuildbot/master.py +++ b/bot2/pypybuildbot/master.py @@ -19,13 +19,13 @@ class CustomForceScheduler(ForceScheduler): def force(self, owner, builder_name, **kwargs): if not owner: - raise ValidationError, "Please write your name in the corresponding field." + raise ValidationError("Please write your name in the corresponding field.") return ForceScheduler.force(self, owner, builder_name, **kwargs) # Forbid "stop build" without a reason that starts with "!" def _checkStopBuild(self, reason=""): if ": !" 
not in reason: - raise ValidationError, "Please write a reason that starts with '!'." + raise ValidationError("Please write a reason that starts with '!'.") return _baseStopBuild(self, reason) _baseStopBuild = Build.stopBuild Build.stopBuild = _checkStopBuild @@ -70,6 +70,7 @@ WinSlaveLock = pypybuilds.WinSlaveLock #SpeedOldLock = pypybuilds.SpeedOldLock Bencher4Lock = pypybuilds.Bencher4Lock +AARCH64Lock = pypybuilds.Bencher4Lock pypyOwnTestFactory = pypybuilds.Own() pypyOwnTestFactoryWin = pypybuilds.Own(platform="win32") @@ -190,6 +191,7 @@ LINUX32OWN = "own-linux-x86-32" LINUX64OWN = "own-linux-x86-64" +AARCH64OWN = "own-linux-aarch64" LINUX_S390XOWN = "own-linux-s390x" MACOSX32OWN = "own-macosx-x86-32" WIN32OWN = "own-win-x86-32" @@ -197,6 +199,7 @@ LINUX32RPYTHON = "rpython-linux-x86-32" LINUX64RPYTHON = "rpython-linux-x86-64" +AARCH64RPYTHON = "rpython-linux-aarch64" LINUX_S390XRPYTHON = "rpython-linux-s390x" MACOSX32RPYTHON = "rpython-macosx-x86-32" WIN32RPYTHON = "rpython-win-x86-32" @@ -211,6 +214,7 @@ JITLINUX32 = "pypy-c-jit-linux-x86-32" JITLINUX64 = "pypy-c-jit-linux-x86-64" +JITAARCH64 = "pypy-c-jit-linux-aarch64" JITLINUX_S390X = 'pypy-c-jit-linux-s390x' JITMACOSX64 = "pypy-c-jit-macosx-x86-64" #JITMACOSX64_2 = "pypy-c-jit-macosx-x86-64-2" @@ -292,10 +296,12 @@ # linux tests LINUX32OWN, # on benchmarker4_32, uses all cores LINUX64OWN, # on bencher4, uses all cores + AARCH64OWN, WIN32OWN, # on SalsaSalsa LINUX_S390XOWN, JITLINUX32, # on benchmarker4_32, uses 1 core JITLINUX64, # on bencher4, uses 1 core + JITAARCH64, JITLINUX_S390X, #APPLVLLINUX32, #APPLVLLINUX64, # on bencher4, uses 1 core @@ -315,6 +321,7 @@ Nightly("nightly-0-01", [ LINUX32RPYTHON, # on benchermarker_32, uses all cores LINUX64RPYTHON, # on bencher4, uses all cores + AARCH64RPYTHON, WIN32RPYTHON, # on SalsaSalsa LINUX_S390XRPYTHON, ], branch='default', hour=0, minute=0, onlyIfChanged=True, @@ -346,7 +353,9 @@ LINUX32OWN, # on bencher4_32, uses all cores JITLINUX32, # on bencher4_32, uses 1 core LINUX64OWN, # on bencher4, uses all cores + AARCH64OWN, JITLINUX64, # on bencher4, uses 1 core + JITAARCH64, JITMACOSX64, # on xerxes JITWIN32, # on SalsaSalsa ], branch="py3.6", hour=3, minute=0, @@ -364,10 +373,12 @@ PYPYBUILDBOT, LINUX32OWN, LINUX64OWN, + AARCH64OWN, MACOSX32OWN, WIN32OWN, LINUX32RPYTHON, LINUX64RPYTHON, + AARCH64RPYTHON, MACOSX32RPYTHON, WIN32RPYTHON, @@ -380,6 +391,7 @@ JITLINUX32, JITLINUX64, + JITAARCH64, JITMACOSX64, JITWIN32, #JITFREEBSD964, @@ -431,6 +443,13 @@ "category": 'linux64', "locks": [Bencher4Lock.access('counting')], }, + {"name": AARCH64OWN, + "slavenames": ["aarch64_aws"], + "builddir": AARCH64OWN, + "factory": pypyOwnTestFactory, + "category": 'linux64', + "locks": [AARCH64Lock.access('counting')], + }, {"name": LINUX64RPYTHON, #"slavenames": ["bencher4", "speed-old"], "slavenames": ["bencher4", "benchmarker64"], @@ -439,6 +458,13 @@ "category": 'linux64', "locks": [Bencher4Lock.access('counting')], }, + {"name": AARCH64RPYTHON, + "slavenames": ["aarch64_aws"], + "builddir": AARCH64RPYTHON, + "factory": pypyRPythonTestFactory, + "category": 'linux64', + "locks": [AARCH64Lock.access('counting')], + }, {"name": APPLVLLINUX32, #"slavenames": ["allegro32"], "slavenames": ["benchmarker32"], @@ -487,6 +513,14 @@ 'category': 'linux64', "locks": [Bencher4Lock.access('counting')], }, + {'name': JITAARCH64, + #'slavenames': ["bencher4", "speed-old"], + 'slavenames': ["aarch64_aws"], + 'builddir': JITAARCH64, + 'factory': pypyJITTranslatedTestFactory64, + 'category': 'linux64', + 
"locks": [AARCH64Lock.access('counting')], + }, {"name": JITBENCH64, "slavenames": ["benchmarker"], "builddir": JITBENCH64, From pypy.commits at gmail.com Thu Aug 8 03:19:02 2019 From: pypy.commits at gmail.com (arigo) Date: Thu, 08 Aug 2019 00:19:02 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: hg merge default Message-ID: <5d4bcce6.1c69fb81.caa21.58f7@mx.google.com> Author: Armin Rigo Branch: py3.6 Changeset: r97096:5a152ed4712a Date: 2019-08-08 09:18 +0200 http://bitbucket.org/pypy/pypy/changeset/5a152ed4712a/ Log: hg merge default diff --git a/pypy/interpreter/pyparser/test/unittest_samples.py b/pypy/interpreter/pyparser/test/unittest_samples.py deleted file mode 100644 --- a/pypy/interpreter/pyparser/test/unittest_samples.py +++ /dev/null @@ -1,95 +0,0 @@ -"""test module for CPython / PyPy nested tuples comparison""" - -import os, os.path as osp -import sys -from pypy.interpreter.pyparser.pythonutil import python_parse, pypy_parse -from pprint import pprint -from pypy.interpreter.pyparser import grammar -grammar.DEBUG = False -from symbol import sym_name - - -def name(elt): - return "%s[%s]"% (sym_name.get(elt,elt),elt) - -def read_samples_dir(): - return [osp.join('samples', fname) for fname in os.listdir('samples') if fname.endswith('.py')] - -def print_sym_tuple(nested, level=0, limit=15, names=False, trace=()): - buf = [] - if level <= limit: - buf.append("%s(" % (" "*level)) - else: - buf.append("(") - for index, elt in enumerate(nested): - # Test if debugging and if on last element of error path - if trace and not trace[1:] and index == trace[0]: - buf.append('\n----> ') - if type(elt) is int: - if names: - buf.append(name(elt)) - else: - buf.append(str(elt)) - buf.append(', ') - elif type(elt) is str: - buf.append(repr(elt)) - else: - if level < limit: - buf.append('\n') - buf.extend(print_sym_tuple(elt, level+1, limit, - names, trace[1:])) - buf.append(')') - return buf - -def assert_tuples_equal(tup1, tup2, curpos = ()): - for index, (elt1, elt2) in enumerate(zip(tup1, tup2)): - if elt1 != elt2: - if type(elt1) is tuple and type(elt2) is tuple: - assert_tuples_equal(elt1, elt2, curpos + (index,)) - raise AssertionError('Found difference at %s : %s != %s' % - (curpos, name(elt1), name(elt2) ), curpos) - -from time import time, clock -def test_samples( samples ): - time_reports = {} - for sample in samples: - print "testing", sample - tstart1, cstart1 = time(), clock() - pypy_tuples = pypy_parse(sample) - tstart2, cstart2 = time(), clock() - python_tuples = python_parse(sample) - time_reports[sample] = (time() - tstart2, tstart2-tstart1, clock() - cstart2, cstart2-cstart1 ) - #print "-"*10, "PyPy parse results", "-"*10 - #print ''.join(print_sym_tuple(pypy_tuples, names=True)) - #print "-"*10, "CPython parse results", "-"*10 - #print ''.join(print_sym_tuple(python_tuples, names=True)) - print - try: - assert_tuples_equal(pypy_tuples, python_tuples) - except AssertionError as e: - error_path = e.args[-1] - print "ERROR PATH =", error_path - print "="*80 - print file(sample).read() - print "="*80 - print "-"*10, "PyPy parse results", "-"*10 - print ''.join(print_sym_tuple(pypy_tuples, names=True, trace=error_path)) - print "-"*10, "CPython parse results", "-"*10 - print ''.join(print_sym_tuple(python_tuples, names=True, trace=error_path)) - print "Failed on (%s)" % sample - # raise - pprint(time_reports) - -if __name__=="__main__": - import getopt - opts, args = getopt.getopt( sys.argv[1:], "d:", [] ) - for opt, val in opts: - if opt == "-d": - pass -# set_debug(int(val)) - if 
args: - samples = args - else: - samples = read_samples_dir() - - test_samples( samples ) From pypy.commits at gmail.com Thu Aug 8 03:37:26 2019 From: pypy.commits at gmail.com (arigo) Date: Thu, 08 Aug 2019 00:37:26 -0700 (PDT) Subject: [pypy-commit] pypy default: Issue #3049 Message-ID: <5d4bd136.1c69fb81.d90d3.e3c8@mx.google.com> Author: Armin Rigo Branch: Changeset: r97097:8819acbf633a Date: 2019-08-08 09:27 +0200 http://bitbucket.org/pypy/pypy/changeset/8819acbf633a/ Log: Issue #3049 There were a missing "except SocketError" converting interp-level exceptions to app-level ones---or, as it turns out, eating them completely in a couple of cases. diff --git a/pypy/module/_socket/interp_socket.py b/pypy/module/_socket/interp_socket.py --- a/pypy/module/_socket/interp_socket.py +++ b/pypy/module/_socket/interp_socket.py @@ -312,7 +312,10 @@ raise converted_error(space, e) if buflen < 0 or buflen > 1024: raise explicit_socket_error(space, "getsockopt buflen out of range") - return space.newbytes(self.sock.getsockopt(level, optname, buflen)) + try: + return space.newbytes(self.sock.getsockopt(level, optname, buflen)) + except SocketError as e: + raise converted_error(space, e) def gettimeout_w(self, space): """gettimeout() -> timeout @@ -438,7 +441,10 @@ setblocking(True) is equivalent to settimeout(None); setblocking(False) is equivalent to settimeout(0.0). """ - self.sock.setblocking(flag) + try: + self.sock.setblocking(flag) + except SocketError as e: + pass # CPython 2 only: never raise anything here @unwrap_spec(level=int, optname=int) def setsockopt_w(self, space, level, optname, w_optval): @@ -477,7 +483,10 @@ timeout = space.float_w(w_timeout) if timeout < 0.0: raise oefmt(space.w_ValueError, "Timeout value out of range") - self.sock.settimeout(timeout) + try: + self.sock.settimeout(timeout) + except SocketError as e: + pass # CPython 2 only: never raise anything here @unwrap_spec(nbytes=int, flags=int) def recv_into_w(self, space, w_buffer, nbytes=0, flags=0): diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -872,6 +872,14 @@ cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM) assert cli.family == socket.AF_INET + def test_missing_error_catching(self): + from _socket import socket, error + s = socket() + s.close() + s.settimeout(1) # EBADF, but ignored on Python 2 + s.setblocking(True) # EBADF, but ignored on Python 2 + raises(error, s.getsockopt, 42, 84, 8) # EBADF + class AppTestErrno: spaceconfig = {'usemodules': ['_socket']} From pypy.commits at gmail.com Thu Aug 8 03:37:28 2019 From: pypy.commits at gmail.com (arigo) Date: Thu, 08 Aug 2019 00:37:28 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: hg merge default Message-ID: <5d4bd138.1c69fb81.4dbca.96ae@mx.google.com> Author: Armin Rigo Branch: py3.6 Changeset: r97098:d84524cf112f Date: 2019-08-08 09:36 +0200 http://bitbucket.org/pypy/pypy/changeset/d84524cf112f/ Log: hg merge default diff --git a/pypy/module/_socket/interp_socket.py b/pypy/module/_socket/interp_socket.py --- a/pypy/module/_socket/interp_socket.py +++ b/pypy/module/_socket/interp_socket.py @@ -398,7 +398,10 @@ raise converted_error(space, e) if buflen < 0 or buflen > 1024: raise explicit_socket_error(space, "getsockopt buflen out of range") - return space.newbytes(self.sock.getsockopt(level, optname, buflen)) + try: + return space.newbytes(self.sock.getsockopt(level, optname, buflen)) + except SocketError as e: 
+ raise converted_error(space, e) def gettimeout_w(self, space): """gettimeout() -> timeout diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -949,6 +949,13 @@ cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM) assert cli.family == socket.AF_INET + def test_missing_error_catching(self): + from _socket import socket, error + s = socket() + s.close() + raises(error, s.settimeout, 1) # EBADF + raises(error, s.setblocking, True) # EBADF + raises(error, s.getsockopt, 42, 84, 8) # EBADF def test_accept_non_inheritable(self): import _socket, os From pypy.commits at gmail.com Thu Aug 8 05:52:06 2019 From: pypy.commits at gmail.com (mattip) Date: Thu, 08 Aug 2019 02:52:06 -0700 (PDT) Subject: [pypy-commit] buildbot default: add category for aarch64 Message-ID: <5d4bf0c6.1c69fb81.af1d8.7238@mx.google.com> Author: Matti Picus Branch: Changeset: r1086:a36c3b0d16d1 Date: 2019-08-08 12:51 +0300 http://bitbucket.org/pypy/buildbot/changeset/a36c3b0d16d1/ Log: add category for aarch64 diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py --- a/bot2/pypybuildbot/master.py +++ b/bot2/pypybuildbot/master.py @@ -447,7 +447,7 @@ "slavenames": ["aarch64_aws"], "builddir": AARCH64OWN, "factory": pypyOwnTestFactory, - "category": 'linux64', + "category": 'aarch64', "locks": [AARCH64Lock.access('counting')], }, {"name": LINUX64RPYTHON, @@ -462,7 +462,7 @@ "slavenames": ["aarch64_aws"], "builddir": AARCH64RPYTHON, "factory": pypyRPythonTestFactory, - "category": 'linux64', + "category": 'aarch64', "locks": [AARCH64Lock.access('counting')], }, {"name": APPLVLLINUX32, @@ -518,7 +518,7 @@ 'slavenames': ["aarch64_aws"], 'builddir': JITAARCH64, 'factory': pypyJITTranslatedTestFactory64, - 'category': 'linux64', + 'category': 'aarch64', "locks": [AARCH64Lock.access('counting')], }, {"name": JITBENCH64, diff --git a/bot2/pypybuildbot/pypylist.py b/bot2/pypybuildbot/pypylist.py --- a/bot2/pypybuildbot/pypylist.py +++ b/bot2/pypybuildbot/pypylist.py @@ -25,8 +25,9 @@ } PLATFORM_PRIORITY = { - 'linux': 100, - 'linux64': 50, + 'linux64': 100, + 'linux': 50, + 'aarch64': 40, 'osx': 30, 'win32': 20, 's390x': 10, @@ -38,6 +39,7 @@ PLATFORMS = { 'linux': 'linux-x86-32', 'linux64': 'linux-x86-64', + 'aarch64': 'aarch64', 'osx': 'macosx-x86-32', 'osx64': 'macosx-x86-64', 'win32': 'win-x86-32', diff --git a/bot2/pypybuildbot/summary.py b/bot2/pypybuildbot/summary.py --- a/bot2/pypybuildbot/summary.py +++ b/bot2/pypybuildbot/summary.py @@ -665,6 +665,17 @@ class Summary(HtmlResource): + PLATFORM_PRIORITY = { + 'linux64': 1, + 'linux32': 2, + 'aarch64': 3, + 'mac64': 4, + 'win32': 5, + 'linux-s390x': 6, + 'linux-armhf': 7, + 'benchmark-run': 8, + } + def __init__(self, categories=[], branch_order_prefixes=[]): HtmlResource.__init__(self) self.putChild('longrepr', LongRepr()) @@ -826,13 +837,7 @@ break else: branch_key = (len(self.branch_order_prefixes)+1, branch) - for i, catprefix in enumerate(self.categories): - if category.startswith(catprefix): - # kill '-' to make 'linux32' sort before 'linux-armel' - category = category.replace('-', '') - break - else: - i = len(self.categories) + i = self.PLATFORM_PRIORITY.get(category, len(self.categories)) cat_key = (i, category) return cat_key + branch_key From pypy.commits at gmail.com Thu Aug 8 06:53:04 2019 From: pypy.commits at gmail.com (mattip) Date: Thu, 08 Aug 2019 03:53:04 -0700 (PDT) Subject: [pypy-commit] pypy 
py3.6: test, fix formatting with a memoryview Message-ID: <5d4bff10.1c69fb81.6eec6.8364@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97100:e5ea7d6f832f Date: 2019-08-08 13:52 +0300 http://bitbucket.org/pypy/pypy/changeset/e5ea7d6f832f/ Log: test, fix formatting with a memoryview diff --git a/pypy/objspace/std/formatting.py b/pypy/objspace/std/formatting.py --- a/pypy/objspace/std/formatting.py +++ b/pypy/objspace/std/formatting.py @@ -516,7 +516,7 @@ if do_unicode: self.unknown_fmtchar() space = self.space - # cpython explicitly checks for bytes & bytearray + # follow logic in cpython bytesobject.c format_obj if space.isinstance_w(w_value, space.w_bytes): self.std_wp(space.bytes_w(w_value)) return @@ -534,6 +534,11 @@ "__bytes__ returned non-bytes (type '%T')", w_bytes) self.std_wp(space.bytes_w(w_bytes)) return + if space.isinstance_w(w_value, space.w_memoryview): + buf = w_value.buffer_w(space, 0) + # convert the array of the buffer to a py 2 string + self.std_wp(buf.as_str()) + return raise oefmt(space.w_TypeError, "requires bytes, or an object that " diff --git a/pypy/objspace/std/test/test_stringformat.py b/pypy/objspace/std/test/test_stringformat.py --- a/pypy/objspace/std/test/test_stringformat.py +++ b/pypy/objspace/std/test/test_stringformat.py @@ -455,6 +455,7 @@ assert b"<%s>" % Foo() == b"<123>" raises(TypeError, 'b"<%s>" % 42') raises(TypeError, 'b"<%s>" % "?"') + assert b"<%s>" % memoryview(b"X") == b"" class AppTestBytearray: @@ -510,3 +511,4 @@ assert bytearray(b"<%s>") % Foo() == bytearray(b"<123>") raises(TypeError, 'bytearray(b"<%s>") % 42') raises(TypeError, 'bytearray(b"<%s>") % "?"') + assert bytearray(b"<%s>") % memoryview(b"X") == bytearray(b"") From pypy.commits at gmail.com Thu Aug 8 06:53:02 2019 From: pypy.commits at gmail.com (mattip) Date: Thu, 08 Aug 2019 03:53:02 -0700 (PDT) Subject: [pypy-commit] pypy stdlib-2.7.16: merge default into branch Message-ID: <5d4bff0e.1c69fb81.4d86d.27b6@mx.google.com> Author: Matti Picus Branch: stdlib-2.7.16 Changeset: r97099:5e792e528dc7 Date: 2019-06-26 17:06 +0300 http://bitbucket.org/pypy/pypy/changeset/5e792e528dc7/ Log: merge default into branch diff too long, truncating to 2000 out of 9174 lines diff --git a/extra_tests/cffi_tests/cffi0/test_parsing.py b/extra_tests/cffi_tests/cffi0/test_parsing.py --- a/extra_tests/cffi_tests/cffi0/test_parsing.py +++ b/extra_tests/cffi_tests/cffi0/test_parsing.py @@ -410,7 +410,17 @@ def test_enum(): ffi = FFI() ffi.cdef(""" - enum Enum { POS = +1, TWO = 2, NIL = 0, NEG = -1, OP = (POS+TWO)-1}; + enum Enum { + POS = +1, + TWO = 2, + NIL = 0, + NEG = -1, + ADDSUB = (POS+TWO)-1, + DIVMULINT = (3 * 3) / 2, + SHIFT = (1 << 3) >> 1, + BINOPS = (0x7 & 0x1) | 0x8, + XOR = 0xf ^ 0xa + }; """) needs_dlopen_none() C = ffi.dlopen(None) @@ -418,7 +428,11 @@ assert C.TWO == 2 assert C.NIL == 0 assert C.NEG == -1 - assert C.OP == 2 + assert C.ADDSUB == 2 + assert C.DIVMULINT == 4 + assert C.SHIFT == 4 + assert C.BINOPS == 0b1001 + assert C.XOR == 0b0101 def test_stdcall(): ffi = FFI() diff --git a/extra_tests/cffi_tests/cffi0/test_verify.py b/extra_tests/cffi_tests/cffi0/test_verify.py --- a/extra_tests/cffi_tests/cffi0/test_verify.py +++ b/extra_tests/cffi_tests/cffi0/test_verify.py @@ -2535,3 +2535,29 @@ x.p = p x.cyclic = x del p, x + +def test_arithmetic_in_cdef(): + for a in [0, 11, 15]: + ffi = FFI() + ffi.cdef(""" + enum FOO { + DIVNN = ((-?) / (-3)), + DIVNP = ((-?) / (+3)), + DIVPN = ((+?) / (-3)), + MODNN = ((-?) % (-3)), + MODNP = ((-?) % (+3)), + MODPN = ((+?) 
% (-3)), + }; + """.replace('?', str(a))) + lib = ffi.verify(""" + enum FOO { + DIVNN = ((-?) / (-3)), + DIVNP = ((-?) / (+3)), + DIVPN = ((+?) / (-3)), + MODNN = ((-?) % (-3)), + MODNP = ((-?) % (+3)), + MODPN = ((+?) % (-3)), + }; + """.replace('?', str(a))) + # the verify() crashes if the values in the enum are different from + # the values we computed ourselves from the cdef() diff --git a/extra_tests/cffi_tests/cffi0/test_zintegration.py b/extra_tests/cffi_tests/cffi0/test_zintegration.py --- a/extra_tests/cffi_tests/cffi0/test_zintegration.py +++ b/extra_tests/cffi_tests/cffi0/test_zintegration.py @@ -2,11 +2,13 @@ import py, os, sys, shutil import subprocess from extra_tests.cffi_tests.udir import udir +import pytest if sys.platform == 'win32': - py.test.skip('snippets do not run on win32') + pytestmark = pytest.mark.skip('snippets do not run on win32') if sys.version_info < (2, 7): - py.test.skip('fails e.g. on a Debian/Ubuntu which patches virtualenv' + pytestmark = pytest.mark.skip( + 'fails e.g. on a Debian/Ubuntu which patches virtualenv' ' in a non-2.6-friendly way') def create_venv(name): diff --git a/extra_tests/cffi_tests/cffi1/test_recompiler.py b/extra_tests/cffi_tests/cffi1/test_recompiler.py --- a/extra_tests/cffi_tests/cffi1/test_recompiler.py +++ b/extra_tests/cffi_tests/cffi1/test_recompiler.py @@ -2339,3 +2339,77 @@ typedef int foo_t; struct foo_s { void (*x)(foo_t); }; """) py.test.raises(TypeError, ffi.new, "struct foo_s *") + +def test_from_buffer_struct(): + ffi = FFI() + ffi.cdef("""struct foo_s { int a, b; };""") + lib = verify(ffi, "test_from_buffer_struct_p", """ + struct foo_s { int a, b; }; + """) + p = ffi.new("struct foo_s *", [-219239, 58974983]) + q = ffi.from_buffer("struct foo_s[]", ffi.buffer(p)) + assert ffi.typeof(q) == ffi.typeof("struct foo_s[]") + assert len(q) == 1 + assert q[0].a == p.a + assert q[0].b == p.b + assert q == p + q = ffi.from_buffer("struct foo_s *", ffi.buffer(p)) + assert ffi.typeof(q) == ffi.typeof("struct foo_s *") + assert q.a == p.a + assert q.b == p.b + assert q[0].a == p.a + assert q[0].b == p.b + assert q == p + +def test_unnamed_bitfield_1(): + ffi = FFI() + ffi.cdef("""struct A { char : 1; };""") + lib = verify(ffi, "test_unnamed_bitfield_1", """ + struct A { char : 1; }; + """) + p = ffi.new("struct A *") + assert ffi.sizeof(p[0]) == 1 + # Note: on gcc, the type name is ignored for anonymous bitfields + # and that's why the result is 1. On MSVC, the result is + # sizeof("char") which is also 1. + +def test_unnamed_bitfield_2(): + ffi = FFI() + ffi.cdef("""struct A { + short c : 1; short : 1; short d : 1; short : 1; };""") + lib = verify(ffi, "test_unnamed_bitfield_2", """ + struct A { + short c : 1; short : 1; short d : 1; short : 1; + }; + """) + p = ffi.new("struct A *") + assert ffi.sizeof(p[0]) == ffi.sizeof("short") + +def test_unnamed_bitfield_3(): + ffi = FFI() + ffi.cdef("""struct A { struct { char : 1; char : 1; } b; };""") + lib = verify(ffi, "test_unnamed_bitfield_3", """ + struct A { struct { char : 1; char : 1; } b; }; + """) + p = ffi.new("struct A *") + assert ffi.sizeof(p[0]) == 1 + # Note: on gcc, the type name is ignored for anonymous bitfields + # and that's why the result is 1. On MSVC, the result is + # sizeof("char") which is also 1. 
+ +def test_unnamed_bitfield_4(): + ffi = FFI() + ffi.cdef("""struct A { struct { + unsigned c : 1; unsigned : 1; unsigned d : 1; unsigned : 1; } a; + }; + struct B { struct A a; };""") + lib = verify(ffi, "test_unnamed_bitfield_4", """ + struct A { struct { + unsigned c : 1; unsigned : 1; unsigned d : 1; unsigned : 1; } a; + }; + struct B { struct A a; }; + """) + b = ffi.new("struct B *") + a = ffi.new("struct A *") + assert ffi.sizeof(a[0]) == ffi.sizeof("unsigned") + assert ffi.sizeof(b[0]) == ffi.sizeof(a[0]) diff --git a/extra_tests/cffi_tests/embedding/test_basic.py b/extra_tests/cffi_tests/embedding/test_basic.py --- a/extra_tests/cffi_tests/embedding/test_basic.py +++ b/extra_tests/cffi_tests/embedding/test_basic.py @@ -64,8 +64,8 @@ output = popen.stdout.read() err = popen.wait() if err: - raise OSError("popen failed with exit code %r: %r" % ( - err, args)) + raise OSError(("popen failed with exit code %r: %r\n\n%s" % ( + err, args, output)).rstrip()) print(output.rstrip()) return output diff --git a/extra_tests/cffi_tests/embedding/test_performance.py b/extra_tests/cffi_tests/embedding/test_performance.py --- a/extra_tests/cffi_tests/embedding/test_performance.py +++ b/extra_tests/cffi_tests/embedding/test_performance.py @@ -3,8 +3,8 @@ from extra_tests.cffi_tests.embedding.test_basic import EmbeddingTests if sys.platform == 'win32': - import py - py.test.skip("written with POSIX functions") + import pytest + pytestmark = pytest.mark.skip("written with POSIX functions") class TestPerformance(EmbeddingTests): diff --git a/lib-python/2.7/ctypes/test/test_byteswap.py b/lib-python/2.7/ctypes/test/test_byteswap.py --- a/lib-python/2.7/ctypes/test/test_byteswap.py +++ b/lib-python/2.7/ctypes/test/test_byteswap.py @@ -2,7 +2,6 @@ from binascii import hexlify from ctypes import * -from ctypes.test import xfail def bin(s): return hexlify(memoryview(s)).upper() diff --git a/lib-python/2.7/ctypes/test/test_loading.py b/lib-python/2.7/ctypes/test/test_loading.py --- a/lib-python/2.7/ctypes/test/test_loading.py +++ b/lib-python/2.7/ctypes/test/test_loading.py @@ -2,7 +2,7 @@ import sys, unittest import os from ctypes.util import find_library -from ctypes.test import is_resource_enabled, xfail +from ctypes.test import is_resource_enabled import test.test_support as support libc_name = None @@ -87,7 +87,6 @@ self.assertRaises(AttributeError, dll.__getitem__, 1234) - @xfail @unittest.skipUnless(os.name == "nt", 'Windows-specific test') def test_1703286_A(self): from _ctypes import LoadLibrary, FreeLibrary @@ -99,7 +98,6 @@ handle = LoadLibrary("advapi32") FreeLibrary(handle) - @xfail @unittest.skipUnless(os.name == "nt", 'Windows-specific test') def test_1703286_B(self): # Since on winXP 64-bit advapi32 loads like described diff --git a/lib-python/2.7/distutils/sysconfig_pypy.py b/lib-python/2.7/distutils/sysconfig_pypy.py --- a/lib-python/2.7/distutils/sysconfig_pypy.py +++ b/lib-python/2.7/distutils/sysconfig_pypy.py @@ -86,6 +86,7 @@ arch = platform.machine() g['LDSHARED'] += ' -undefined dynamic_lookup' g['CC'] += ' -arch %s' % (arch,) + g['MACOSX_DEPLOYMENT_TARGET'] = '10.14' global _config_vars _config_vars = g diff --git a/lib-python/2.7/test/test_ssl.py b/lib-python/2.7/test/test_ssl.py --- a/lib-python/2.7/test/test_ssl.py +++ b/lib-python/2.7/test/test_ssl.py @@ -801,7 +801,11 @@ ctx.set_ciphers("^$:,;?*'dorothyx") @skip_if_broken_ubuntu_ssl - def test_options(self): + def _test_options(self): + ''' + Disable this test, it is too flaky. 
Different platforms define + different defaults + ''' ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) # OP_ALL | OP_NO_SSLv2 | OP_NO_SSLv3 is the default value default = (ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3) diff --git a/lib-python/2.7/test/test_timeit.py b/lib-python/2.7/test/test_timeit.py --- a/lib-python/2.7/test/test_timeit.py +++ b/lib-python/2.7/test/test_timeit.py @@ -317,9 +317,9 @@ def test_main_recommends_perf(self): s = self.run_main(seconds_per_increment=2.0, switches=['-n35', '-s', 'print("CustomSetup")']) self.assertIn(dedent("""\ - WARNING: timeit is a very unreliable tool. use perf or something else for real measurements + WARNING: timeit is a very unreliable tool. use pyperf or something else for real measurements """), s) - self.assertIn("-m pip install perf", s) + self.assertIn("-m pip install pyperf", s) diff --git a/lib-python/2.7/timeit.py b/lib-python/2.7/timeit.py --- a/lib-python/2.7/timeit.py +++ b/lib-python/2.7/timeit.py @@ -308,10 +308,10 @@ return 0 setup = "\n".join(setup) or "pass" - print "WARNING: timeit is a very unreliable tool. use perf or something else for real measurements" + print "WARNING: timeit is a very unreliable tool. use pyperf or something else for real measurements" executable = os.path.basename(sys.executable) - print "%s -m pip install perf" % executable - print "%s -m perf timeit %s" % ( + print "%s -m pip install pyperf" % executable + print "%s -m pyperf timeit %s" % ( executable, " ".join([(arg if arg.startswith("-") else repr(arg)) for arg in origargs]), ) diff --git a/lib-python/conftest.py b/lib-python/conftest.py --- a/lib-python/conftest.py +++ b/lib-python/conftest.py @@ -177,7 +177,7 @@ RegrTest('test_copy_reg.py', core=True), RegrTest('test_cpickle.py', core=True), RegrTest('test_cprofile.py'), - RegrTest('test_crypt.py', usemodules='crypt'), + RegrTest('test_crypt.py'), RegrTest('test_csv.py', usemodules='_csv'), RegrTest('test_ctypes.py', usemodules="_rawffi thread cpyext"), RegrTest('test_curses.py'), diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py @@ -40,6 +40,11 @@ * supported */ static const long Cryptography_HAS_OP_NO_COMPRESSION; +/* Internally invented symbol to tell us if SSL_OP_ENABLE_MIDDLEBOX_COMPAT is + * supported + */ +static const long Cryptography_HAS_OP_ENABLE_MIDDLEBOX_COMPAT; + static const long Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING; static const long Cryptography_HAS_SSL_SET_SSL_CTX; static const long Cryptography_HAS_SSL_OP_NO_TICKET; @@ -73,6 +78,7 @@ static const long SSL_OP_NETSCAPE_CHALLENGE_BUG; static const long SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG; static const long SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG; +static const long SSL_OP_ENABLE_MIDDLEBOX_COMPAT; static const long SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER; static const long SSL_OP_MSIE_SSLV2_RSA_PADDING; static const long SSL_OP_SSLEAY_080_CLIENT_DH_BUG; @@ -562,6 +568,13 @@ const long SSL_OP_NO_COMPRESSION = 0; #endif +#ifdef SSL_OP_ENABLE_MIDDLEBOX_COMPAT +static const long Cryptography_HAS_OP_ENABLE_MIDDLEBOX_COMPAT = 1; +#else +static const long Cryptography_HAS_OP_ENABLE_MIDDLEBOX_COMPAT = 0; +const long SSL_OP_ENABLE_MIDDLEBOX_COMPAT = 0; +#endif + #ifdef SSL_OP_NO_TLSv1_1 static const long Cryptography_HAS_TLSv1_1 = 1; #else diff --git a/lib_pypy/_cffi_ssl/_stdssl/__init__.py b/lib_pypy/_cffi_ssl/_stdssl/__init__.py --- a/lib_pypy/_cffi_ssl/_stdssl/__init__.py +++ 
b/lib_pypy/_cffi_ssl/_stdssl/__init__.py @@ -87,12 +87,12 @@ PROTOCOL_TLSv1 = 3 PROTOCOL_TLSv1_1 = 4 PROTOCOL_TLSv1_2 = 5 +# PROTOCOL_TLS_CLIENT = 0x10 +# PROTOCOL_TLS_SERVER = 0x11 if lib.Cryptography_HAS_TLSv1_3: HAS_TLSv1_3 = True else: HAS_TLSv1_3 = False -PROTOCOL_TLS_CLIENT = 0x10 -PROTOCOL_TLS_SERVER = 0x11 _PROTOCOL_NAMES = (name for name in dir(lib) if name.startswith('PROTOCOL_')) @@ -219,6 +219,7 @@ def _new__ssl_socket(sslctx, sock, socket_type, server_hostname, ssl_sock): self = _SSLSocket(sslctx) ctx = sslctx.ctx + self.owner = ssl_sock # weakref if server_hostname: if isinstance(server_hostname, unicode): @@ -289,7 +290,8 @@ def owner(self, value): if value is None: self._owner = None - self._owner = weakref.ref(value) + else: + self._owner = weakref.ref(value) @property def context(self): @@ -337,7 +339,7 @@ sockstate = SOCKET_OPERATION_OK if sockstate == SOCKET_HAS_TIMED_OUT: - raise socket.timeout("The handshake operation timed out") + raise SSLError("The handshake operation timed out") elif sockstate == SOCKET_HAS_BEEN_CLOSED: raise SSLError("Underlying socket has been closed.") elif sockstate == SOCKET_TOO_LARGE_FOR_SELECT: @@ -781,10 +783,10 @@ method = lib.SSLv2_method() elif protocol == PROTOCOL_SSLv23: method = lib.SSLv23_method() - elif protocol == PROTOCOL_TLS_CLIENT: - method = lib.SSLv23_client_method() - elif protocol == PROTOCOL_TLS_SERVER: - method = lib.SSLv23_server_method() + # elif protocol == PROTOCOL_TLS_CLIENT: + # method = lib.SSLv23_client_method() + # elif protocol == PROTOCOL_TLS_SERVER: + # method = lib.SSLv23_server_method() else: raise ValueError("invalid protocol version") @@ -795,7 +797,7 @@ # Don't check host name by default self._check_hostname = False - if protocol == PROTOCOL_TLS_CLIENT: + if 0 and protocol == PROTOCOL_TLS_CLIENT: self._check_hostname = True self.verify_mode = CERT_REQUIRED else: @@ -811,7 +813,7 @@ # Minimal security flags for server and client side context. # Client sockets ignore server-side parameters. options |= lib.SSL_OP_NO_COMPRESSION - options |= lib.SSL_OP_CIPHER_SERVER_PREFERENCE + # options |= lib.SSL_OP_CIPHER_SERVER_PREFERENCE options |= lib.SSL_OP_SINGLE_DH_USE options |= lib.SSL_OP_SINGLE_ECDH_USE lib.SSL_CTX_set_options(self.ctx, options) diff --git a/lib_pypy/_cffi_ssl/_stdssl/win32_extra.py b/lib_pypy/_cffi_ssl/_stdssl/win32_extra.py --- a/lib_pypy/_cffi_ssl/_stdssl/win32_extra.py +++ b/lib_pypy/_cffi_ssl/_stdssl/win32_extra.py @@ -1,101 +1,101 @@ from _pypy_openssl import lib, ffi - - -def enum_certificates(store_name): - """Retrieve certificates from Windows' cert store. - -store_name may be one of 'CA', 'ROOT' or 'MY'. The system may provide -more cert storages, too. The function returns a list of (bytes, -encoding_type, trust) tuples. The encoding_type flag can be interpreted -with X509_ASN_ENCODING or PKCS_7_ASN_ENCODING. The trust setting is either -a set of OIDs or the boolean True. 
- """ - hStore = lib.CertOpenStore(lib.CERT_STORE_PROV_SYSTEM_A, 0, ffi.NULL, - lib.CERT_STORE_READONLY_FLAG | lib.CERT_SYSTEM_STORE_LOCAL_MACHINE, - bytes(store_name, "ascii")) - if hStore == ffi.NULL: - raise WindowsError(*ffi.getwinerror()) - - result = [] - pCertCtx = ffi.NULL - try: - while True: - pCertCtx = lib.CertEnumCertificatesInStore(hStore, pCertCtx) - if pCertCtx == ffi.NULL: - break - cert = ffi.buffer(pCertCtx.pbCertEncoded, pCertCtx.cbCertEncoded)[:] - enc = certEncodingType(pCertCtx.dwCertEncodingType) - keyusage = parseKeyUsage(pCertCtx, lib.CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG) - if keyusage is True: - keyusage = parseKeyUsage(pCertCtx, lib.CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG) - result.append((cert, enc, keyusage)) - finally: - if pCertCtx != ffi.NULL: - lib.CertFreeCertificateContext(pCertCtx) - if not lib.CertCloseStore(hStore, 0): - # This error case might shadow another exception. - raise WindowsError(*ffi.getwinerror()) - return result - - -def enum_crls(store_name): - """Retrieve CRLs from Windows' cert store. - -store_name may be one of 'CA', 'ROOT' or 'MY'. The system may provide -more cert storages, too. The function returns a list of (bytes, -encoding_type) tuples. The encoding_type flag can be interpreted with -X509_ASN_ENCODING or PKCS_7_ASN_ENCODING.""" - hStore = lib.CertOpenStore(lib.CERT_STORE_PROV_SYSTEM_A, 0, ffi.NULL, - lib.CERT_STORE_READONLY_FLAG | lib.CERT_SYSTEM_STORE_LOCAL_MACHINE, - bytes(store_name, "ascii")) - if hStore == ffi.NULL: - raise WindowsError(*ffi.getwinerror()) - - result = [] - pCrlCtx = ffi.NULL - try: - while True: - pCrlCtx = lib.CertEnumCRLsInStore(hStore, pCrlCtx) - if pCrlCtx == ffi.NULL: - break - crl = ffi.buffer(pCrlCtx.pbCrlEncoded, pCrlCtx.cbCrlEncoded)[:] - enc = certEncodingType(pCrlCtx.dwCertEncodingType) - result.append((crl, enc)) - finally: - if pCrlCtx != ffi.NULL: - lib.CertFreeCRLContext(pCrlCtx) - if not lib.CertCloseStore(hStore, 0): - # This error case might shadow another exception. - raise WindowsError(*ffi.getwinerror()) - return result - - -def certEncodingType(encodingType): - if encodingType == lib.X509_ASN_ENCODING: - return "x509_asn" - if encodingType == lib.PKCS_7_ASN_ENCODING: - return "pkcs_7_asn" - return encodingType - -def parseKeyUsage(pCertCtx, flags): - pSize = ffi.new("DWORD *") - if not lib.CertGetEnhancedKeyUsage(pCertCtx, flags, ffi.NULL, pSize): - error_with_message = ffi.getwinerror() - if error_with_message[0] == lib.CRYPT_E_NOT_FOUND: - return True - raise WindowsError(*error_with_message) - - pUsageMem = ffi.new("char[]", pSize[0]) - pUsage = ffi.cast("PCERT_ENHKEY_USAGE", pUsageMem) - if not lib.CertGetEnhancedKeyUsage(pCertCtx, flags, pUsage, pSize): - error_with_message = ffi.getwinerror() - if error_with_message[0] == lib.CRYPT_E_NOT_FOUND: - return True - raise WindowsError(*error_with_message) - - retval = set() - for i in range(pUsage.cUsageIdentifier): - if pUsage.rgpszUsageIdentifier[i]: - oid = ffi.string(pUsage.rgpszUsageIdentifier[i]).decode('ascii') - retval.add(oid) - return retval + + +def enum_certificates(store_name): + """Retrieve certificates from Windows' cert store. + +store_name may be one of 'CA', 'ROOT' or 'MY'. The system may provide +more cert storages, too. The function returns a list of (bytes, +encoding_type, trust) tuples. The encoding_type flag can be interpreted +with X509_ASN_ENCODING or PKCS_7_ASN_ENCODING. The trust setting is either +a set of OIDs or the boolean True. 
+ """ + hStore = lib.CertOpenStore(lib.CERT_STORE_PROV_SYSTEM_A, 0, ffi.NULL, + lib.CERT_STORE_READONLY_FLAG | lib.CERT_SYSTEM_STORE_LOCAL_MACHINE, + bytes(store_name)) + if hStore == ffi.NULL: + raise WindowsError(*ffi.getwinerror()) + + result = [] + pCertCtx = ffi.NULL + try: + while True: + pCertCtx = lib.CertEnumCertificatesInStore(hStore, pCertCtx) + if pCertCtx == ffi.NULL: + break + cert = ffi.buffer(pCertCtx.pbCertEncoded, pCertCtx.cbCertEncoded)[:] + enc = certEncodingType(pCertCtx.dwCertEncodingType) + keyusage = parseKeyUsage(pCertCtx, lib.CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG) + if keyusage is True: + keyusage = parseKeyUsage(pCertCtx, lib.CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG) + result.append((cert, enc, keyusage)) + finally: + if pCertCtx != ffi.NULL: + lib.CertFreeCertificateContext(pCertCtx) + if not lib.CertCloseStore(hStore, 0): + # This error case might shadow another exception. + raise WindowsError(*ffi.getwinerror()) + return result + + +def enum_crls(store_name): + """Retrieve CRLs from Windows' cert store. + +store_name may be one of 'CA', 'ROOT' or 'MY'. The system may provide +more cert storages, too. The function returns a list of (bytes, +encoding_type) tuples. The encoding_type flag can be interpreted with +X509_ASN_ENCODING or PKCS_7_ASN_ENCODING.""" + hStore = lib.CertOpenStore(lib.CERT_STORE_PROV_SYSTEM_A, 0, ffi.NULL, + lib.CERT_STORE_READONLY_FLAG | lib.CERT_SYSTEM_STORE_LOCAL_MACHINE, + bytes(store_name)) + if hStore == ffi.NULL: + raise WindowsError(*ffi.getwinerror()) + + result = [] + pCrlCtx = ffi.NULL + try: + while True: + pCrlCtx = lib.CertEnumCRLsInStore(hStore, pCrlCtx) + if pCrlCtx == ffi.NULL: + break + crl = ffi.buffer(pCrlCtx.pbCrlEncoded, pCrlCtx.cbCrlEncoded)[:] + enc = certEncodingType(pCrlCtx.dwCertEncodingType) + result.append((crl, enc)) + finally: + if pCrlCtx != ffi.NULL: + lib.CertFreeCRLContext(pCrlCtx) + if not lib.CertCloseStore(hStore, 0): + # This error case might shadow another exception. + raise WindowsError(*ffi.getwinerror()) + return result + + +def certEncodingType(encodingType): + if encodingType == lib.X509_ASN_ENCODING: + return "x509_asn" + if encodingType == lib.PKCS_7_ASN_ENCODING: + return "pkcs_7_asn" + return encodingType + +def parseKeyUsage(pCertCtx, flags): + pSize = ffi.new("DWORD *") + if not lib.CertGetEnhancedKeyUsage(pCertCtx, flags, ffi.NULL, pSize): + error_with_message = ffi.getwinerror() + if error_with_message[0] == lib.CRYPT_E_NOT_FOUND: + return True + raise WindowsError(*error_with_message) + + pUsageMem = ffi.new("char[]", pSize[0]) + pUsage = ffi.cast("PCERT_ENHKEY_USAGE", pUsageMem) + if not lib.CertGetEnhancedKeyUsage(pCertCtx, flags, pUsage, pSize): + error_with_message = ffi.getwinerror() + if error_with_message[0] == lib.CRYPT_E_NOT_FOUND: + return True + raise WindowsError(*error_with_message) + + retval = set() + for i in range(pUsage.cUsageIdentifier): + if pUsage.rgpszUsageIdentifier[i]: + oid = ffi.string(pUsage.rgpszUsageIdentifier[i]).decode('ascii') + retval.add(oid) + return retval diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.12.3 +Version: 1.13.0 Summary: Foreign Function Interface for Python calling C code. 
Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -5,8 +5,8 @@ from .error import CDefError, FFIError, VerificationError, VerificationMissing from .error import PkgConfigError -__version__ = "1.12.3" -__version_info__ = (1, 12, 3) +__version__ = "1.13.0" +__version_info__ = (1, 13, 0) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. It may be older than __version__ diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h --- a/lib_pypy/cffi/_embedding.h +++ b/lib_pypy/cffi/_embedding.h @@ -145,6 +145,7 @@ int result; PyGILState_STATE state; PyObject *pycode=NULL, *global_dict=NULL, *x; + PyObject *builtins; state = PyGILState_Ensure(); @@ -169,7 +170,7 @@ global_dict = PyDict_New(); if (global_dict == NULL) goto error; - PyObject *builtins = PyEval_GetBuiltins(); + builtins = PyEval_GetBuiltins(); if (builtins == NULL) goto error; if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) @@ -223,7 +224,7 @@ if (f != NULL && f != Py_None) { PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.12.3" + "\ncompiled with cffi version: 1.13.0" "\n_cffi_backend module: ", f); modules = PyImport_GetModuleDict(); mod = PyDict_GetItemString(modules, "_cffi_backend"); diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -858,19 +858,39 @@ "the actual array length in this context" % exprnode.coord.line) # - if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and - exprnode.op == '+'): - return (self._parse_constant(exprnode.left) + - self._parse_constant(exprnode.right)) - # - if (isinstance(exprnode, pycparser.c_ast.BinaryOp) and - exprnode.op == '-'): - return (self._parse_constant(exprnode.left) - - self._parse_constant(exprnode.right)) + if isinstance(exprnode, pycparser.c_ast.BinaryOp): + left = self._parse_constant(exprnode.left) + right = self._parse_constant(exprnode.right) + if exprnode.op == '+': + return left + right + elif exprnode.op == '-': + return left - right + elif exprnode.op == '*': + return left * right + elif exprnode.op == '/': + return self._c_div(left, right) + elif exprnode.op == '%': + return left - self._c_div(left, right) * right + elif exprnode.op == '<<': + return left << right + elif exprnode.op == '>>': + return left >> right + elif exprnode.op == '&': + return left & right + elif exprnode.op == '|': + return left | right + elif exprnode.op == '^': + return left ^ right # raise FFIError(":%d: unsupported expression: expected a " "simple numeric constant" % exprnode.coord.line) + def _c_div(self, a, b): + result = a // b + if ((a < 0) ^ (b < 0)) and (a % b) != 0: + result += 1 + return result + def _build_enum_type(self, explicit_name, decls): if decls is not None: partial = False diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -855,8 +855,9 @@ try: if ftype.is_integer_type() or fbitsize >= 0: # accept all integers, but complain on float or double - prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " - "an integer */" % (fname, cname, fname)) + if fname != '': + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) continue # only accept exactly the type declared, except that '[]' # 
is interpreted as a '*' and so will match any array length. diff --git a/lib_pypy/crypt/__init__.py b/lib_pypy/crypt/__init__.py new file mode 100644 --- /dev/null +++ b/lib_pypy/crypt/__init__.py @@ -0,0 +1,21 @@ +""" +CFFI based implementation of the crypt module +""" + +import sys +import cffi + +ffi = cffi.FFI() +ffi.cdef('char *crypt(char *word, char *salt);') + +try: + lib = ffi.dlopen('crypt') +except OSError: + raise ImportError('crypt not available') + + +def crypt(word, salt): + res = lib.crypt(word, salt) + if not res: + return None + return ffi.string(res) diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -31,13 +31,13 @@ working_modules = default_modules.copy() working_modules.update([ "_socket", "unicodedata", "mmap", "fcntl", "_locale", "pwd", - "select", "zipimport", "_lsprof", "crypt", "signal", "_rawffi", "termios", + "select", "zipimport", "_lsprof", "signal", "_rawffi", "termios", "zlib", "bz2", "struct", "_md5", "_sha", "_minimal_curses", "cStringIO", "thread", "itertools", "pyexpat", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "micronumpy", "_continuation", "_cffi_backend", "_csv", "_cppyy", "_pypyjson", "_jitlog", - #" _ssl", "_hashlib" + #" _ssl", "_hashlib", "crypt" ]) import rpython.rlib.rvmprof.cintf @@ -65,7 +65,8 @@ working_modules.add("_winreg") # unix only modules for name in ["crypt", "fcntl", "pwd", "termios", "_minimal_curses"]: - working_modules.remove(name) + if name in working_modules: + working_modules.remove(name) if name in translation_modules: translation_modules.remove(name) diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -34,3 +34,14 @@ .. branch: fix-vmprof-memory-tracking Fix a bug that prevent memory-tracking in vmprof working on PyPy. + +.. branch: optimizeopt-cleanup + +Cleanup optimizeopt + +.. branch: copystrcontents-in-rewrite + +Remove ``copystrcontent`` and ``copyunicodecontent`` in the backends. +Instead, replace it in ``rewrite.py`` with a direct call to ``memcpy()`` and +new basic operation, ``load_effective_address``, which the backend can +even decide not to implement. 
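A minimal sketch of the effect, for readers of the whatsnew entry above (the operand
names and the str_basesize / memcpy_descr placeholders follow the expectations in
rpython/jit/backend/llsupport/test/test_rewrite.py further down; the real offsets and
call descriptor are set up by the GC layer, and for str the base offset already
accounts for the extra null character). A trace fragment

    [p0, p1, i0, i1, i_len]
    copystrcontent(p0, p1, i0, i1, i_len)

is rewritten into two address computations plus a plain memcpy() call when the
backend supports the new operation:

    [p0, p1, i0, i1, i_len]
    i2 = load_effective_address(p0, i0, str_basesize, 0)
    i3 = load_effective_address(p1, i1, str_basesize, 0)
    call_n(ConstClass(memcpy_fn), i3, i2, i_len, descr=memcpy_descr)

Backends that do not implement load_effective_address get the same addresses built
out of int_lshift/int_add instead, as exercised by
test_rewrite_copystrcontents_without_load_effective_address below.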
diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi from rpython.rtyper.lltypesystem import rffi -VERSION = "1.12.3" +VERSION = "1.13.0" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: diff --git a/pypy/module/_cffi_backend/cdataobj.py b/pypy/module/_cffi_backend/cdataobj.py --- a/pypy/module/_cffi_backend/cdataobj.py +++ b/pypy/module/_cffi_backend/cdataobj.py @@ -677,11 +677,14 @@ return self.length def _repr_extra(self): - if self.w_keepalive is not None: - name = self.space.type(self.w_keepalive).name + from pypy.module._cffi_backend import ctypearray + if self.w_keepalive is None: + return "buffer RELEASED" + obj_tp_name = self.space.type(self.w_keepalive).name + if isinstance(self.ctype, ctypearray.W_CTypeArray): + return "buffer len %d from '%s' object" % (self.length, obj_tp_name) else: - name = "(released)" - return "buffer len %d from '%s' object" % (self.length, name) + return "buffer from '%s' object" % (obj_tp_name,) def enter_exit(self, exit_now): # for now, limited effect on PyPy diff --git a/pypy/module/_cffi_backend/func.py b/pypy/module/_cffi_backend/func.py --- a/pypy/module/_cffi_backend/func.py +++ b/pypy/module/_cffi_backend/func.py @@ -112,9 +112,10 @@ @unwrap_spec(w_ctype=ctypeobj.W_CType, require_writable=int) def from_buffer(space, w_ctype, w_x, require_writable=0): - from pypy.module._cffi_backend import ctypearray - if not isinstance(w_ctype, ctypearray.W_CTypeArray): - raise oefmt(space.w_TypeError, "expected an array ctype, got '%s'", + from pypy.module._cffi_backend import ctypeptr, ctypearray + if not isinstance(w_ctype, ctypeptr.W_CTypePtrOrArray): + raise oefmt(space.w_TypeError, + "expected a poiunter or array ctype, got '%s'", w_ctype.name) if space.isinstance_w(w_x, space.w_unicode): raise oefmt(space.w_TypeError, @@ -135,33 +136,36 @@ "raw address on PyPy", w_x) # buffersize = buf.getlength() - arraylength = w_ctype.length - if arraylength >= 0: - # it's an array with a fixed length; make sure that the - # buffer contains enough bytes. - if buffersize < w_ctype.size: - raise oefmt(space.w_ValueError, - "buffer is too small (%d bytes) for '%s' (%d bytes)", - buffersize, w_ctype.name, w_ctype.size) + if not isinstance(w_ctype, ctypearray.W_CTypeArray): + arraylength = buffersize # number of bytes, not used so far else: - # it's an open 'array[]' - itemsize = w_ctype.ctitem.size - if itemsize == 1: - # fast path, performance only - arraylength = buffersize - elif itemsize > 0: - # give it as many items as fit the buffer. Ignore a - # partial last element. - arraylength = buffersize / itemsize + arraylength = w_ctype.length + if arraylength >= 0: + # it's an array with a fixed length; make sure that the + # buffer contains enough bytes. + if buffersize < w_ctype.size: + raise oefmt(space.w_ValueError, + "buffer is too small (%d bytes) for '%s' (%d bytes)", + buffersize, w_ctype.name, w_ctype.size) else: - # it's an array 'empty[]'. Unsupported obscure case: - # the problem is that setting the length of the result - # to anything large (like SSIZE_T_MAX) is dangerous, - # because if someone tries to loop over it, it will - # turn effectively into an infinite loop. 
- raise oefmt(space.w_ZeroDivisionError, - "from_buffer('%s', ..): the actual length of the array " - "cannot be computed", w_ctype.name) + # it's an open 'array[]' + itemsize = w_ctype.ctitem.size + if itemsize == 1: + # fast path, performance only + arraylength = buffersize + elif itemsize > 0: + # give it as many items as fit the buffer. Ignore a + # partial last element. + arraylength = buffersize / itemsize + else: + # it's an array 'empty[]'. Unsupported obscure case: + # the problem is that setting the length of the result + # to anything large (like SSIZE_T_MAX) is dangerous, + # because if someone tries to loop over it, it will + # turn effectively into an infinite loop. + raise oefmt(space.w_ZeroDivisionError, + "from_buffer('%s', ..): the actual length of the array " + "cannot be computed", w_ctype.name) # return cdataobj.W_CDataFromBuffer(space, _cdata, arraylength, w_ctype, buf, w_x) diff --git a/pypy/module/_cffi_backend/newtype.py b/pypy/module/_cffi_backend/newtype.py --- a/pypy/module/_cffi_backend/newtype.py +++ b/pypy/module/_cffi_backend/newtype.py @@ -549,10 +549,11 @@ if sflags & SF_GCC_BIG_ENDIAN: bitshift = 8 * ftype.size - fbitsize- bitshift - fld = ctypestruct.W_CField(ftype, field_offset_bytes, - bitshift, fbitsize, fflags) - fields_list.append(fld) - fields_dict[fname] = fld + if fname != '': + fld = ctypestruct.W_CField(ftype, field_offset_bytes, + bitshift, fbitsize, fflags) + fields_list.append(fld) + fields_dict[fname] = fld if boffset > boffsetmax: boffsetmax = boffset diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.12.3", ("This test_c.py file is for testing a version" +assert __version__ == "1.13.0", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): @@ -3830,7 +3830,9 @@ BIntP = new_pointer_type(BInt) BIntA = new_array_type(BIntP, None) lst = [-12345678, 87654321, 489148] - bytestring = buffer(newp(BIntA, lst))[:] + b'XYZ' + bytestring = bytearray(buffer(newp(BIntA, lst))[:] + b'XYZ') + lst2 = lst + [42, -999999999] + bytestring2 = bytearray(buffer(newp(BIntA, lst2))[:] + b'XYZ') # p1 = from_buffer(BIntA, bytestring) # int[] assert typeof(p1) is BIntA @@ -3844,7 +3846,19 @@ p1[-1] # py.test.raises(TypeError, from_buffer, BInt, bytestring) - py.test.raises(TypeError, from_buffer, BIntP, bytestring) + # + p2 = from_buffer(BIntP, bytestring) # int * + assert p2 == p1 or 'PY_DOT_PY' in globals() + # note: on py.py ^^^, bytearray buffers are not emulated well enough + assert typeof(p2) is BIntP + assert p2[0] == lst[0] + assert p2[1] == lst[1] + assert p2[2] == lst[2] + # hopefully does not crash, but doesn't raise an exception: + p2[3] + p2[-1] + # not enough data even for one, but this is not enforced: + from_buffer(BIntP, b"") # BIntA2 = new_array_type(BIntP, 2) p2 = from_buffer(BIntA2, bytestring) # int[2] @@ -3856,7 +3870,7 @@ p2[2] with pytest.raises(IndexError): p2[-1] - assert p2 == p1 + assert p2 == p1 or 'PY_DOT_PY' in globals() # BIntA4 = new_array_type(BIntP, 4) # int[4]: too big py.test.raises(ValueError, from_buffer, BIntA4, bytestring) @@ -3866,13 +3880,37 @@ ('a2', BInt, -1)]) BStructP = new_pointer_type(BStruct) BStructA = new_array_type(BStructP, 
None) - p1 = from_buffer(BStructA, bytestring) # struct[] - assert len(p1) == 1 + p1 = from_buffer(BStructA, bytestring2) # struct[] + assert len(p1) == 2 assert typeof(p1) is BStructA - assert p1[0].a1 == lst[0] - assert p1[0].a2 == lst[1] + assert p1[0].a1 == lst2[0] + assert p1[0].a2 == lst2[1] + assert p1[1].a1 == lst2[2] + assert p1[1].a2 == lst2[3] with pytest.raises(IndexError): - p1[1] + p1[2] + with pytest.raises(IndexError): + p1[-1] + assert repr(p1) == "" + # + p2 = from_buffer(BStructP, bytestring2) # 'struct *' + assert p2 == p1 or 'PY_DOT_PY' in globals() + assert typeof(p2) is BStructP + assert p2.a1 == lst2[0] + assert p2.a2 == lst2[1] + assert p2[0].a1 == lst2[0] + assert p2[0].a2 == lst2[1] + assert p2[1].a1 == lst2[2] + assert p2[1].a2 == lst2[3] + # does not crash: + p2[2] + p2[-1] + # not enough data even for one, but this is not enforced: + from_buffer(BStructP, b"") + from_buffer(BStructP, b"1234567") + # + release(p1) + assert repr(p1) == "" # BEmptyStruct = new_struct_type("empty") complete_struct_or_union(BEmptyStruct, [], Ellipsis, 0) @@ -3886,7 +3924,37 @@ p1 = from_buffer(BEmptyStructA5, bytestring) # struct empty[5] assert typeof(p1) is BEmptyStructA5 assert len(p1) == 5 - assert cast(BIntP, p1) == from_buffer(BIntA, bytestring) + assert (cast(BIntP, p1) == from_buffer(BIntA, bytestring) + or 'PY_DOT_PY' in globals()) + # + BVarStruct = new_struct_type("varfoo") + BVarStructP = new_pointer_type(BVarStruct) + complete_struct_or_union(BVarStruct, [('a1', BInt, -1), + ('va', BIntA, -1)]) + with pytest.raises(TypeError): + from_buffer(BVarStruct, bytestring) + pv = from_buffer(BVarStructP, bytestring) # varfoo * + assert pv.a1 == lst[0] + assert pv.va[0] == lst[1] + assert pv.va[1] == lst[2] + assert sizeof(pv[0]) == 1 * size_of_int() + with pytest.raises(TypeError): + len(pv.va) + # hopefully does not crash, but doesn't raise an exception: + pv.va[2] + pv.va[-1] + # not enough data even for one, but this is not enforced: + from_buffer(BVarStructP, b"") + assert repr(pv) == "" + assert repr(pv[0]).startswith("" + assert repr(pv[0]).startswith("" release(p) assert p[2] == b"z" # true so far, but might change to raise RuntimeError + assert repr(p) == "" release(p) # no effect def test_explicit_release_from_buffer_contextmgr(): @@ -4325,6 +4395,7 @@ with p: assert p[2] == b"z" assert p[2] == b"z" # true so far, but might change to raise RuntimeError + assert repr(p) == "" release(p) # no effect def test_explicit_release_bytearray_on_cpython(): diff --git a/pypy/module/cpyext/bufferobject.py b/pypy/module/cpyext/bufferobject.py --- a/pypy/module/cpyext/bufferobject.py +++ b/pypy/module/cpyext/bufferobject.py @@ -5,6 +5,7 @@ cpython_api, Py_ssize_t, cpython_struct, bootstrap_function, slot_function, PyObjectFields, PyObject) from pypy.module.cpyext.pyobject import make_typedescr, decref, make_ref +from pypy.module.cpyext.buffer import CBuffer from pypy.module.array.interp_array import ArrayBuffer from pypy.objspace.std.bufferobject import W_Buffer @@ -33,7 +34,7 @@ def buffer_attach(space, py_obj, w_obj, w_userdata=None): """ - Fills a newly allocated PyBufferObject with the given (str) buffer object. + Fills a newly allocated PyBufferObject with the given buffer object. 
""" py_buf = rffi.cast(PyBufferObject, py_obj) py_buf.c_b_offset = 0 @@ -60,7 +61,17 @@ py_buf.c_b_base = make_ref(space, w_base) py_buf.c_b_ptr = rffi.cast(rffi.VOIDP, buf.w_array._charbuf_start()) py_buf.c_b_size = buf.getlength() + elif isinstance(buf, CBuffer): + py_buf.c_b_base = make_ref(space, buf.view.w_obj) + py_buf.c_b_ptr = rffi.cast(rffi.VOIDP, buf.view.ptr) + py_buf.c_b_size = buf.getlength() else: + # Raising in attach will segfault. + # It would be nice if we could handle the error more gracefully + # with something like this + # py_buf.c_b_base = lltype.nullptr(PyObject.TO) + # py_buf.c_b_ptr = rffi.cast(rffi.VOIDP, 0) + # py_buf.c_b_size = buf.getlength() raise oefmt(space.w_NotImplementedError, "buffer flavor not supported") diff --git a/pypy/module/cpyext/test/array.c b/pypy/module/cpyext/test/array.c --- a/pypy/module/cpyext/test/array.c +++ b/pypy/module/cpyext/test/array.c @@ -2573,6 +2573,11 @@ Py_RETURN_NONE; } +static PyObject * +passthrough(PyObject *self, PyObject* args) { + Py_INCREF(args); + return args; +} /*********************** Install Module **************************/ static PyMethodDef a_methods[] = { @@ -2584,6 +2589,7 @@ {"same_dealloc", (PyCFunction)same_dealloc, METH_VARARGS, NULL}, {"getitem", (PyCFunction)getitem, METH_VARARGS, NULL}, {"subclass_with_attribute", (PyCFunction)subclass_with_attribute, METH_VARARGS, NULL}, + {"passthrough", (PyCFunction)passthrough, METH_O, NULL}, {NULL, NULL, 0, NULL} /* Sentinel */ }; diff --git a/pypy/module/cpyext/test/test_arraymodule.py b/pypy/module/cpyext/test/test_arraymodule.py --- a/pypy/module/cpyext/test/test_arraymodule.py +++ b/pypy/module/cpyext/test/test_arraymodule.py @@ -79,6 +79,9 @@ assert str(buffer('a') + arr) == "a" + expected # python2 special cases empty-buffer + obj assert str(buffer('') + arr) == "array('i', [1, 2, 3, 4])" + # make sure buffer_attach is called + buf2 = module.passthrough(buf) + assert str(buf2) == str(buf) def test_releasebuffer(self): module = self.import_module(name='array') diff --git a/pypy/module/pypyjit/test_pypy_c/test_ffi.py b/pypy/module/pypyjit/test_pypy_c/test_ffi.py --- a/pypy/module/pypyjit/test_pypy_c/test_ffi.py +++ b/pypy/module/pypyjit/test_pypy_c/test_ffi.py @@ -425,9 +425,11 @@ setarrayitem_raw(i153, 0, i106, descr=...) p156 = getfield_gc_r(p48, descr=...) i158 = getfield_raw_i(..., descr=...) + i160 = int_sub(i158, 16) + setfield_raw(#, i160, descr=...) setfield_gc(p48, p49, descr=...) setfield_gc(p134, ConstPtr(null), descr=...) - i160 = int_lt(i158, 0) + i160 = int_lt(i160, 0) guard_false(i160, descr=...) jump(..., descr=...) """) diff --git a/pypy/module/thread/test/test_local.py b/pypy/module/thread/test/test_local.py --- a/pypy/module/thread/test/test_local.py +++ b/pypy/module/thread/test/test_local.py @@ -5,6 +5,7 @@ def test_local_1(self): import thread + import gc from thread import _local as tlsobject freed = [] class X: @@ -34,8 +35,9 @@ thread.start_new_thread(f, (i,)) self.waitfor(lambda: len(ok) == 20, delay=3) assert ok == 20*[True] # see stdout/stderr for failures in the threads + gc.collect(); gc.collect(); gc.collect() - self.waitfor(lambda: len(freed) >= 40) + self.waitfor(lambda: len(freed) >= 40, delay=20) assert len(freed) == 40 # in theory, all X objects should have been freed by now. 
Note that # Python's own thread._local objects suffer from the very same "bug" that diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -78,8 +78,9 @@ W_ModuleDictObject.__init__(w_obj, space, strategy, storage) return w_obj elif instance: - from pypy.objspace.std.mapdict import MapDictStrategy - strategy = space.fromcache(MapDictStrategy) + from pypy.objspace.std.mapdict import make_instance_dict + assert w_type is None + return make_instance_dict(space) elif strdict or module: assert w_type is None strategy = space.fromcache(BytesDictStrategy) diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -753,6 +753,7 @@ self.space = space def get_empty_storage(self): + # mainly used for tests w_result = Object() terminator = self.space.fromcache(get_terminator_for_dicts) w_result._mapdict_init_empty(terminator) @@ -865,6 +866,11 @@ def iteritems(self, w_dict): return MapDictIteratorItems(self.space, self, w_dict) +def make_instance_dict(space): + w_fake_object = Object() + terminator = space.fromcache(get_terminator_for_dicts) + w_fake_object._mapdict_init_empty(terminator) + return w_fake_object.getdict(space) def materialize_r_dict(space, obj, dict_w): map = obj._get_mapdict_map() diff --git a/pypy/objspace/std/test/test_mapdict.py b/pypy/objspace/std/test/test_mapdict.py --- a/pypy/objspace/std/test/test_mapdict.py +++ b/pypy/objspace/std/test/test_mapdict.py @@ -897,6 +897,17 @@ d = x.__dict__ assert list(__pypy__.reversed_dict(d)) == d.keys()[::-1] + def test_bug_materialize_huge_dict(self): + import __pypy__ + d = __pypy__.newdict("instance") + for i in range(100): + d[str(i)] = i + assert len(d) == 100 + + for key in d: + assert d[key] == int(key) + + class AppTestWithMapDictAndCounters(object): spaceconfig = {"objspace.std.withmethodcachecounter": True} diff --git a/pypy/objspace/std/unicodeobject.py b/pypy/objspace/std/unicodeobject.py --- a/pypy/objspace/std/unicodeobject.py +++ b/pypy/objspace/std/unicodeobject.py @@ -1,5 +1,7 @@ """The builtin unicode implementation""" +import sys + from rpython.rlib.objectmodel import ( compute_hash, compute_unique_id, import_from_mixin, always_inline, enforceargs, newlist_hint, specialize, we_are_translated) @@ -41,13 +43,12 @@ self._utf8 = utf8str self._length = length self._index_storage = rutf8.null_storage() - if not we_are_translated(): + if not we_are_translated() and not sys.platform == 'win32': # utf8str must always be a valid utf8 string, except maybe with # explicit surrogate characters---which .decode('utf-8') doesn't # special-case in Python 2, which is exactly what we want here assert length == len(utf8str.decode('utf-8')) - @staticmethod def from_utf8builder(builder): return W_UnicodeObject( @@ -1095,11 +1096,11 @@ if rutf8.has_surrogates(utf8): utf8 = rutf8.reencode_utf8_with_surrogates(utf8) return space.newbytes(utf8) - return encode(space, w_obj, encoding, errors) + return encode(space, w_obj, encoding, errors) def decode_object(space, w_obj, encoding, errors): - from pypy.module._codecs.interp_codecs import lookup_codec, decode + from pypy.module._codecs.interp_codecs import lookup_codec, decode if errors is None or errors == 'strict': # fast paths if encoding is None: @@ -1109,7 +1110,7 @@ unicodehelper.check_ascii_or_raise(space, s) return space.newutf8(s, len(s)) if encoding == 'utf-8' or encoding == 'utf8': - 
if (space.isinstance_w(w_obj, space.w_unicode) or + if (space.isinstance_w(w_obj, space.w_unicode) or space.isinstance_w(w_obj, space.w_bytes)): s = space.utf8_w(w_obj) else: @@ -1718,34 +1719,28 @@ def unicode_to_decimal_w(space, w_unistr): if not isinstance(w_unistr, W_UnicodeObject): raise oefmt(space.w_TypeError, "expected unicode, got '%T'", w_unistr) - unistr = w_unistr._utf8 - result = ['\0'] * w_unistr._length - digits = ['0', '1', '2', '3', '4', - '5', '6', '7', '8', '9'] - res_pos = 0 - iter = rutf8.Utf8StringIterator(unistr) - for uchr in iter: + utf8 = w_unistr._utf8 + result = StringBuilder(w_unistr._len()) + it = rutf8.Utf8StringIterator(utf8) + for uchr in it: if W_UnicodeObject._isspace(uchr): - result[res_pos] = ' ' - res_pos += 1 + result.append(' ') continue - try: - result[res_pos] = digits[unicodedb.decimal(uchr)] - except KeyError: - if 0 < uchr < 256: - result[res_pos] = chr(uchr) - else: + if not (0 < uchr < 256): + try: + uchr = ord('0') + unicodedb.decimal(uchr) + except KeyError: w_encoding = space.newtext('decimal') - pos = iter.get_pos() + pos = it.get_pos() w_start = space.newint(pos) - w_end = space.newint(pos+1) + w_end = space.newint(pos + 1) w_reason = space.newtext('invalid decimal Unicode string') raise OperationError(space.w_UnicodeEncodeError, - space.newtuple([w_encoding, w_unistr, - w_start, w_end, - w_reason])) - res_pos += 1 - return ''.join(result) + space.newtuple([w_encoding, w_unistr, + w_start, w_end, + w_reason])) + result.append(chr(uchr)) + return result.build() _repr_function = rutf8.make_utf8_escape_function( diff --git a/requirements.txt b/requirements.txt --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,5 @@ +--only-binary vmprof + cffi>=1.4.0 # parse log files in rvmprof tests diff --git a/rpython/doc/jit/optimizer.rst b/rpython/doc/jit/optimizer.rst --- a/rpython/doc/jit/optimizer.rst +++ b/rpython/doc/jit/optimizer.rst @@ -13,7 +13,7 @@ Before some optimizations are explained in more detail, it is essential to understand how traces look like. -The optimizer comes with a test suit. It contains many trace +The optimizer comes with a test suite. It contains many trace examples and you might want to take a look at it (in `rpython/jit/metainterp/optimizeopt/test/*.py`). The allowed operations can be found in `rpython/jit/metainterp/resoperation.py`. @@ -21,7 +21,7 @@ [p0,i0,i1] label(p0, i0, i1) - i2 = getarray_item_raw(p0, i0, descr=) + i2 = getarrayitem_raw(p0, i0, descr=) i3 = int_add(i1,i2) i4 = int_add(i0,1) i5 = int_le(i4, 100) # lower-or-equal @@ -32,7 +32,7 @@ to compare the Python code that constructed the trace:: from array import array - a = array('i',range(101)) + a = array('i', range(101)) sum = 0; i = 0 while i <= 100: # can be seen as label sum += a[i] @@ -131,20 +131,16 @@ Whenever such an operation is encountered (e.g. ``y = x & 0``), no operation is emitted. Instead the variable y is made equal to 0 -(= ``make_equal_to(op.result, 0)``). The variables found in a trace are -instances of Box classes that can be found in -`rpython/jit/metainterp/history.py`. `OptValue` wraps those variables again -and maps the boxes to the optimization values in the optimizer. When a -value is made equal, the two variable's boxes are made to point to the same -`OptValue` instance. +(= ``make_constant_int(op, 0)``). The variables found in a trace are instances +of classes that can be found in `rpython/jit/metainterp/history.py`. When a +value is made equal to another, its box is made to point to the other one. 
-**NOTE: this OptValue organization is currently being refactored in a branch.** Pure optimization ----------------- -Is interwoven into the basic optimizer. It saves operations, results, -arguments to be known to have pure semantics. +The 'pure' optimizations interwoven into the basic optimizer. It saves +operations, results, arguments to be known to have pure semantics. "Pure" here means the same as the ``jit.elidable`` decorator: free of "observable" side effects and referentially transparent diff --git a/rpython/jit/backend/arm/opassembler.py b/rpython/jit/backend/arm/opassembler.py --- a/rpython/jit/backend/arm/opassembler.py +++ b/rpython/jit/backend/arm/opassembler.py @@ -834,73 +834,12 @@ else: assert 0 - #from ../x86/regalloc.py:928 ff. - def emit_op_copystrcontent(self, op, arglocs, regalloc, fcond): - assert len(arglocs) == 0 - self._emit_copystrcontent(op, regalloc, fcond, is_unicode=False) + def emit_op_load_effective_address(self, op, arglocs, regalloc, fcond): + static_ofs = op.getarg(2).getint() + scale = op.getarg(3).getint() + self._gen_address(arglocs[2], arglocs[0], arglocs[1], scale, static_ofs) return fcond - def emit_op_copyunicodecontent(self, op, arglocs, regalloc, fcond): - assert len(arglocs) == 0 - self._emit_copystrcontent(op, regalloc, fcond, is_unicode=True) - return fcond - - def _emit_copystrcontent(self, op, regalloc, fcond, is_unicode): - # compute the source address - args = op.getarglist() - base_loc = regalloc.rm.make_sure_var_in_reg(args[0], args) - ofs_loc = regalloc.rm.make_sure_var_in_reg(args[2], args) - assert args[0] is not args[1] # forbidden case of aliasing - srcaddr_box = TempVar() - forbidden_vars = [args[1], args[3], args[4], srcaddr_box] - srcaddr_loc = regalloc.rm.force_allocate_reg(srcaddr_box, forbidden_vars) - self._gen_address_inside_string(base_loc, ofs_loc, srcaddr_loc, - is_unicode=is_unicode) - # compute the destination address - base_loc = regalloc.rm.make_sure_var_in_reg(args[1], forbidden_vars) - ofs_loc = regalloc.rm.make_sure_var_in_reg(args[3], forbidden_vars) - forbidden_vars = [args[4], srcaddr_box] - dstaddr_box = TempVar() - dstaddr_loc = regalloc.rm.force_allocate_reg(dstaddr_box, forbidden_vars) - self._gen_address_inside_string(base_loc, ofs_loc, dstaddr_loc, - is_unicode=is_unicode) - # compute the length in bytes - length_box = args[4] - length_loc = regalloc.loc(length_box) - if is_unicode: - forbidden_vars = [srcaddr_box, dstaddr_box] - bytes_box = TempVar() - bytes_loc = regalloc.rm.force_allocate_reg(bytes_box, forbidden_vars) - scale = self._get_unicode_item_scale() - if not length_loc.is_core_reg(): - self.regalloc_mov(length_loc, bytes_loc) - length_loc = bytes_loc - assert length_loc.is_core_reg() - self.mc.MOV_ri(r.ip.value, 1 << scale) - self.mc.MUL(bytes_loc.value, r.ip.value, length_loc.value) - length_box = bytes_box - length_loc = bytes_loc - # call memcpy() - regalloc.before_call() - self.simple_call_no_collect(imm(self.memcpy_addr), - [dstaddr_loc, srcaddr_loc, length_loc]) - regalloc.rm.possibly_free_var(length_box) - regalloc.rm.possibly_free_var(dstaddr_box) - regalloc.rm.possibly_free_var(srcaddr_box) - - def _gen_address_inside_string(self, baseloc, ofsloc, resloc, is_unicode): - if is_unicode: - ofs_items, _, _ = symbolic.get_array_token(rstr.UNICODE, - self.cpu.translate_support_code) - scale = self._get_unicode_item_scale() - else: - ofs_items, itemsize, _ = symbolic.get_array_token(rstr.STR, - self.cpu.translate_support_code) - assert itemsize == 1 - ofs_items -= 1 # for the extra null 
character - scale = 0 - self._gen_address(resloc, baseloc, ofsloc, scale, ofs_items) - # result = base_loc + (scaled_loc << scale) + static_offset def _gen_address(self, result, base_loc, scaled_loc, scale=0, static_offset=0): assert scaled_loc.is_core_reg() @@ -915,16 +854,6 @@ self.mc.ADD_rr(result.value, base_loc.value, scaled_loc.value) self.mc.ADD_ri(result.value, result.value, static_offset) - def _get_unicode_item_scale(self): - _, itemsize, _ = symbolic.get_array_token(rstr.UNICODE, - self.cpu.translate_support_code) - if itemsize == 4: - return 2 - elif itemsize == 2: - return 1 - else: - raise AssertionError("bad unicode item size") - def store_force_descr(self, op, fail_locs, frame_depth): pos = self.mc.currpos() guard_token = self.build_guard_token(op, frame_depth, fail_locs, pos, c.AL) diff --git a/rpython/jit/backend/arm/regalloc.py b/rpython/jit/backend/arm/regalloc.py --- a/rpython/jit/backend/arm/regalloc.py +++ b/rpython/jit/backend/arm/regalloc.py @@ -873,8 +873,6 @@ prepare_op_gc_load_indexed_r = _prepare_op_gc_load_indexed prepare_op_gc_load_indexed_f = _prepare_op_gc_load_indexed - prepare_op_copystrcontent = void - prepare_op_copyunicodecontent = void prepare_op_zero_array = void def _prepare_op_same_as(self, op, fcond): @@ -899,6 +897,13 @@ resloc = self.force_allocate_reg(op) return [resloc] + def prepare_op_load_effective_address(self, op, fcond): + args = op.getarglist() + arg0 = self.make_sure_var_in_reg(args[0], args) + arg1 = self.make_sure_var_in_reg(args[1], args) + res = self.force_allocate_reg(op) + return [arg0, arg1, res] + def prepare_op_call_malloc_nursery(self, op, fcond): size_box = op.getarg(0) assert isinstance(size_box, ConstInt) diff --git a/rpython/jit/backend/arm/runner.py b/rpython/jit/backend/arm/runner.py --- a/rpython/jit/backend/arm/runner.py +++ b/rpython/jit/backend/arm/runner.py @@ -23,6 +23,7 @@ supports_floats = True supports_longlong = True supports_singlefloats = True + supports_load_effective_address = True from rpython.jit.backend.arm.arch import JITFRAME_FIXED_SIZE all_reg_indexes = range(len(all_regs)) diff --git a/rpython/jit/backend/llsupport/gc.py b/rpython/jit/backend/llsupport/gc.py --- a/rpython/jit/backend/llsupport/gc.py +++ b/rpython/jit/backend/llsupport/gc.py @@ -14,6 +14,7 @@ from rpython.jit.metainterp.support import ptr2int from rpython.jit.backend.llsupport import symbolic, jitframe from rpython.jit.backend.llsupport.symbolic import WORD +from rpython.jit.backend.llsupport.memcpy import memcpy_fn from rpython.jit.backend.llsupport.descr import SizeDescr, ArrayDescr, FieldDescr from rpython.jit.backend.llsupport.descr import GcCache, get_field_descr from rpython.jit.backend.llsupport.descr import get_array_descr @@ -36,6 +37,11 @@ self.fielddescr_vtable = get_field_descr(self, rclass.OBJECT, 'typeptr') self._generated_functions = [] + self.memcpy_fn = memcpy_fn + self.memcpy_descr = get_call_descr(self, + [lltype.Signed, lltype.Signed, lltype.Signed], lltype.Void, + EffectInfo([], [], [], [], [], [], EffectInfo.EF_CANNOT_RAISE, + can_collect=False)) def _setup_str(self): self.str_descr = get_array_descr(self, rstr.STR) diff --git a/rpython/jit/backend/llsupport/rewrite.py b/rpython/jit/backend/llsupport/rewrite.py --- a/rpython/jit/backend/llsupport/rewrite.py +++ b/rpython/jit/backend/llsupport/rewrite.py @@ -34,6 +34,10 @@ - Add COND_CALLs to the write barrier before SETFIELD_GC and SETARRAYITEM_GC operations. 
+ - Rewrites copystrcontent to a call to memcopy + + - XXX does more than that, please write it down + '_write_barrier_applied' contains a dictionary of variable -> None. If a variable is in the dictionary, next setfields can be called without a write barrier. The idea is that an object that was freshly allocated @@ -335,6 +339,10 @@ self.emitting_an_operation_that_can_collect() elif op.getopnum() == rop.LABEL: self.emit_label() + # ---- change COPY{STR|UNICODE}CONTENT into a call ------ + if op.opnum == rop.COPYSTRCONTENT or op.opnum == rop.COPYUNICODECONTENT: + self.rewrite_copy_str_content(op) + continue # ---------- write barriers ---------- if self.gc_ll_descr.write_barrier_descr is not None: if op.getopnum() == rop.SETFIELD_GC: @@ -953,6 +961,61 @@ self.gcrefs_output_list.append(gcref) return index + def rewrite_copy_str_content(self, op): + funcaddr = llmemory.cast_ptr_to_adr(self.gc_ll_descr.memcpy_fn) + memcpy_fn = self.cpu.cast_adr_to_int(funcaddr) + memcpy_descr = self.gc_ll_descr.memcpy_descr + if op.getopnum() == rop.COPYSTRCONTENT: + basesize = self.gc_ll_descr.str_descr.basesize + # because we have one extra item after alloc, the actual address + # of string start is 1 lower, from extra_item_after_malloc + basesize -= 1 + assert self.gc_ll_descr.str_descr.itemsize == 1 + itemscale = 0 + else: + basesize = self.gc_ll_descr.unicode_descr.basesize + itemsize = self.gc_ll_descr.unicode_descr.itemsize + if itemsize == 2: + itemscale = 1 + elif itemsize == 4: + itemscale = 2 + else: + assert False, "unknown size of unicode" + i1 = self.emit_load_effective_address(op.getarg(0), op.getarg(2), + basesize, itemscale) + i2 = self.emit_load_effective_address(op.getarg(1), op.getarg(3), + basesize, itemscale) + if op.getopnum() == rop.COPYSTRCONTENT: + arg = op.getarg(4) + else: + # do some basic constant folding + if isinstance(op.getarg(4), ConstInt): + arg = ConstInt(op.getarg(4).getint() << itemscale) + else: + arg = ResOperation(rop.INT_LSHIFT, + [op.getarg(4), ConstInt(itemscale)]) + self.emit_op(arg) + self.emit_op(ResOperation(rop.CALL_N, + [ConstInt(memcpy_fn), i2, i1, arg], descr=memcpy_descr)) + + def emit_load_effective_address(self, v_gcptr, v_index, base, itemscale): + if self.cpu.supports_load_effective_address: + i1 = ResOperation(rop.LOAD_EFFECTIVE_ADDRESS, + [v_gcptr, v_index, ConstInt(base), + ConstInt(itemscale)]) + self.emit_op(i1) + return i1 + else: + if itemscale > 0: + v_index = ResOperation(rop.INT_LSHIFT, + [v_index, ConstInt(itemscale)]) + self.emit_op(v_index) + i1b = ResOperation(rop.INT_ADD, [v_gcptr, v_index]) + self.emit_op(i1b) + i1 = ResOperation(rop.INT_ADD, [i1b, ConstInt(base)]) + self.emit_op(i1) + return i1 + def remove_constptr(self, c): """Remove all ConstPtrs, and replace them with load_from_gc_table. 
""" diff --git a/rpython/jit/backend/llsupport/test/test_rewrite.py b/rpython/jit/backend/llsupport/test/test_rewrite.py --- a/rpython/jit/backend/llsupport/test/test_rewrite.py +++ b/rpython/jit/backend/llsupport/test/test_rewrite.py @@ -142,11 +142,16 @@ raw_sfdescr = get_array_descr(self.gc_ll_descr, RAW_SF) # strdescr = self.gc_ll_descr.str_descr + str_basesize = self.gc_ll_descr.str_descr.basesize - 1 unicodedescr = self.gc_ll_descr.unicode_descr strlendescr = strdescr.lendescr unicodelendescr = unicodedescr.lendescr strhashdescr = self.gc_ll_descr.str_hash_descr unicodehashdescr = self.gc_ll_descr.unicode_hash_descr + uni_basesize = unicodedescr.basesize + uni_itemscale = {2: 1, 4: 2}[unicodedescr.itemsize] + memcpy_fn = self.gc_ll_descr.memcpy_fn + memcpy_descr = self.gc_ll_descr.memcpy_descr casmdescr = JitCellToken() clt = FakeLoopToken() @@ -169,6 +174,7 @@ signedframedescr = self.cpu.signedframedescr floatframedescr = self.cpu.floatframedescr casmdescr.compiled_loop_token = clt + # guarddescr = AbstractFailDescr() # @@ -200,6 +206,7 @@ load_constant_offset = True load_supported_factors = (1,2,4,8) + supports_load_effective_address = True translate_support_code = None @@ -237,6 +244,9 @@ self._cache[key] = r return r + def cast_adr_to_int(self, adr): + return llmemory.AddressAsInt(adr) + class TestBoehm(RewriteTests): def setup_method(self, meth): class FakeCPU(BaseFakeCPU): @@ -1436,3 +1446,57 @@ jump() """) assert len(self.gcrefs) == 2 + + def test_rewrite_copystrcontents(self): + self.check_rewrite(""" + [p0, p1, i0, i1, i_len] + copystrcontent(p0, p1, i0, i1, i_len) + """, """ + [p0, p1, i0, i1, i_len] + i2 = load_effective_address(p0, i0, %(str_basesize)s, 0) + i3 = load_effective_address(p1, i1, %(str_basesize)s, 0) + call_n(ConstClass(memcpy_fn), i3, i2, i_len, descr=memcpy_descr) + """) + + def test_rewrite_copystrcontents_without_load_effective_address(self): + self.cpu.supports_load_effective_address = False + self.check_rewrite(""" + [p0, p1, i0, i1, i_len] + copystrcontent(p0, p1, i0, i1, i_len) + """, """ + [p0, p1, i0, i1, i_len] + i2b = int_add(p0, i0) + i2 = int_add(i2b, %(str_basesize)s) + i3b = int_add(p1, i1) + i3 = int_add(i3b, %(str_basesize)s) + call_n(ConstClass(memcpy_fn), i3, i2, i_len, descr=memcpy_descr) + """) + + def test_rewrite_copyunicodecontents(self): + self.check_rewrite(""" + [p0, p1, i0, i1, i_len] + copyunicodecontent(p0, p1, i0, i1, i_len) + """, """ + [p0, p1, i0, i1, i_len] + i2 = load_effective_address(p0, i0, %(uni_basesize)s, %(uni_itemscale)d) + i3 = load_effective_address(p1, i1, %(uni_basesize)s, %(uni_itemscale)d) + i4 = int_lshift(i_len, %(uni_itemscale)d) + call_n(ConstClass(memcpy_fn), i3, i2, i4, descr=memcpy_descr) + """) + + def test_rewrite_copyunicodecontents_without_load_effective_address(self): + self.cpu.supports_load_effective_address = False + self.check_rewrite(""" + [p0, p1, i0, i1, i_len] + copyunicodecontent(p0, p1, i0, i1, i_len) + """, """ + [p0, p1, i0, i1, i_len] + i0s = int_lshift(i0, %(uni_itemscale)d) + i2b = int_add(p0, i0s) + i2 = int_add(i2b, %(uni_basesize)s) + i1s = int_lshift(i1, %(uni_itemscale)d) + i3b = int_add(p1, i1s) + i3 = int_add(i3b, %(uni_basesize)s) + i4 = int_lshift(i_len, %(uni_itemscale)d) + call_n(ConstClass(memcpy_fn), i3, i2, i4, descr=memcpy_descr) + """) diff --git a/rpython/jit/backend/model.py b/rpython/jit/backend/model.py --- a/rpython/jit/backend/model.py +++ b/rpython/jit/backend/model.py @@ -19,6 +19,7 @@ # Boxes and Consts are BoxFloats and ConstFloats. 
supports_singlefloats = False supports_guard_gc_type = False + supports_load_effective_address = False propagate_exception_descr = None diff --git a/rpython/jit/backend/ppc/opassembler.py b/rpython/jit/backend/ppc/opassembler.py --- a/rpython/jit/backend/ppc/opassembler.py +++ b/rpython/jit/backend/ppc/opassembler.py @@ -966,72 +966,6 @@ pmc.overwrite() -class StrOpAssembler(object): - - _mixin_ = True - - def emit_copystrcontent(self, op, arglocs, regalloc): - self._emit_copycontent(arglocs, is_unicode=False) - - def emit_copyunicodecontent(self, op, arglocs, regalloc): - self._emit_copycontent(arglocs, is_unicode=True) - - def _emit_load_for_copycontent(self, dst, src_ptr, src_ofs, scale): - if src_ofs.is_imm(): - value = src_ofs.value << scale - if value < 32768: - self.mc.addi(dst.value, src_ptr.value, value) - else: - self.mc.load_imm(dst, value) - self.mc.add(dst.value, src_ptr.value, dst.value) - elif scale == 0: - self.mc.add(dst.value, src_ptr.value, src_ofs.value) - else: - self.mc.sldi(dst.value, src_ofs.value, scale) - self.mc.add(dst.value, src_ptr.value, dst.value) - - def _emit_copycontent(self, arglocs, is_unicode): - [src_ptr_loc, dst_ptr_loc, - src_ofs_loc, dst_ofs_loc, length_loc] = arglocs - - if is_unicode: - basesize, itemsize, _ = symbolic.get_array_token(rstr.UNICODE, - self.cpu.translate_support_code) - if itemsize == 2: scale = 1 - elif itemsize == 4: scale = 2 - else: raise AssertionError - else: - basesize, itemsize, _ = symbolic.get_array_token(rstr.STR, - self.cpu.translate_support_code) - assert itemsize == 1 - basesize -= 1 # for the extra null character - scale = 0 - - self._emit_load_for_copycontent(r.r0, src_ptr_loc, src_ofs_loc, scale) - self._emit_load_for_copycontent(r.r2, dst_ptr_loc, dst_ofs_loc, scale) - - if length_loc.is_imm(): - length = length_loc.getint() - self.mc.load_imm(r.r5, length << scale) - else: - if scale > 0: - self.mc.sldi(r.r5.value, length_loc.value, scale) - elif length_loc is not r.r5: - self.mc.mr(r.r5.value, length_loc.value) - - self.mc.mr(r.r4.value, r.r0.value) - self.mc.addi(r.r4.value, r.r4.value, basesize) - self.mc.addi(r.r3.value, r.r2.value, basesize) - - self.mc.load_imm(self.mc.RAW_CALL_REG, self.memcpy_addr) - self.mc.raw_call() - - -class UnicodeOpAssembler(object): - _mixin_ = True - # empty! 
- - class AllocOpAssembler(object): _mixin_ = True @@ -1336,8 +1270,7 @@ class OpAssembler(IntOpAssembler, GuardOpAssembler, MiscOpAssembler, FieldOpAssembler, - StrOpAssembler, CallOpAssembler, - UnicodeOpAssembler, ForceOpAssembler, + CallOpAssembler, ForceOpAssembler, AllocOpAssembler, FloatOpAssembler, VectorAssembler): _mixin_ = True diff --git a/rpython/jit/backend/ppc/regalloc.py b/rpython/jit/backend/ppc/regalloc.py --- a/rpython/jit/backend/ppc/regalloc.py +++ b/rpython/jit/backend/ppc/regalloc.py @@ -802,18 +802,6 @@ temp_loc = r.SCRATCH2 return [base_loc, temp_loc] - def prepare_copystrcontent(self, op): - src_ptr_loc = self.ensure_reg(op.getarg(0)) - dst_ptr_loc = self.ensure_reg(op.getarg(1)) - src_ofs_loc = self.ensure_reg_or_any_imm(op.getarg(2)) - dst_ofs_loc = self.ensure_reg_or_any_imm(op.getarg(3)) - length_loc = self.ensure_reg_or_any_imm(op.getarg(4)) - self._spill_before_call(gc_level=0) - return [src_ptr_loc, dst_ptr_loc, - src_ofs_loc, dst_ofs_loc, length_loc] - - prepare_copyunicodecontent = prepare_copystrcontent - prepare_same_as_i = helper.prepare_unary_op prepare_same_as_r = helper.prepare_unary_op prepare_same_as_f = helper.prepare_unary_op diff --git a/rpython/jit/backend/x86/regalloc.py b/rpython/jit/backend/x86/regalloc.py --- a/rpython/jit/backend/x86/regalloc.py +++ b/rpython/jit/backend/x86/regalloc.py @@ -1222,78 +1222,16 @@ resloc = self.force_allocate_reg(op, [op.getarg(0)]) self.perform(op, [argloc], resloc) - def consider_copystrcontent(self, op): - self._consider_copystrcontent(op, is_unicode=False) - - def consider_copyunicodecontent(self, op): - self._consider_copystrcontent(op, is_unicode=True) - - def _consider_copystrcontent(self, op, is_unicode): - # compute the source address - args = op.getarglist() - base_loc = self.rm.make_sure_var_in_reg(args[0], args) - ofs_loc = self.rm.make_sure_var_in_reg(args[2], args) - assert args[0] is not args[1] # forbidden case of aliasing - srcaddr_box = TempVar() - forbidden_vars = [args[1], args[3], args[4], srcaddr_box] - srcaddr_loc = self.rm.force_allocate_reg(srcaddr_box, forbidden_vars) - self._gen_address_inside_string(base_loc, ofs_loc, srcaddr_loc, - is_unicode=is_unicode) - # compute the destination address - base_loc = self.rm.make_sure_var_in_reg(args[1], forbidden_vars) - ofs_loc = self.rm.make_sure_var_in_reg(args[3], forbidden_vars) - forbidden_vars = [args[4], srcaddr_box] - dstaddr_box = TempVar() - dstaddr_loc = self.rm.force_allocate_reg(dstaddr_box, forbidden_vars) - self._gen_address_inside_string(base_loc, ofs_loc, dstaddr_loc, - is_unicode=is_unicode) - # compute the length in bytes - length_box = args[4] - length_loc = self.loc(length_box) - if is_unicode: - forbidden_vars = [srcaddr_box, dstaddr_box] - bytes_box = TempVar() - bytes_loc = self.rm.force_allocate_reg(bytes_box, forbidden_vars) - scale = self._get_unicode_item_scale() - if not (isinstance(length_loc, ImmedLoc) or - isinstance(length_loc, RegLoc)): - self.assembler.mov(length_loc, bytes_loc) - length_loc = bytes_loc - self.assembler.load_effective_addr(length_loc, 0, scale, bytes_loc) - length_box = bytes_box - length_loc = bytes_loc - # call memcpy() - self.rm.before_call() - self.xrm.before_call() - self.assembler.simple_call_no_collect(imm(self.assembler.memcpy_addr), - [dstaddr_loc, srcaddr_loc, length_loc]) - self.rm.possibly_free_var(length_box) - self.rm.possibly_free_var(dstaddr_box) - self.rm.possibly_free_var(srcaddr_box) - - def _gen_address_inside_string(self, baseloc, ofsloc, resloc, is_unicode): - if 
is_unicode: - ofs_items, _, _ = symbolic.get_array_token(rstr.UNICODE, - self.translate_support_code) - scale = self._get_unicode_item_scale() - else: - ofs_items, itemsize, _ = symbolic.get_array_token(rstr.STR, - self.translate_support_code) - assert itemsize == 1 - ofs_items -= 1 # for the extra null character - scale = 0 - self.assembler.load_effective_addr(ofsloc, ofs_items, scale, - resloc, baseloc) - - def _get_unicode_item_scale(self): - _, itemsize, _ = symbolic.get_array_token(rstr.UNICODE, - self.translate_support_code) - if itemsize == 4: - return 2 - elif itemsize == 2: - return 1 - else: - raise AssertionError("bad unicode item size") + def consider_load_effective_address(self, op): + p0 = op.getarg(0) + i0 = op.getarg(1) + ploc = self.make_sure_var_in_reg(p0, [i0]) + iloc = self.make_sure_var_in_reg(i0, [p0]) + res = self.rm.force_allocate_reg(op, [p0, i0]) + assert isinstance(op.getarg(2), ConstInt) + assert isinstance(op.getarg(3), ConstInt) + self.assembler.load_effective_addr(iloc, op.getarg(2).getint(), + op.getarg(3).getint(), res, ploc) From pypy.commits at gmail.com Thu Aug 8 09:42:42 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 08 Aug 2019 06:42:42 -0700 (PDT) Subject: [pypy-commit] pypy __debug__-optimize: Close branch __debug__-optimize Message-ID: <5d4c26d2.1c69fb81.1dfe6.4fa2@mx.google.com> Author: Ronan Lamy Branch: __debug__-optimize Changeset: r97101:7d0654888c47 Date: 2019-08-08 13:42 +0000 http://bitbucket.org/pypy/pypy/changeset/7d0654888c47/ Log: Close branch __debug__-optimize From pypy.commits at gmail.com Thu Aug 8 09:43:08 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 08 Aug 2019 06:43:08 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Merged in __debug__-optimize (pull request #662) Message-ID: <5d4c26ec.1c69fb81.b239c.b57f@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97102:4a80ca49fc90 Date: 2019-08-08 13:42 +0000 http://bitbucket.org/pypy/pypy/changeset/4a80ca49fc90/ Log: Merged in __debug__-optimize (pull request #662) Fix handling of __debug__ and sys.flags.optimize diff --git a/lib-python/3/opcode.py b/lib-python/3/opcode.py --- a/lib-python/3/opcode.py +++ b/lib-python/3/opcode.py @@ -224,7 +224,6 @@ hasname.append(201) def_op('CALL_METHOD', 202) # #args not including 'self' def_op('BUILD_LIST_FROM_ARG', 203) -jrel_op('JUMP_IF_NOT_DEBUG', 204) # jump over assert statements def_op('LOAD_REVDB_VAR', 205) # reverse debugger (syntax example: $5) del def_op, name_op, jrel_op, jabs_op diff --git a/lib-python/3/test/test_dis.py b/lib-python/3/test/test_dis.py --- a/lib-python/3/test/test_dis.py +++ b/lib-python/3/test/test_dis.py @@ -146,26 +146,24 @@ 1) pass -# PyPy change: JUMP_IF_NOT_DEBUG -dis_bug1333982 = """\ -%3d 0 JUMP_IF_NOT_DEBUG 26 (to 28) - 2 LOAD_CONST 1 (0) - 4 POP_JUMP_IF_TRUE 28 - 6 LOAD_GLOBAL 0 (AssertionError) - 8 LOAD_CONST 2 ( at 0x..., file "%s", line %d>) - 10 LOAD_CONST 3 ('bug1333982..') - 12 MAKE_FUNCTION 0 - 14 LOAD_FAST 0 (x) - 16 GET_ITER - 18 CALL_FUNCTION 1 +_bug1333982 = """\ +%3d 0 LOAD_CONST 1 (0) + 2 POP_JUMP_IF_TRUE 26 + 4 LOAD_GLOBAL 0 (AssertionError) + 6 LOAD_CONST 2 ( at 0x..., file "%s", line %d>) + 8 LOAD_CONST 3 ('bug1333982..') + 10 MAKE_FUNCTION 0 + 12 LOAD_FAST 0 (x) + 14 GET_ITER + 16 CALL_FUNCTION 1 -%3d 20 LOAD_CONST 4 (1) - 22 BINARY_ADD - 24 CALL_FUNCTION 1 - 26 RAISE_VARARGS 1 +%3d 18 LOAD_CONST 4 (1) + 20 BINARY_ADD + 22 CALL_FUNCTION 1 + 24 RAISE_VARARGS 1 -%3d >> 28 LOAD_CONST 0 (None) - 30 RETURN_VALUE +%3d >> 26 LOAD_CONST 0 (None) + 28 RETURN_VALUE """ % 
(bug1333982.__code__.co_firstlineno + 1, __file__, bug1333982.__code__.co_firstlineno + 1, diff --git a/pypy/bin/pyinteractive.py b/pypy/bin/pyinteractive.py --- a/pypy/bin/pyinteractive.py +++ b/pypy/bin/pyinteractive.py @@ -42,7 +42,7 @@ StrOption("warn", "warning control (arg is action:message:category:module:lineno)", default=None, cmdline="-W"), - + ]) pypy_init = gateway.applevel(''' @@ -102,10 +102,9 @@ space.appexec([], """(): import sys flags = list(sys.flags) - flags[6] = 2 + flags[3] = 2 sys.flags = type(sys.flags)(flags) - import __pypy__ - __pypy__.set_debug(False) + __builtins__.__dict__['__debug__'] = False """) # call pypy_find_stdlib: the side-effect is that it sets sys.prefix and @@ -119,7 +118,7 @@ # set warning control options (if any) warn_arg = interactiveconfig.warn if warn_arg is not None: - space.appexec([space.wrap(warn_arg)], """(arg): + space.appexec([space.wrap(warn_arg)], """(arg): import sys sys.warnoptions.append(arg)""") @@ -202,6 +201,6 @@ if __name__ == '__main__': if hasattr(sys, 'setrecursionlimit'): - # for running "python -i pyinteractive.py -Si -- py.py -Si" + # for running "python -i pyinteractive.py -Si -- py.py -Si" sys.setrecursionlimit(3000) sys.exit(main_(sys.argv)) diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py --- a/pypy/interpreter/app_main.py +++ b/pypy/interpreter/app_main.py @@ -540,10 +540,6 @@ sys.flags = type(sys.flags)(flags) sys.dont_write_bytecode = bool(sys.flags.dont_write_bytecode) - if sys.flags.optimize >= 1: - import __pypy__ - __pypy__.set_debug(False) - sys._xoptions = dict(x.split('=', 1) if '=' in x else (x, True) for x in options['_xoptions']) diff --git a/pypy/interpreter/astcompiler/assemble.py b/pypy/interpreter/astcompiler/assemble.py --- a/pypy/interpreter/astcompiler/assemble.py +++ b/pypy/interpreter/astcompiler/assemble.py @@ -723,7 +723,6 @@ ops.JUMP_IF_FALSE_OR_POP: 0, ops.POP_JUMP_IF_TRUE: -1, ops.POP_JUMP_IF_FALSE: -1, - ops.JUMP_IF_NOT_DEBUG: 0, ops.SETUP_ANNOTATIONS: 0, ops.STORE_ANNOTATION: -1, diff --git a/pypy/interpreter/astcompiler/codegen.py b/pypy/interpreter/astcompiler/codegen.py --- a/pypy/interpreter/astcompiler/codegen.py +++ b/pypy/interpreter/astcompiler/codegen.py @@ -507,9 +507,9 @@ def visit_Assert(self, asrt): if self.compile_info.optimize >= 1: return + assert self.compile_info.optimize == 0 self.update_position(asrt.lineno) end = self.new_block() - self.emit_jump(ops.JUMP_IF_NOT_DEBUG, end) asrt.test.accept_jump_if(self, True, end) self.emit_op_name(ops.LOAD_GLOBAL, self.names, "AssertionError") if asrt.msg: @@ -542,7 +542,8 @@ def visit_If(self, if_): self.update_position(if_.lineno, True) end = self.new_block() - test_constant = if_.test.as_constant_truth(self.space) + test_constant = if_.test.as_constant_truth( + self.space, self.compile_info) if test_constant == optimize.CONST_FALSE: self.visit_sequence(if_.orelse) elif test_constant == optimize.CONST_TRUE: @@ -686,7 +687,7 @@ def visit_While(self, wh): self.update_position(wh.lineno, True) - test_constant = wh.test.as_constant_truth(self.space) + test_constant = wh.test.as_constant_truth(self.space, self.compile_info) if test_constant == optimize.CONST_FALSE: self.visit_sequence(wh.orelse) else: @@ -1207,7 +1208,7 @@ count = len(elts) if elts is not None else 0 consts_w = [None] * count for i in range(count): - w_value = elts[i].as_constant() + w_value = elts[i].as_constant(self.space, self.compile_info) if w_value is None: # Not all constants return None @@ -1342,11 +1343,16 @@ if len(d.keys) < 0xffff: 
all_constant_keys_w = [] for key in d.keys: - if key is None or key.as_constant() is None: + if key is None: + constant_key = None + else: + constant_key = key.as_constant( + self.space, self.compile_info) + if constant_key is None: all_constant_keys_w = None break else: - all_constant_keys_w.append(key.as_constant()) + all_constant_keys_w.append(constant_key) for i in range(len(d.values)): key = d.keys[i] is_unpacking = key is None diff --git a/pypy/interpreter/astcompiler/optimize.py b/pypy/interpreter/astcompiler/optimize.py --- a/pypy/interpreter/astcompiler/optimize.py +++ b/pypy/interpreter/astcompiler/optimize.py @@ -20,14 +20,14 @@ class __extend__(ast.AST): - def as_constant_truth(self, space): + def as_constant_truth(self, space, compile_info): """Return the truth of this node if known.""" - const = self.as_constant() + const = self.as_constant(space, compile_info) if const is None: return CONST_NOT_CONST return int(space.is_true(const)) - def as_constant(self): + def as_constant(self, space, compile_info): """Return the value of this node as a wrapped constant if possible.""" return None @@ -47,46 +47,52 @@ class __extend__(ast.Num): - def as_constant(self): + def as_constant(self, space, compile_info): return self.n class __extend__(ast.Str): - def as_constant(self): + def as_constant(self, space, compile_info): return self.s class __extend__(ast.Bytes): - def as_constant(self): + def as_constant(self, space, compile_info): return self.s class __extend__(ast.Ellipsis): - - def as_constant_truth(self, space): - return True + def as_constant(self, space, compile_info): + return space.w_Ellipsis class __extend__(ast.Constant): - def as_constant(self): + def as_constant(self, space, compile_info): return self.value +class __extend__(ast.Name): + def as_constant(self, space, compile_info): + if self.id == '__debug__': + return space.newbool(compile_info.optimize == 0) + else: + return None + + class __extend__(ast.NameConstant): - - def as_constant(self): + def as_constant(self, space, compile_info): return self.value class __extend__(ast.Index): - def as_constant(self): - return self.value.as_constant() + def as_constant(self, space, compile_info): + return self.value.as_constant(space, compile_info) class __extend__(ast.Slice): - def as_constant(self): + def as_constant(self, space, compile_info): # XXX: this ought to return a slice object if all the indices are - # constants, but we don't have a space here. 
+ # constants return None class __extend__(ast.UnaryOp): @@ -189,9 +195,9 @@ return node def visit_BinOp(self, binop): - left = binop.left.as_constant() + left = binop.left.as_constant(self.space, self.compile_info) if left is not None: - right = binop.right.as_constant() + right = binop.right.as_constant(self.space, self.compile_info) if right is not None: op = binop.op try: @@ -218,7 +224,7 @@ return binop def visit_UnaryOp(self, unary): - w_operand = unary.operand.as_constant() + w_operand = unary.operand.as_constant(self.space, self.compile_info) op = unary.op if w_operand is not None: try: @@ -254,7 +260,7 @@ we_are_and = bop.op == ast.And i = 0 while i < len(values) - 1: - truth = values[i].as_constant_truth(self.space) + truth = values[i].as_constant_truth(self.space, self.compile_info) if truth != CONST_NOT_CONST: if (truth != CONST_TRUE) == we_are_and: del values[i + 1:] @@ -267,26 +273,14 @@ return values[0] return bop - def visit_Repr(self, rep): - w_const = rep.value.as_constant() - if w_const is not None: - w_repr = self.space.repr(w_const) - return ast.Constant(w_repr, rep.lineno, rep.col_offset) - return rep - def visit_Name(self, name): """Turn loading None, True, and False into a constant lookup.""" if name.ctx == ast.Del: return name space = self.space - iden = name.id w_const = None - if iden == "None": - w_const = space.w_None - elif iden == "True": - w_const = space.w_True - elif iden == "False": - w_const = space.w_False + if name.id == '__debug__': + w_const = space.newbool(self.compile_info.optimize == 0) if w_const is not None: return ast.NameConstant(w_const, name.lineno, name.col_offset) return name @@ -300,7 +294,7 @@ consts_w = [None]*len(tup.elts) for i in range(len(tup.elts)): node = tup.elts[i] - w_const = node.as_constant() + w_const = node.as_constant(self.space, self.compile_info) if w_const is None: new_elts = self._optimize_constant_star_unpacks(tup.elts) if new_elts is not None: @@ -350,7 +344,7 @@ after_last_star_index = i + 1 new_elts.append(elt) elif const_since_last_star_w is not None: - w_const = elt.as_constant() + w_const = elt.as_constant(self.space, self.compile_info) if w_const is None: new_elts.extend(elts[after_last_star_index:i + 1]) const_since_last_star_w = None @@ -375,9 +369,9 @@ def visit_Subscript(self, subs): if subs.ctx == ast.Load: - w_obj = subs.value.as_constant() + w_obj = subs.value.as_constant(self.space, self.compile_info) if w_obj is not None: - w_idx = subs.slice.as_constant() + w_idx = subs.slice.as_constant(self.space, self.compile_info) if w_idx is not None: try: w_const = self.space.getitem(w_obj, w_idx) diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py b/pypy/interpreter/astcompiler/test/test_compiler.py --- a/pypy/interpreter/astcompiler/test/test_compiler.py +++ b/pypy/interpreter/astcompiler/test/test_compiler.py @@ -1041,20 +1041,6 @@ code_w.exec_code(self.space, dict_w, dict_w) self.check(dict_w, expr, result) - def test_assert_skipping(self): - space = self.space - mod = space.getbuiltinmodule('__pypy__') - w_set_debug = space.getattr(mod, space.wrap('set_debug')) - space.call_function(w_set_debug, space.w_False) - - source = """if 1: - assert False - """ - try: - self.run(source) - finally: - space.call_function(w_set_debug, space.w_True) - def test_dont_fold_equal_code_objects(self): yield self.st, "f=lambda:1;g=lambda:1.0;x=g()", 'type(x)', float yield (self.st, "x=(lambda: (-0.0, 0.0), lambda: (0.0, -0.0))[1]()", diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py --- 
a/pypy/interpreter/pycode.py +++ b/pypy/interpreter/pycode.py @@ -39,7 +39,7 @@ # time you make pyc files incompatible. This value ends up in the frozen # importlib, via MAGIC_NUMBER in module/_frozen_importlib/__init__. -pypy_incremental_magic = 176 # bump it by 16 +pypy_incremental_magic = 192 # bump it by 16 assert pypy_incremental_magic % 16 == 0 assert pypy_incremental_magic < 3000 # the magic number of Python 3. There are # no known magic numbers below this value diff --git a/pypy/interpreter/pycompiler.py b/pypy/interpreter/pycompiler.py --- a/pypy/interpreter/pycompiler.py +++ b/pypy/interpreter/pycompiler.py @@ -117,6 +117,8 @@ check = True if not check: raise oefmt(self.space.w_TypeError, "invalid node type") + if optimize == -1: + optimize = self.space.sys.get_optimize() fut = misc.parse_future(node, self.future_flags.compiler_features) f_flags, f_lineno, f_col = fut @@ -166,6 +168,9 @@ def compile(self, source, filename, mode, flags, hidden_applevel=False, optimize=-1): + if optimize == -1: + optimize = self.space.sys.get_optimize() + assert optimize >= 0 info = pyparse.CompileInfo(filename, mode, flags, hidden_applevel=hidden_applevel, optimize=optimize) mod = self._compile_to_ast(source, info) diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py --- a/pypy/interpreter/pyopcode.py +++ b/pypy/interpreter/pyopcode.py @@ -211,8 +211,6 @@ next_instr = self.JUMP_FORWARD(oparg, next_instr) elif opcode == opcodedesc.JUMP_IF_FALSE_OR_POP.index: next_instr = self.JUMP_IF_FALSE_OR_POP(oparg, next_instr) - elif opcode == opcodedesc.JUMP_IF_NOT_DEBUG.index: - next_instr = self.JUMP_IF_NOT_DEBUG(oparg, next_instr) elif opcode == opcodedesc.JUMP_IF_TRUE_OR_POP.index: next_instr = self.JUMP_IF_TRUE_OR_POP(oparg, next_instr) elif opcode == opcodedesc.POP_JUMP_IF_FALSE.index: @@ -1180,11 +1178,6 @@ self.popvalue() return next_instr - def JUMP_IF_NOT_DEBUG(self, jumpby, next_instr): - if not self.space.sys.debug: - next_instr += jumpby - return next_instr - def GET_ITER(self, oparg, next_instr): w_iterable = self.popvalue() w_iterator = self.space.iter(w_iterable) diff --git a/pypy/interpreter/pyparser/pyparse.py b/pypy/interpreter/pyparser/pyparse.py --- a/pypy/interpreter/pyparser/pyparse.py +++ b/pypy/interpreter/pyparser/pyparse.py @@ -69,14 +69,14 @@ * hidden_applevel: Will this code unit and sub units be hidden at the applevel? * optimize: optimization level: - -1 = same as interpreter, 0 = no optmiziation, 1 = remove asserts, 2 = remove docstrings. """ def __init__(self, filename, mode="exec", flags=0, future_pos=(0, 0), - hidden_applevel=False, optimize=-1): + hidden_applevel=False, optimize=0): + assert optimize >= 0 rstring.check_str0(filename) self.filename = filename self.mode = mode diff --git a/pypy/module/__builtin__/compiling.py b/pypy/module/__builtin__/compiling.py --- a/pypy/module/__builtin__/compiling.py +++ b/pypy/module/__builtin__/compiling.py @@ -13,7 +13,7 @@ @unwrap_spec(filename='fsencode', mode='text', flags=int, dont_inherit=int, optimize=int) def compile(space, w_source, filename, mode, flags=0, dont_inherit=0, - optimize=0): + optimize=-1): """Compile the source string (a Python module, statement or expression) into a code object that can be executed by the exec statement or eval(). The filename will be used for run-time error messages. 
@@ -42,6 +42,10 @@ raise oefmt(space.w_ValueError, "compile() arg 3 must be 'exec', 'eval' or 'single'") + if optimize < -1 or optimize > 2: + raise oefmt(space.w_ValueError, + "compile(): invalid optimize value") + if space.isinstance_w(w_source, space.gettypeobject(ast.W_AST.typedef)): if flags & consts.PyCF_ONLY_AST: return w_source diff --git a/pypy/module/__builtin__/test/apptest_compile.py b/pypy/module/__builtin__/test/apptest_compile.py --- a/pypy/module/__builtin__/test/apptest_compile.py +++ b/pypy/module/__builtin__/test/apptest_compile.py @@ -108,31 +108,32 @@ try: assert False except AssertionError: - return (True, f.__doc__) + return (True, f.__doc__, __debug__) else: - return (False, f.__doc__) + return (False, f.__doc__, __debug__) ''' - def f(): """doc""" - values = [(-1, __debug__, f.__doc__), - (0, True, 'doc'), - (1, False, 'doc'), - (2, False, None)] + def f(): + """doc""" - for optval, debugval, docstring in values: + values = [(-1, __debug__, f.__doc__, __debug__), + (0, True, 'doc', True), + (1, False, 'doc', False), + (2, False, None, False)] + + for optval, *expected in values: # test both direct compilation and compilation via AST codeobjs = [] - codeobjs.append( - compile(codestr, "", "exec", optimize=optval)) + codeobjs.append(compile(codestr, "", "exec", optimize=optval)) tree = ast.parse(codestr) codeobjs.append(compile(tree, "", "exec", optimize=optval)) - for i, code in enumerate(codeobjs): - print(optval, debugval, docstring, i) + print(optval, *expected, i) ns = {} exec(code, ns) rv = ns['f']() - assert rv == (debugval, docstring) + print(rv) + assert rv == tuple(expected) def test_assert_remove(): """Test removal of the asserts with optimize=1.""" diff --git a/pypy/module/__builtin__/test/test_compile.py b/pypy/module/__builtin__/test/test_compile.py --- a/pypy/module/__builtin__/test/test_compile.py +++ b/pypy/module/__builtin__/test/test_compile.py @@ -3,12 +3,18 @@ def setup_method(self, method): space = self.space - self._sys_debug = space.sys.debug + self._w_flags = space.sys.get('flags') # imitate -O - space.sys.debug = False + space.appexec([], """(): + import sys + flags = list(sys.flags) + flags[3] = 1 + sys.flags = type(sys.flags)(flags) + """) def teardown_method(self, method): - self.space.sys.debug = self._sys_debug + space = self.space + space.setitem(space.sys.w_dict, space.newtext('flags'), self._w_flags) def test_O_optmize_0(self): """Test that assert is not ignored if -O flag is set but optimize=0.""" @@ -30,9 +36,3 @@ space.appexec([], """(): exec(compile('assert False', '', 'exec', optimize=-1)) """) - - -# TODO: Check the value of __debug__ inside of the compiled block! -# According to the documentation, it should follow the optimize flag. -# However, cpython3.5.0a0 behaves the same way as PyPy (__debug__ follows -# -O, -OO flags of the interpreter). diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py --- a/pypy/module/__pypy__/interp_magic.py +++ b/pypy/module/__pypy__/interp_magic.py @@ -117,14 +117,6 @@ """ Create a new empty list that has an underlying storage of length sizehint """ return space.newlist_hint(sizehint) - at unwrap_spec(debug=int) -def set_debug(space, debug): - debug = bool(debug) - space.sys.debug = debug - space.setitem(space.builtin.w_dict, - space.newtext('__debug__'), - space.newbool(debug)) - @unwrap_spec(estimate=int) def add_memory_pressure(space, estimate): """ Add memory pressure of estimate bytes. 
Useful when calling a C function diff --git a/pypy/module/__pypy__/moduledef.py b/pypy/module/__pypy__/moduledef.py --- a/pypy/module/__pypy__/moduledef.py +++ b/pypy/module/__pypy__/moduledef.py @@ -72,7 +72,7 @@ interpleveldefs = { 'bufferable': 'interp_buffer.W_Bufferable', } - + class Module(MixedModule): """ PyPy specific "magic" functions. A lot of them are experimental and @@ -106,7 +106,6 @@ 'delitem_if_value_is' : 'interp_dict.delitem_if_value_is', 'move_to_end' : 'interp_dict.move_to_end', 'strategy' : 'interp_magic.strategy', # dict,set,list - 'set_debug' : 'interp_magic.set_debug', 'locals_to_fast' : 'interp_magic.locals_to_fast', 'set_code_callback' : 'interp_magic.set_code_callback', 'decode_long' : 'interp_magic.decode_long', diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -402,11 +402,7 @@ raise oefmt(space.w_ImportError, "Bad magic number in %s", cpathname) #print "loading pyc file:", cpathname code_w = read_compiled_module(space, cpathname, source) - try: - optimize = space.sys.get_flag('optimize') - except RuntimeError: - # during bootstrapping - optimize = 0 + optimize = space.sys.get_optimize() if optimize >= 2: code_w.remove_docstrings(space) diff --git a/pypy/module/sys/moduledef.py b/pypy/module/sys/moduledef.py --- a/pypy/module/sys/moduledef.py +++ b/pypy/module/sys/moduledef.py @@ -25,7 +25,6 @@ self.recursionlimit = 1000 self.defaultencoding = "utf-8" self.filesystemencoding = None - self.debug = True self.track_resources = False self.finalizing = False self.dlopenflags = rdynload._dlopen_default_mode() @@ -239,3 +238,9 @@ def get_state(self, space): from pypy.module.sys import state return state.get(space) + + def get_optimize(self): + try: + return self.get_flag('optimize') + except RuntimeError: # bootstrapping + return 0 diff --git a/pypy/tool/opcode3.py b/pypy/tool/opcode3.py --- a/pypy/tool/opcode3.py +++ b/pypy/tool/opcode3.py @@ -224,6 +224,5 @@ hasname.append(201) def_op('CALL_METHOD', 202) # #args not including 'self' def_op('BUILD_LIST_FROM_ARG', 203) -jrel_op('JUMP_IF_NOT_DEBUG', 204) # jump over assert statements del def_op, name_op, jrel_op, jabs_op From pypy.commits at gmail.com Thu Aug 8 10:16:47 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 08 Aug 2019 07:16:47 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: document branch Message-ID: <5d4c2ecf.1c69fb81.7639.a629@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97103:8c882c594b3f Date: 2019-08-08 14:35 +0100 http://bitbucket.org/pypy/pypy/changeset/8c882c594b3f/ Log: document branch diff --git a/pypy/doc/whatsnew-pypy3-head.rst b/pypy/doc/whatsnew-pypy3-head.rst --- a/pypy/doc/whatsnew-pypy3-head.rst +++ b/pypy/doc/whatsnew-pypy3-head.rst @@ -34,3 +34,8 @@ .. branch: stdlib-3.6.9 Update standard library to version 3.6.9 + +.. branch: __debug__-optimize + +Fix handling of __debug__, sys.flags.optimize, and '-O' command-line flag to +match CPython 3.6. 
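To make the __debug__-optimize note above concrete, here is a small application-level sketch (illustrative only, not taken from any changeset in this archive; the names src, ns and cases are invented for the example). Under CPython 3.6 semantics, which this branch makes PyPy follow, __debug__ and assert statements inside code built by compile(..., optimize=N) track N rather than the interpreter's -O flag, docstrings are only dropped at optimize=2, and optimize values outside -1..2 raise ValueError. This is the same behaviour the updated apptest_compile.py above exercises.

# Illustrative sketch only; `src`, `ns` and `cases` are names made up here.
src = '''
def f():
    """doc"""
    try:
        assert False           # stripped when compiled with optimize >= 1
    except AssertionError:
        return (__debug__, f.__doc__, True)    # the assert actually ran
    return (__debug__, f.__doc__, False)       # the assert was optimized away
'''

cases = [
    (0, (True, 'doc', True)),    # optimize=0: asserts kept, __debug__ is True
    (1, (False, 'doc', False)),  # optimize=1: asserts gone, docstrings kept
    (2, (False, None, False)),   # optimize=2: docstrings stripped as well
]
for optimize, expected in cases:
    ns = {}
    exec(compile(src, '<example>', 'exec', optimize=optimize), ns)
    assert ns['f']() == expected

# optimize=-1 means "use the interpreter's own -O level"; values outside
# the -1..2 range are rejected, matching CPython 3.6:
try:
    compile('pass', '<example>', 'exec', optimize=3)
except ValueError:
    pass          # "compile(): invalid optimize value"
else:
    assert False, 'optimize=3 should have been rejected'
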
From pypy.commits at gmail.com Thu Aug 8 11:00:33 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 08 Aug 2019 08:00:33 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Fix bpo-25862 Message-ID: <5d4c3911.1c69fb81.13348.22b5@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97104:8b8aaf492f23 Date: 2019-08-08 15:59 +0100 http://bitbucket.org/pypy/pypy/changeset/8b8aaf492f23/ Log: Fix bpo-25862 diff --git a/pypy/module/_io/interp_textio.py b/pypy/module/_io/interp_textio.py --- a/pypy/module/_io/interp_textio.py +++ b/pypy/module/_io/interp_textio.py @@ -746,6 +746,7 @@ check_decoded(space, w_decoded) w_result = space.newtext(self.decoded.get_chars(-1)) w_final = space.add(w_result, w_decoded) + self.decoded.reset() self.snapshot = None return w_final @@ -892,6 +893,7 @@ if needflush: space.call_method(self.w_buffer, "flush") + self.decoded.reset() self.snapshot = None if self.w_decoder: diff --git a/pypy/module/_io/test/test_textio.py b/pypy/module/_io/test/test_textio.py --- a/pypy/module/_io/test/test_textio.py +++ b/pypy/module/_io/test/test_textio.py @@ -458,6 +458,18 @@ t.__init__(_io.BytesIO()) assert t.read(0) == u'' + def test_issue25862(self): + # CPython issue #25862 + # Assertion failures occurred in tell() after read() and write(). + from _io import TextIOWrapper, BytesIO + t = TextIOWrapper(BytesIO(b'test'), encoding='ascii') + t.read(1) + t.read() + t.tell() + t = TextIOWrapper(BytesIO(b'test'), encoding='ascii') + t.read(1) + t.write('x') + t.tell() class AppTestIncrementalNewlineDecoder: def test_newline_decoder(self): From pypy.commits at gmail.com Thu Aug 8 11:16:54 2019 From: pypy.commits at gmail.com (arigo) Date: Thu, 08 Aug 2019 08:16:54 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: When requested, dump compilation information, which the controller Message-ID: <5d4c3ce6.1c69fb81.409b9.e108@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97105:8655e95a8718 Date: 2019-08-08 17:16 +0200 http://bitbucket.org/pypy/pypy/changeset/8655e95a8718/ Log: When requested, dump compilation information, which the controller process can use to check for bad versions or missing functions diff --git a/rpython/translator/c/genc.py b/rpython/translator/c/genc.py --- a/rpython/translator/c/genc.py +++ b/rpython/translator/c/genc.py @@ -928,6 +928,10 @@ fi = incfilename.open('w') fi.write('#ifndef _PY_COMMON_HEADER_H\n#define _PY_COMMON_HEADER_H\n') + if database.sandbox: + from rpython.translator.sandbox import rsandbox + eci = eci.merge(rsandbox.extra_eci(database.translator.rtyper)) + # # Header # diff --git a/rpython/translator/sandbox/rsandbox.py b/rpython/translator/sandbox/rsandbox.py --- a/rpython/translator/sandbox/rsandbox.py +++ b/rpython/translator/sandbox/rsandbox.py @@ -4,6 +4,7 @@ and wait for an answer on STDIN. Enable with 'translate.py --sandbox'. 
""" import py +import sys from rpython.rlib import types from rpython.rlib.objectmodel import specialize @@ -45,16 +46,28 @@ return 'v' -eci = rffi.ExternalCompilationInfo(separate_module_sources=[ +def extra_eci(rtyper): + from rpython.translator.c.support import c_string_constant + + sandboxed_functions = getattr(rtyper, '_sandboxed_functions', []) + dump = ( + "Version: 20001\n" + + "Platform: %s\n" % sys.platform + + "Funcs: %s" % ' '.join(sorted(sandboxed_functions)) + ) + dump = c_string_constant(dump).replace('\n', '\\\n') + + return rffi.ExternalCompilationInfo(separate_module_sources=[ + '#define RPY_SANDBOX_DUMP %s\n' % (dump,) + py.path.local(__file__).join('..', 'src', 'rsandbox.c').read(), ], post_include_bits=[ py.path.local(__file__).join('..', 'src', 'rsandbox.h').read(), ]) + def external(funcname, ARGS, RESULT): return rffi.llexternal(funcname, ARGS, RESULT, - compilation_info=eci, sandboxsafe=True, - _nowrapper=True) + sandboxsafe=True, _nowrapper=True) rpy_sandbox_arg = { 'i': external('rpy_sandbox_arg_i', [lltype.UnsignedLongLong], lltype.Void), @@ -89,7 +102,12 @@ result_func = rpy_sandbox_res[result_kind] RESTYPE = FUNCTYPE.RESULT + try: + lst = rtyper._sandboxed_functions + except AttributeError: + lst = rtyper._sandboxed_functions = [] name_and_sig = '%s(%s)%s' % (fnname, ''.join(arg_kinds), result_kind) + lst.append(name_and_sig) log(name_and_sig) name_and_sig = rffi.str2charp(name_and_sig, track_allocation=False) diff --git a/rpython/translator/sandbox/src/rsandbox.c b/rpython/translator/sandbox/src/rsandbox.c --- a/rpython/translator/sandbox/src/rsandbox.c +++ b/rpython/translator/sandbox/src/rsandbox.c @@ -1,6 +1,9 @@ #include +#include #include #include +#include +#include #define RPY_SANDBOX_ARGBUF 512 @@ -11,6 +14,7 @@ static char sand_argbuf[RPY_SANDBOX_ARGBUF]; static size_t sand_nextarg = RPY_SANDBOX_NAMEMAX; +static int sand_dump_checked = 0; static void sand_writeall(const char *buf, size_t count) @@ -89,18 +93,39 @@ *(void **)sand_arg_output(sizeof(void *)) = p; } +static void sand_dump_check(void) +{ + const char *p = getenv("RPY_SANDBOX_DUMP"); + if (p && p[0]) { + puts(RPY_SANDBOX_DUMP); + exit(0); + } + sand_dump_checked = 1; +} + struct sand_data_s { void *data; size_t size; }; +static void sand_assert(int condition) +{ + if (!condition) { + fprintf(stderr, "sandbox: internal assert failed\n"); + abort(); + } +} + static void sand_interact(const char *name_and_sig, char expected_result, void *result, size_t result_size) { int saved_errno = errno; + if (!sand_dump_checked) + sand_dump_check(); + size_t name_len = strlen(name_and_sig); - assert(name_len > 0); + sand_assert(name_len > 0); if (name_len > RPY_SANDBOX_NAMEMAX - 1) { fprintf(stderr, "sandbox: function name buffer overflow (RPY_SANDBOX_NAMEMAX)\n"); @@ -110,8 +135,8 @@ *p = name_len; memcpy(p + 1, name_and_sig, name_len); - assert(sand_nextarg >= RPY_SANDBOX_NAMEMAX); - assert(sand_nextarg <= RPY_SANDBOX_ARGBUF); + sand_assert(sand_nextarg >= RPY_SANDBOX_NAMEMAX); + sand_assert(sand_nextarg <= RPY_SANDBOX_ARGBUF); sand_writeall(p, sand_nextarg - (p - sand_argbuf)); sand_nextarg = RPY_SANDBOX_NAMEMAX; diff --git a/rpython/translator/sandbox/test/test_sandboxio.py b/rpython/translator/sandbox/test/test_sandboxio.py --- a/rpython/translator/sandbox/test/test_sandboxio.py +++ b/rpython/translator/sandbox/test/test_sandboxio.py @@ -389,3 +389,17 @@ out = os.path.join("tmp", "spam") + '\n' expect(sandio, "write(ipi)i", (1, RAW(out), len(out)), len(out)) expect_done(sandio) + +def 
test_sandbox_dump(): + def entry_point(argv): + print "hello world!\n" + os.close(42) + return 0 + + exe = compile(entry_point) + popen = subprocess.Popen([exe], stdout=subprocess.PIPE, + env={"RPY_SANDBOX_DUMP": "1"}) + dump = popen.stdout.read() + popen.wait() + assert dump == ("Version: 20001\nPlatform: %s\n" % (sys.platform,) + + "Funcs: close(i)i write(ipi)i\n") From pypy.commits at gmail.com Thu Aug 8 11:28:43 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 08 Aug 2019 08:28:43 -0700 (PDT) Subject: [pypy-commit] pypy default: Move app-level tests to apptest_longobject.py Message-ID: <5d4c3fab.1c69fb81.324d6.57d1@mx.google.com> Author: Ronan Lamy Branch: Changeset: r97106:5a0b5a0945e0 Date: 2019-08-08 16:27 +0100 http://bitbucket.org/pypy/pypy/changeset/5a0b5a0945e0/ Log: Move app-level tests to apptest_longobject.py diff --git a/pypy/objspace/std/test/apptest_longobject.py b/pypy/objspace/std/test/apptest_longobject.py new file mode 100644 --- /dev/null +++ b/pypy/objspace/std/test/apptest_longobject.py @@ -0,0 +1,409 @@ +from pytest import raises +import sys +import math +import operator + +def test_trunc(): + assert math.trunc(1L) == 1L + assert math.trunc(-1L) == -1L + +def test_add(): + x = 123L + assert int(x + 12443L) == 123 + 12443 + x = -20 + assert x + 2 + 3L + True == -14L + +def test_sub(): + assert int(58543L - 12332L) == 58543 - 12332 + assert int(58543L - 12332) == 58543 - 12332 + assert int(58543 - 12332L) == 58543 - 12332 + x = 237123838281233L + assert x * 12 == x * 12L + +def test_mul(): + x = 363L + assert x * 2 ** 40 == x << 40 + +def test_truediv(): + exec "from __future__ import division; a = 31415926L / 10000000L" + assert a == 3.1415926 + +def test_floordiv(): + x = 31415926L + a = x // 10000000L + assert a == 3L + +def test_int_floordiv(): + x = 3000L + a = x // 1000 + assert a == 3L + + x = 3000L + a = x // -1000 + assert a == -3L + + x = 3000L + raises(ZeroDivisionError, "x // 0") + + n = sys.maxint+1 + assert n / int(-n) == -1L + +def test_numerator_denominator(): + assert (1L).numerator == 1L + assert (1L).denominator == 1L + assert (42L).numerator == 42L + assert (42L).denominator == 1L + +def test_compare(): + Z = 0 + ZL = 0L + + assert Z == ZL + assert not (Z != ZL) + assert ZL == Z + assert not (ZL != Z) + assert Z <= ZL + assert not (Z < ZL) + assert ZL <= ZL + assert not (ZL < ZL) + + for BIG in (1L, 1L << 62, 1L << 9999): + assert not (Z == BIG) + assert Z != BIG + assert not (BIG == Z) + assert BIG != Z + assert not (ZL == BIG) + assert ZL != BIG + assert Z <= BIG + assert Z < BIG + assert not (BIG <= Z) + assert not (BIG < Z) + assert ZL <= BIG + assert ZL < BIG + assert not (BIG <= ZL) + assert not (BIG < ZL) + assert not (Z <= -BIG) + assert not (Z < -BIG) + assert -BIG <= Z + assert -BIG < Z + assert not (ZL <= -BIG) + assert not (ZL < -BIG) + assert -BIG <= ZL + assert -BIG < ZL + # + assert not (BIG < int(BIG)) + assert (BIG <= int(BIG)) + assert (BIG == int(BIG)) + assert not (BIG != int(BIG)) + assert not (BIG > int(BIG)) + assert (BIG >= int(BIG)) + # + assert (BIG < int(BIG)+1) + assert (BIG <= int(BIG)+1) + assert not (BIG == int(BIG)+1) + assert (BIG != int(BIG)+1) + assert not (BIG > int(BIG)+1) + assert not (BIG >= int(BIG)+1) + # + assert not (BIG < int(BIG)-1) + assert not (BIG <= int(BIG)-1) + assert not (BIG == int(BIG)-1) + assert (BIG != int(BIG)-1) + assert (BIG > int(BIG)-1) + assert (BIG >= int(BIG)-1) + # + assert not (int(BIG) < BIG) + assert (int(BIG) <= BIG) + assert (int(BIG) == BIG) + assert not (int(BIG) != 
BIG) + assert not (int(BIG) > BIG) + assert (int(BIG) >= BIG) + # + assert not (int(BIG)+1 < BIG) + assert not (int(BIG)+1 <= BIG) + assert not (int(BIG)+1 == BIG) + assert (int(BIG)+1 != BIG) + assert (int(BIG)+1 > BIG) + assert (int(BIG)+1 >= BIG) + # + assert (int(BIG)-1 < BIG) + assert (int(BIG)-1 <= BIG) + assert not (int(BIG)-1 == BIG) + assert (int(BIG)-1 != BIG) + assert not (int(BIG)-1 > BIG) + assert not (int(BIG)-1 >= BIG) + +def test_conversion(): + class long2(long): + pass + x = 1L + x = long2(x<<100) + y = int(x) + assert type(y) == long + assert type(+long2(5)) is long + assert type(long2(5) << 0) is long + assert type(long2(5) >> 0) is long + assert type(long2(5) + 0) is long + assert type(long2(5) - 0) is long + assert type(long2(5) * 1) is long + assert type(1 * long2(5)) is long + assert type(0 + long2(5)) is long + assert type(-long2(0)) is long + assert type(long2(5) // 1) is long + +def test_shift(): + assert 65l >> 2l == 16l + assert 65l >> 2 == 16l + assert 65 >> 2l == 16l + assert 65l << 2l == 65l * 4 + assert 65l << 2 == 65l * 4 + assert 65 << 2l == 65l * 4 + raises(ValueError, "1L << -1L") + raises(ValueError, "1L << -1") + raises(OverflowError, "1L << (2 ** 100)") + raises(ValueError, "1L >> -1L") + raises(ValueError, "1L >> -1") + raises(OverflowError, "1L >> (2 ** 100)") + +def test_pow(): + x = 0L + assert pow(x, 0L, 1L) == 0L + assert pow(-1L, -1L) == -1.0 + assert pow(2 ** 68, 0.5) == 2.0 ** 34 + assert pow(2 ** 68, 2) == 2 ** 136 + raises(TypeError, pow, 2l, -1, 3) + raises(ValueError, pow, 2l, 5, 0) + + # some rpow tests + assert pow(0, 0L, 1L) == 0L + assert pow(-1, -1L) == -1.0 + +def test_int_pow(): + x = 2L + assert pow(x, 2) == 4L + assert pow(x, 2, 2) == 0L + assert pow(x, 2, 3L) == 1L + +def test_getnewargs(): + assert 0L .__getnewargs__() == (0L,) + assert (-1L) .__getnewargs__() == (-1L,) + +def test_divmod(): + def check_division(x, y): + q, r = divmod(x, y) + pab, pba = x*y, y*x + assert pab == pba + assert q == x // y + assert r == x % y + assert x == q*y + r + if y > 0: + assert 0 <= r < y + else: + assert y < r <= 0 + for x in [-1L, 0L, 1L, 2L ** 100 - 1, -2L ** 100 - 1]: + for y in [-105566530L, -1L, 1L, 1034522340L]: + print "checking division for %s, %s" % (x, y) + check_division(x, y) + check_division(x, int(y)) + check_division(int(x), y) + # special case from python tests: + s1 = 33 + s2 = 2 + x = 16565645174462751485571442763871865344588923363439663038777355323778298703228675004033774331442052275771343018700586987657790981527457655176938756028872904152013524821759375058141439 + x >>= s1*16 + y = 10953035502453784575 + y >>= s2*16 + x = 0x3FE0003FFFFC0001FFFL + y = 0x9800FFC1L + check_division(x, y) + raises(ZeroDivisionError, "x // 0L") + raises(ZeroDivisionError, "x % 0L") + raises(ZeroDivisionError, divmod, x, 0L) + raises(ZeroDivisionError, "x // 0") + raises(ZeroDivisionError, "x % 0") + raises(ZeroDivisionError, divmod, x, 0) + +def test_int_divmod(): + q, r = divmod(100L, 11) + assert q == 9L + assert r == 1L + +def test_format(): + assert repr(12345678901234567890) == '12345678901234567890L' + assert str(12345678901234567890) == '12345678901234567890' + assert hex(0x1234567890ABCDEFL) == '0x1234567890abcdefL' + assert oct(01234567012345670L) == '01234567012345670L' + +def test_bits(): + x = 0xAAAAAAAAL + assert x | 0x55555555L == 0xFFFFFFFFL + assert x & 0x55555555L == 0x00000000L + assert x ^ 0x55555555L == 0xFFFFFFFFL + assert -x | 0x55555555L == -0xAAAAAAA9L + assert x | 0x555555555L == 0x5FFFFFFFFL + assert x & 
0x555555555L == 0x000000000L + assert x ^ 0x555555555L == 0x5FFFFFFFFL + +def test_hash(): + # ints have the same hash as equal longs + for i in range(-4, 14): + assert hash(i) == hash(long(i)) == long(i).__hash__() + # might check too much -- it's ok to change the hashing algorithm + assert hash(123456789L) == 123456789 + assert hash(1234567890123456789L) in ( + -1895067127, # with 32-bit platforms + 1234567890123456789) # with 64-bit platforms + +def test_math_log(): + raises(ValueError, math.log, 0L) + raises(ValueError, math.log, -1L) + raises(ValueError, math.log, -2L) + raises(ValueError, math.log, -(1L << 10000)) + #raises(ValueError, math.log, 0) + raises(ValueError, math.log, -1) + raises(ValueError, math.log, -2) + +def test_long(): + n = -sys.maxint-1 + assert long(n) == n + assert str(long(n)) == str(n) + a = buffer('123') + assert long(a) == 123L + +def test_huge_longs(): + x = 1L + huge = x << 40000L + raises(OverflowError, float, huge) + raises(OverflowError, operator.truediv, huge, 3) + raises(OverflowError, operator.truediv, huge, 3L) + +def test_just_trunc(): + class myint(object): + def __trunc__(self): + return 42 + assert long(myint()) == 42 + +def test_override___long__(): + class mylong(long): + def __long__(self): + return 42L + assert long(mylong(21)) == 42L + class myotherlong(long): + pass + assert long(myotherlong(21)) == 21L + +def test___long__(): + class A(object): + def __long__(self): + return 42 + assert long(A()) == 42L + class B(object): + def __int__(self): + return 42 + raises(TypeError, long, B()) + + class LongSubclass(long): + pass + class ReturnsLongSubclass(object): + def __long__(self): + return LongSubclass(42L) + n = long(ReturnsLongSubclass()) + assert n == 42 + assert type(n) is LongSubclass + +def test_trunc_returns(): + # but!: (blame CPython 2.7) + class Integral(object): + def __int__(self): + return 42 + class TruncReturnsNonLong(object): + def __trunc__(self): + return Integral() + n = long(TruncReturnsNonLong()) + assert type(n) is long + assert n == 42 + + class LongSubclass(long): + pass + class TruncReturnsNonInt(object): + def __trunc__(self): + return LongSubclass(42) + n = long(TruncReturnsNonInt()) + assert n == 42 + assert type(n) is LongSubclass + +def test_long_before_string(): + class A(str): + def __long__(self): + return 42 + assert long(A('abc')) == 42 + +def test_long_errors(): + raises(TypeError, long, 12, 12) + raises(ValueError, long, 'xxxxxx?', 12) + +def test_conjugate(): + assert (7L).conjugate() == 7L + assert (-7L).conjugate() == -7L + + class L(long): + pass + + assert type(L(7).conjugate()) is long + + class L(long): + def __pos__(self): + return 43 + assert L(7).conjugate() == 7L + +def test_bit_length(): + assert 8L.bit_length() == 4 + assert (-1<<40).bit_length() == 41 + assert ((2**31)-1).bit_length() == 31 + +def test_negative_zero(): + x = eval("-0L") + assert x == 0L + +def test_mix_int_and_long(): + class IntLongMixClass(object): + def __int__(self): + return 42L + + def __long__(self): + return 64 + + mixIntAndLong = IntLongMixClass() + as_long = long(mixIntAndLong) + assert type(as_long) is long + assert as_long == 64 + +def test_long_real(): + class A(long): pass + b = A(5).real + assert type(b) is long + +def test__int__(): + class A(long): + def __int__(self): + return 42 + + assert int(long(3)) == long(3) + assert int(A(13)) == 42 + +def test_long_error_msg(): + e = raises(TypeError, long, []) + assert str(e.value) == ( + "long() argument must be a string or a number, not 'list'") + +def 
test_coerce(): + assert 3L.__coerce__(4L) == (3L, 4L) + assert 3L.__coerce__(4) == (3, 4) + assert 3L.__coerce__(object()) == NotImplemented + +def test_linear_long_base_16(): + # never finishes if long(_, 16) is not linear-time + size = 100000 + n = "a" * size + expected = (2 << (size * 4)) // 3 + assert long(n, 16) == expected diff --git a/pypy/objspace/std/test/test_longobject.py b/pypy/objspace/std/test/test_longobject.py --- a/pypy/objspace/std/test/test_longobject.py +++ b/pypy/objspace/std/test/test_longobject.py @@ -1,4 +1,3 @@ -import py from pypy.objspace.std import longobject as lobj from rpython.rlib.rbigint import rbigint @@ -36,417 +35,3 @@ x &= r.MASK w_obj = space.newlong_from_rarith_int(r(x)) assert space.bigint_w(w_obj).eq(rbigint.fromlong(x)) - - -class AppTestLong: - def test_trunc(self): - import math - assert math.trunc(1L) == 1L - assert math.trunc(-1L) == -1L - - def test_add(self): - x = 123L - assert int(x + 12443L) == 123 + 12443 - x = -20 - assert x + 2 + 3L + True == -14L - - def test_sub(self): - assert int(58543L - 12332L) == 58543 - 12332 - assert int(58543L - 12332) == 58543 - 12332 - assert int(58543 - 12332L) == 58543 - 12332 - x = 237123838281233L - assert x * 12 == x * 12L - - def test_mul(self): - x = 363L - assert x * 2 ** 40 == x << 40 - - def test_truediv(self): - exec "from __future__ import division; a = 31415926L / 10000000L" - assert a == 3.1415926 - - def test_floordiv(self): - x = 31415926L - a = x // 10000000L - assert a == 3L - - def test_int_floordiv(self): - import sys - - x = 3000L - a = x // 1000 - assert a == 3L - - x = 3000L - a = x // -1000 - assert a == -3L - - x = 3000L - raises(ZeroDivisionError, "x // 0") - - n = sys.maxint+1 - assert n / int(-n) == -1L - - def test_numerator_denominator(self): - assert (1L).numerator == 1L - assert (1L).denominator == 1L - assert (42L).numerator == 42L - assert (42L).denominator == 1L - - def test_compare(self): - Z = 0 - ZL = 0L - - assert Z == ZL - assert not (Z != ZL) - assert ZL == Z - assert not (ZL != Z) - assert Z <= ZL - assert not (Z < ZL) - assert ZL <= ZL - assert not (ZL < ZL) - - for BIG in (1L, 1L << 62, 1L << 9999): - assert not (Z == BIG) - assert Z != BIG - assert not (BIG == Z) - assert BIG != Z - assert not (ZL == BIG) - assert ZL != BIG - assert Z <= BIG - assert Z < BIG - assert not (BIG <= Z) - assert not (BIG < Z) - assert ZL <= BIG - assert ZL < BIG - assert not (BIG <= ZL) - assert not (BIG < ZL) - assert not (Z <= -BIG) - assert not (Z < -BIG) - assert -BIG <= Z - assert -BIG < Z - assert not (ZL <= -BIG) - assert not (ZL < -BIG) - assert -BIG <= ZL - assert -BIG < ZL - # - assert not (BIG < int(BIG)) - assert (BIG <= int(BIG)) - assert (BIG == int(BIG)) - assert not (BIG != int(BIG)) - assert not (BIG > int(BIG)) - assert (BIG >= int(BIG)) - # - assert (BIG < int(BIG)+1) - assert (BIG <= int(BIG)+1) - assert not (BIG == int(BIG)+1) - assert (BIG != int(BIG)+1) - assert not (BIG > int(BIG)+1) - assert not (BIG >= int(BIG)+1) - # - assert not (BIG < int(BIG)-1) - assert not (BIG <= int(BIG)-1) - assert not (BIG == int(BIG)-1) - assert (BIG != int(BIG)-1) - assert (BIG > int(BIG)-1) - assert (BIG >= int(BIG)-1) - # - assert not (int(BIG) < BIG) - assert (int(BIG) <= BIG) - assert (int(BIG) == BIG) - assert not (int(BIG) != BIG) - assert not (int(BIG) > BIG) - assert (int(BIG) >= BIG) - # - assert not (int(BIG)+1 < BIG) - assert not (int(BIG)+1 <= BIG) - assert not (int(BIG)+1 == BIG) - assert (int(BIG)+1 != BIG) - assert (int(BIG)+1 > BIG) - assert (int(BIG)+1 >= BIG) - # 
- assert (int(BIG)-1 < BIG) - assert (int(BIG)-1 <= BIG) - assert not (int(BIG)-1 == BIG) - assert (int(BIG)-1 != BIG) - assert not (int(BIG)-1 > BIG) - assert not (int(BIG)-1 >= BIG) - - def test_conversion(self): - class long2(long): - pass - x = 1L - x = long2(x<<100) - y = int(x) - assert type(y) == long - assert type(+long2(5)) is long - assert type(long2(5) << 0) is long - assert type(long2(5) >> 0) is long - assert type(long2(5) + 0) is long - assert type(long2(5) - 0) is long - assert type(long2(5) * 1) is long - assert type(1 * long2(5)) is long - assert type(0 + long2(5)) is long - assert type(-long2(0)) is long - assert type(long2(5) // 1) is long - - def test_shift(self): - assert 65l >> 2l == 16l - assert 65l >> 2 == 16l - assert 65 >> 2l == 16l - assert 65l << 2l == 65l * 4 - assert 65l << 2 == 65l * 4 - assert 65 << 2l == 65l * 4 - raises(ValueError, "1L << -1L") - raises(ValueError, "1L << -1") - raises(OverflowError, "1L << (2 ** 100)") - raises(ValueError, "1L >> -1L") - raises(ValueError, "1L >> -1") - raises(OverflowError, "1L >> (2 ** 100)") - - def test_pow(self): - x = 0L - assert pow(x, 0L, 1L) == 0L - assert pow(-1L, -1L) == -1.0 - assert pow(2 ** 68, 0.5) == 2.0 ** 34 - assert pow(2 ** 68, 2) == 2 ** 136 - raises(TypeError, pow, 2l, -1, 3) - raises(ValueError, pow, 2l, 5, 0) - - # some rpow tests - assert pow(0, 0L, 1L) == 0L - assert pow(-1, -1L) == -1.0 - - def test_int_pow(self): - x = 2L - assert pow(x, 2) == 4L - assert pow(x, 2, 2) == 0L - assert pow(x, 2, 3L) == 1L - - def test_getnewargs(self): - assert 0L .__getnewargs__() == (0L,) - assert (-1L) .__getnewargs__() == (-1L,) - - def test_divmod(self): - def check_division(x, y): - q, r = divmod(x, y) - pab, pba = x*y, y*x - assert pab == pba - assert q == x // y - assert r == x % y - assert x == q*y + r - if y > 0: - assert 0 <= r < y - else: - assert y < r <= 0 - for x in [-1L, 0L, 1L, 2L ** 100 - 1, -2L ** 100 - 1]: - for y in [-105566530L, -1L, 1L, 1034522340L]: - print "checking division for %s, %s" % (x, y) - check_division(x, y) - check_division(x, int(y)) - check_division(int(x), y) - # special case from python tests: - s1 = 33 - s2 = 2 - x = 16565645174462751485571442763871865344588923363439663038777355323778298703228675004033774331442052275771343018700586987657790981527457655176938756028872904152013524821759375058141439 - x >>= s1*16 - y = 10953035502453784575 - y >>= s2*16 - x = 0x3FE0003FFFFC0001FFFL - y = 0x9800FFC1L - check_division(x, y) - raises(ZeroDivisionError, "x // 0L") - raises(ZeroDivisionError, "x % 0L") - raises(ZeroDivisionError, divmod, x, 0L) - raises(ZeroDivisionError, "x // 0") - raises(ZeroDivisionError, "x % 0") - raises(ZeroDivisionError, divmod, x, 0) - - def test_int_divmod(self): - q, r = divmod(100L, 11) - assert q == 9L - assert r == 1L - - def test_format(self): - assert repr(12345678901234567890) == '12345678901234567890L' - assert str(12345678901234567890) == '12345678901234567890' - assert hex(0x1234567890ABCDEFL) == '0x1234567890abcdefL' - assert oct(01234567012345670L) == '01234567012345670L' - - def test_bits(self): - x = 0xAAAAAAAAL - assert x | 0x55555555L == 0xFFFFFFFFL - assert x & 0x55555555L == 0x00000000L - assert x ^ 0x55555555L == 0xFFFFFFFFL - assert -x | 0x55555555L == -0xAAAAAAA9L - assert x | 0x555555555L == 0x5FFFFFFFFL - assert x & 0x555555555L == 0x000000000L - assert x ^ 0x555555555L == 0x5FFFFFFFFL - - def test_hash(self): - # ints have the same hash as equal longs - for i in range(-4, 14): - assert hash(i) == hash(long(i)) == long(i).__hash__() 
- # might check too much -- it's ok to change the hashing algorithm - assert hash(123456789L) == 123456789 - assert hash(1234567890123456789L) in ( - -1895067127, # with 32-bit platforms - 1234567890123456789) # with 64-bit platforms - - def test_math_log(self): - import math - raises(ValueError, math.log, 0L) - raises(ValueError, math.log, -1L) - raises(ValueError, math.log, -2L) - raises(ValueError, math.log, -(1L << 10000)) - #raises(ValueError, math.log, 0) - raises(ValueError, math.log, -1) - raises(ValueError, math.log, -2) - - def test_long(self): - import sys - n = -sys.maxint-1 - assert long(n) == n - assert str(long(n)) == str(n) - a = buffer('123') - assert long(a) == 123L - - def test_huge_longs(self): - import operator - x = 1L - huge = x << 40000L - raises(OverflowError, float, huge) - raises(OverflowError, operator.truediv, huge, 3) - raises(OverflowError, operator.truediv, huge, 3L) - - def test_just_trunc(self): - class myint(object): - def __trunc__(self): - return 42 - assert long(myint()) == 42 - - def test_override___long__(self): - class mylong(long): - def __long__(self): - return 42L - assert long(mylong(21)) == 42L - class myotherlong(long): - pass - assert long(myotherlong(21)) == 21L - - def test___long__(self): - class A(object): - def __long__(self): - return 42 - assert long(A()) == 42L - class B(object): - def __int__(self): - return 42 - raises(TypeError, long, B()) - - class LongSubclass(long): - pass - class ReturnsLongSubclass(object): - def __long__(self): - return LongSubclass(42L) - n = long(ReturnsLongSubclass()) - assert n == 42 - assert type(n) is LongSubclass - - def test_trunc_returns(self): - # but!: (blame CPython 2.7) - class Integral(object): - def __int__(self): - return 42 - class TruncReturnsNonLong(object): - def __trunc__(self): - return Integral() - n = long(TruncReturnsNonLong()) - assert type(n) is long - assert n == 42 - - class LongSubclass(long): - pass - class TruncReturnsNonInt(object): - def __trunc__(self): - return LongSubclass(42) - n = long(TruncReturnsNonInt()) - assert n == 42 - assert type(n) is LongSubclass - - def test_long_before_string(self): - class A(str): - def __long__(self): - return 42 - assert long(A('abc')) == 42 - - def test_long_errors(self): - raises(TypeError, long, 12, 12) - raises(ValueError, long, 'xxxxxx?', 12) - - def test_conjugate(self): - assert (7L).conjugate() == 7L - assert (-7L).conjugate() == -7L - - class L(long): - pass - - assert type(L(7).conjugate()) is long - - class L(long): - def __pos__(self): - return 43 - assert L(7).conjugate() == 7L - - def test_bit_length(self): - assert 8L.bit_length() == 4 - assert (-1<<40).bit_length() == 41 - assert ((2**31)-1).bit_length() == 31 - - def test_negative_zero(self): - x = eval("-0L") - assert x == 0L - - def test_mix_int_and_long(self): - class IntLongMixClass(object): - def __int__(self): - return 42L - - def __long__(self): - return 64 - - mixIntAndLong = IntLongMixClass() - as_long = long(mixIntAndLong) - assert type(as_long) is long - assert as_long == 64 - - def test_long_real(self): - class A(long): pass - b = A(5).real - assert type(b) is long - - def test__int__(self): - class A(long): - def __int__(self): - return 42 - - assert int(long(3)) == long(3) - assert int(A(13)) == 42 - - def test_long_error_msg(self): - e = raises(TypeError, long, []) - assert str(e.value) == ( - "long() argument must be a string or a number, not 'list'") - - def test_coerce(self): - assert 3L.__coerce__(4L) == (3L, 4L) - assert 3L.__coerce__(4) == (3, 4) - 
assert 3L.__coerce__(object()) == NotImplemented - - def test_linear_long_base_16(self): - # never finishes if long(_, 16) is not linear-time - size = 100000 - n = "a" * size - expected = (2 << (size * 4)) // 3 - assert long(n, 16) == expected - From pypy.commits at gmail.com Thu Aug 8 11:47:34 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 08 Aug 2019 08:47:34 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Move app-level tests to apptest_longobject.py Message-ID: <5d4c4416.1c69fb81.12b6b.4e8a@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97108:d6fbbd74e9d4 Date: 2019-08-08 16:45 +0100 http://bitbucket.org/pypy/pypy/changeset/d6fbbd74e9d4/ Log: Move app-level tests to apptest_longobject.py diff --git a/pypy/objspace/std/test/apptest_longobject.py b/pypy/objspace/std/test/apptest_longobject.py new file mode 100644 --- /dev/null +++ b/pypy/objspace/std/test/apptest_longobject.py @@ -0,0 +1,472 @@ +from pytest import raises + +def _long(obj): + # XXX: currently returns a W_LongObject but might return + # W_IntObject in the future + huge = 1 << 65 + return obj + huge - huge + +def test_trunc(): + import math + assert math.trunc(_long(1)) == _long(1) + assert math.trunc(-_long(1)) == -_long(1) + +def test_add(): + x = _long(123) + assert int(x + _long(12443)) == 123 + 12443 + x = -20 + assert x + 2 + _long(3) + True == -_long(14) + +def test_sub(): + assert int(_long(58543) - _long(12332)) == 58543 - 12332 + assert int(_long(58543) - 12332) == 58543 - 12332 + assert int(58543 - _long(12332)) == 58543 - 12332 + x = _long(237123838281233) + assert x * 12 == x * _long(12) + +def test_mul(): + x = _long(363) + assert x * 2 ** 40 == x << 40 + +def test_truediv(): + a = _long(31415926) / _long(10000000) + assert a == 3.1415926 + +def test_floordiv(): + x = _long(31415926) + a = x // _long(10000000) + assert a == _long(3) + +def test_int_floordiv(): + import sys + long = _long + + x = long(3000) + a = x // 1000 + assert a == 3 + + x = long(3000) + a = x // -1000 + assert a == -3 + + x = long(3000) + raises(ZeroDivisionError, "x // 0") + + n = sys.maxsize + 1 + assert n / int(-n) == long(-1) + +def test_numerator_denominator(): + assert (_long(1)).numerator == _long(1) + assert (_long(1)).denominator == _long(1) + assert (_long(42)).numerator == _long(42) + assert (_long(42)).denominator == _long(1) + +def test_compare(): + Z = 0 + ZL = _long(0) + + assert Z == ZL + assert not (Z != ZL) + assert ZL == Z + assert not (ZL != Z) + assert Z <= ZL + assert not (Z < ZL) + assert ZL <= ZL + assert not (ZL < ZL) + + for BIG in (_long(1), _long(1) << 62, _long(1) << 9999): + assert not (Z == BIG) + assert Z != BIG + assert not (BIG == Z) + assert BIG != Z + assert not (ZL == BIG) + assert ZL != BIG + assert Z <= BIG + assert Z < BIG + assert not (BIG <= Z) + assert not (BIG < Z) + assert ZL <= BIG + assert ZL < BIG + assert not (BIG <= ZL) + assert not (BIG < ZL) + assert not (Z <= -BIG) + assert not (Z < -BIG) + assert -BIG <= Z + assert -BIG < Z + assert not (ZL <= -BIG) + assert not (ZL < -BIG) + assert -BIG <= ZL + assert -BIG < ZL + # + assert not (BIG < int(BIG)) + assert (BIG <= int(BIG)) + assert (BIG == int(BIG)) + assert not (BIG != int(BIG)) + assert not (BIG > int(BIG)) + assert (BIG >= int(BIG)) + # + assert (BIG < int(BIG)+1) + assert (BIG <= int(BIG)+1) + assert not (BIG == int(BIG)+1) + assert (BIG != int(BIG)+1) + assert not (BIG > int(BIG)+1) + assert not (BIG >= int(BIG)+1) + # + assert not (BIG < int(BIG)-1) + assert not (BIG <= int(BIG)-1) + assert not (BIG == 
int(BIG)-1) + assert (BIG != int(BIG)-1) + assert (BIG > int(BIG)-1) + assert (BIG >= int(BIG)-1) + # + assert not (int(BIG) < BIG) + assert (int(BIG) <= BIG) + assert (int(BIG) == BIG) + assert not (int(BIG) != BIG) + assert not (int(BIG) > BIG) + assert (int(BIG) >= BIG) + # + assert not (int(BIG)+1 < BIG) + assert not (int(BIG)+1 <= BIG) + assert not (int(BIG)+1 == BIG) + assert (int(BIG)+1 != BIG) + assert (int(BIG)+1 > BIG) + assert (int(BIG)+1 >= BIG) + # + assert (int(BIG)-1 < BIG) + assert (int(BIG)-1 <= BIG) + assert not (int(BIG)-1 == BIG) + assert (int(BIG)-1 != BIG) + assert not (int(BIG)-1 > BIG) + assert not (int(BIG)-1 >= BIG) + +def test_conversion(): + class long2(int): + pass + x = _long(1) + x = long2(x<<100) + y = int(x) + assert type(y) == int + assert type(+long2(5)) is int + assert type(long2(5) << 0) is int + assert type(long2(5) >> 0) is int + assert type(long2(5) + 0) is int + assert type(long2(5) - 0) is int + assert type(long2(5) * 1) is int + assert type(1 * long2(5)) is int + assert type(0 + long2(5)) is int + assert type(-long2(0)) is int + assert type(long2(5) // 1) is int + +def test_shift(): + long = _long + assert long(65) >> long(2) == long(16) + assert long(65) >> 2 == long(16) + assert 65 >> long(2) == long(16) + assert long(65) << long(2) == long(65) * 4 + assert long(65) << 2 == long(65) * 4 + assert 65 << long(2) == long(65) * 4 + raises(ValueError, "long(1) << long(-1)") + raises(ValueError, "long(1) << -1") + raises(OverflowError, "long(1) << (2 ** 100)") + raises(ValueError, "long(1) >> long(-1)") + raises(ValueError, "long(1) >> -1") + raises(OverflowError, "long(1) >> (2 ** 100)") + +def test_pow(): + long = _long + x = _long(0) + assert pow(x, _long(0), _long(1)) == _long(0) + assert pow(-_long(1), -_long(1)) == -1.0 + assert pow(2 ** 68, 0.5) == 2.0 ** 34 + assert pow(2 ** 68, 2) == 2 ** 136 + raises(ValueError, pow, long(2), -1, 3) + raises(ValueError, pow, long(2), 5, 0) + + # some rpow tests + assert pow(0, long(0), long(1)) == long(0) + assert pow(-1, long(-1)) == -1.0 + +def test_int_pow(): + long = _long + x = long(2) + assert pow(x, 2) == long(4) + assert pow(x, 2, 2) == long(0) + assert pow(x, 2, long(3)) == long(1) + +def test_getnewargs(): + assert _long(0) .__getnewargs__() == (_long(0),) + assert (-_long(1)) .__getnewargs__() == (-_long(1),) + +def test_divmod(): + long = _long + def check_division(x, y): + q, r = divmod(x, y) + pab, pba = x*y, y*x + assert pab == pba + assert q == x // y + assert r == x % y + assert x == q*y + r + if y > 0: + assert 0 <= r < y + else: + assert y < r <= 0 + for x in [-_long(1), _long(0), _long(1), _long(2) ** 100 - 1, -_long(2) ** 100 - 1]: + for y in [-_long(105566530), -_long(1), _long(1), _long(1034522340)]: + print("checking division for %s, %s" % (x, y)) + check_division(x, y) + check_division(x, int(y)) + check_division(int(x), y) + # special case from python tests: + s1 = 33 + s2 = 2 + x = 16565645174462751485571442763871865344588923363439663038777355323778298703228675004033774331442052275771343018700586987657790981527457655176938756028872904152013524821759375058141439 + x >>= s1*16 + y = 10953035502453784575 + y >>= s2*16 + x = 0x3FE0003FFFFC0001FFF + y = _long(0x9800FFC1) + check_division(x, y) + raises(ZeroDivisionError, "x // _long(0)") + divmod(3, _long(4)) + raises(ZeroDivisionError, "x % long(0)") + raises(ZeroDivisionError, divmod, x, long(0)) + raises(ZeroDivisionError, "x // 0") + raises(ZeroDivisionError, "x % 0") + raises(ZeroDivisionError, divmod, x, 0) + +def 
test_int_divmod(): + long = _long + q, r = divmod(long(100), 11) + assert q == 9 + assert r == 1 + +def test_format(): + assert repr(12345678901234567890) == '12345678901234567890' + assert str(12345678901234567890) == '12345678901234567890' + assert hex(_long(0x1234567890ABCDEF)) == '0x1234567890abcdef' + assert oct(_long(0o1234567012345670)) == '0o1234567012345670' + +def test_bits(): + x = _long(0xAAAAAAAA) + assert x | _long(0x55555555) == _long(0xFFFFFFFF) + assert x & _long(0x55555555) == _long(0x00000000) + assert x ^ _long(0x55555555) == _long(0xFFFFFFFF) + assert -x | _long(0x55555555) == -_long(0xAAAAAAA9) + assert x | _long(0x555555555) == _long(0x5FFFFFFFF) + assert x & _long(0x555555555) == _long(0x000000000) + assert x ^ _long(0x555555555) == _long(0x5FFFFFFFF) + +def test_hash(): + import sys + modulus = sys.hash_info.modulus + def longhash(x): + return hash(_long(x)) + for x in (list(range(200)) + + [1234567890123456789, 18446743523953737727, + 987685321987685321987685321987685321987685321, + 10**50]): + y = x % modulus + assert longhash(x) == longhash(y) + assert longhash(-x) == longhash(-y) + assert longhash(modulus - 1) == modulus - 1 + assert longhash(modulus) == 0 + assert longhash(modulus + 1) == 1 + + assert longhash(-1) == -2 + value = -(modulus + 1) + assert longhash(value) == -2 + assert longhash(value * 2 + 1) == -2 + assert longhash(value * 4 + 3) == -2 + +def test_hash_2(): + class AAA: + def __hash__(a): + return _long(-1) + assert hash(AAA()) == -2 + +def test_math_log(): + import math + raises(ValueError, math.log, _long(0)) + raises(ValueError, math.log, -_long(1)) + raises(ValueError, math.log, -_long(2)) + raises(ValueError, math.log, -(_long(1) << 10000)) + #raises(ValueError, math.log, 0) + raises(ValueError, math.log, -1) + raises(ValueError, math.log, -2) + +def test_long(): + import sys + n = -sys.maxsize-1 + assert int(n) == n + assert str(int(n)) == str(n) + a = memoryview(b'123') + assert int(a) == _long(123) + +def test_huge_longs(): + import operator + x = _long(1) + huge = x << _long(40000) + raises(OverflowError, float, huge) + raises(OverflowError, operator.truediv, huge, 3) + raises(OverflowError, operator.truediv, huge, _long(3)) + +def test_just_trunc(): + class myint(object): + def __trunc__(self): + return 42 + assert int(myint()) == 42 + +def test_override___int__(): + class myint(int): + def __int__(self): + return 42 + assert int(myint(21)) == 42 + class myotherint(int): + pass + assert int(myotherint(21)) == 21 + +def test___int__(): + class A(object): + def __int__(self): + return 42 + assert int(A()) == 42 + + class IntSubclass(int): + pass + class ReturnsIntSubclass(object): + def __int__(self): + return IntSubclass(42) + n = int(ReturnsIntSubclass()) + assert n == 42 + # cpython 3.6 fixed behaviour to actually return type int here + assert type(n) is int + +def test_trunc_returns(): + # but!: (blame CPython 2.7) + class Integral(object): + def __int__(self): + return 42 + class TruncReturnsNonInt(object): + def __trunc__(self): + return Integral() + n = int(TruncReturnsNonInt()) + assert type(n) is int + assert n == 42 + + class IntSubclass(int): + pass + class TruncReturnsNonInt(object): + def __trunc__(self): + return IntSubclass(42) + n = int(TruncReturnsNonInt()) + assert n == 42 + assert type(n) is int + +def test_long_before_string(): + class A(str): + def __int__(self): + return 42 + assert int(A('abc')) == 42 + +def test_conjugate(): + assert (_long(7)).conjugate() == _long(7) + assert (-_long(7)).conjugate() == -_long(7) 
+ + class L(int): + pass + + assert type(L(7).conjugate()) is int + + class L(int): + def __pos__(self): + return 43 + assert L(7).conjugate() == _long(7) + +def test_bit_length(): + assert _long(8).bit_length() == 4 + assert (-1<<40).bit_length() == 41 + assert ((2**31)-1).bit_length() == 31 + +def test_from_bytes(): + assert int.from_bytes(b'c', 'little') == 99 + assert int.from_bytes(b'\x01\x01', 'little') == 257 + assert int.from_bytes(b'\x01\x00', 'big') == 256 + assert int.from_bytes(b'\x00\x80', 'little', signed=True) == -32768 + assert int.from_bytes([255, 0, 0], 'big', signed=True) == -65536 + raises(TypeError, int.from_bytes, 0, 'big') + raises(TypeError, int.from_bytes, '', 'big') + raises(ValueError, int.from_bytes, b'c', 'foo') + +def test_to_bytes(): + assert 65535 .to_bytes(2, 'big') == b'\xff\xff' + assert (-8388608).to_bytes(3, 'little', signed=True) == b'\x00\x00\x80' + raises(OverflowError, (-5).to_bytes, 1, 'big') + raises(ValueError, (-5).to_bytes, 1, 'foo') + assert 65535 .to_bytes(length=2, byteorder='big') == b'\xff\xff' + +def test_negative_zero(): + x = eval("-_long(0)") + assert x == _long(0) + +def test_long_real(): + class A(int): pass + b = A(5).real + assert type(b) is int + +#@py.test.mark.skipif("not config.option.runappdirect and sys.maxunicode == 0xffff") +def test_long_from_unicode(): + raises(ValueError, int, '123L') + assert int('L', 22) == 21 + s = '\U0001D7CF\U0001D7CE' # 𝟏𝟎 + assert int(s) == 10 + +def test_long_from_bytes(): + assert int(b'1234') == 1234 + +def test_invalid_literal_message(): + try: + int('hello àèìò') + except ValueError as e: + assert 'hello àèìò' in str(e) + else: + assert False, 'did not raise' + +def test_base_overflow(): + raises(ValueError, int, '42', 2**63) + +def test_long_real(): + class A(int): pass + b = A(5).real + assert type(b) is int + +def test__int__(): + class A(int): + def __int__(self): + return 42 + + assert int(int(3)) == int(3) + assert int(A(13)) == 42 + +def test_long_error_msg(): + e = raises(TypeError, int, []) + assert str(e.value) == ( + "int() argument must be a string, a bytes-like object " + "or a number, not 'list'") + +def test_linear_long_base_16(): + # never finishes if int(_, 16) is not linear-time + size = 100000 + n = "a" * size + expected = (2 << (size * 4)) // 3 + assert int(n, 16) == expected + +def test_large_identity(): + import sys + if '__pypy__' not in sys.builtin_module_names: + skip('PyPy only') + a = sys.maxsize + 1 + b = sys.maxsize + 2 + assert a is not b + b -= 1 + assert a is b diff --git a/pypy/objspace/std/test/test_longobject.py b/pypy/objspace/std/test/test_longobject.py --- a/pypy/objspace/std/test/test_longobject.py +++ b/pypy/objspace/std/test/test_longobject.py @@ -1,5 +1,4 @@ # -*- encoding: utf-8 -*- -import py from pypy.objspace.std import longobject as lobj from rpython.rlib.rbigint import rbigint @@ -37,477 +36,3 @@ x &= r.MASK w_obj = space.newlong_from_rarith_int(r(x)) assert space.bigint_w(w_obj).eq(rbigint.fromlong(x)) - - -class AppTestLong: - - def w__long(self, obj): - # XXX: currently returns a W_LongObject but might return - # W_IntObject in the future - huge = 1 << 65 - return obj + huge - huge - - def test_trunc(self): - import math - assert math.trunc(self._long(1)) == self._long(1) - assert math.trunc(-self._long(1)) == -self._long(1) - - def test_add(self): - x = self._long(123) - assert int(x + self._long(12443)) == 123 + 12443 - x = -20 - assert x + 2 + self._long(3) + True == -self._long(14) - - def test_sub(self): - assert int(self._long(58543) - 
self._long(12332)) == 58543 - 12332 - assert int(self._long(58543) - 12332) == 58543 - 12332 - assert int(58543 - self._long(12332)) == 58543 - 12332 - x = self._long(237123838281233) - assert x * 12 == x * self._long(12) - - def test_mul(self): - x = self._long(363) - assert x * 2 ** 40 == x << 40 - - def test_truediv(self): - a = self._long(31415926) / self._long(10000000) - assert a == 3.1415926 - - def test_floordiv(self): - x = self._long(31415926) - a = x // self._long(10000000) - assert a == self._long(3) - - def test_int_floordiv(self): - import sys - long = self._long - - x = long(3000) - a = x // 1000 - assert a == 3 - - x = long(3000) - a = x // -1000 - assert a == -3 - - x = long(3000) - raises(ZeroDivisionError, "x // 0") - - n = sys.maxsize + 1 - assert n / int(-n) == long(-1) - - def test_numerator_denominator(self): - assert (self._long(1)).numerator == self._long(1) - assert (self._long(1)).denominator == self._long(1) - assert (self._long(42)).numerator == self._long(42) - assert (self._long(42)).denominator == self._long(1) - - def test_compare(self): - Z = 0 - ZL = self._long(0) - - assert Z == ZL - assert not (Z != ZL) - assert ZL == Z - assert not (ZL != Z) - assert Z <= ZL - assert not (Z < ZL) - assert ZL <= ZL - assert not (ZL < ZL) - - for BIG in (self._long(1), self._long(1) << 62, self._long(1) << 9999): - assert not (Z == BIG) - assert Z != BIG - assert not (BIG == Z) - assert BIG != Z - assert not (ZL == BIG) - assert ZL != BIG - assert Z <= BIG - assert Z < BIG - assert not (BIG <= Z) - assert not (BIG < Z) - assert ZL <= BIG - assert ZL < BIG - assert not (BIG <= ZL) - assert not (BIG < ZL) - assert not (Z <= -BIG) - assert not (Z < -BIG) - assert -BIG <= Z - assert -BIG < Z - assert not (ZL <= -BIG) - assert not (ZL < -BIG) - assert -BIG <= ZL - assert -BIG < ZL - # - assert not (BIG < int(BIG)) - assert (BIG <= int(BIG)) - assert (BIG == int(BIG)) - assert not (BIG != int(BIG)) - assert not (BIG > int(BIG)) - assert (BIG >= int(BIG)) - # - assert (BIG < int(BIG)+1) - assert (BIG <= int(BIG)+1) - assert not (BIG == int(BIG)+1) - assert (BIG != int(BIG)+1) - assert not (BIG > int(BIG)+1) - assert not (BIG >= int(BIG)+1) - # - assert not (BIG < int(BIG)-1) - assert not (BIG <= int(BIG)-1) - assert not (BIG == int(BIG)-1) - assert (BIG != int(BIG)-1) - assert (BIG > int(BIG)-1) - assert (BIG >= int(BIG)-1) - # - assert not (int(BIG) < BIG) - assert (int(BIG) <= BIG) - assert (int(BIG) == BIG) - assert not (int(BIG) != BIG) - assert not (int(BIG) > BIG) - assert (int(BIG) >= BIG) - # - assert not (int(BIG)+1 < BIG) - assert not (int(BIG)+1 <= BIG) - assert not (int(BIG)+1 == BIG) - assert (int(BIG)+1 != BIG) - assert (int(BIG)+1 > BIG) - assert (int(BIG)+1 >= BIG) - # - assert (int(BIG)-1 < BIG) - assert (int(BIG)-1 <= BIG) - assert not (int(BIG)-1 == BIG) - assert (int(BIG)-1 != BIG) - assert not (int(BIG)-1 > BIG) - assert not (int(BIG)-1 >= BIG) - - def test_conversion(self): - class long2(int): - pass - x = self._long(1) - x = long2(x<<100) - y = int(x) - assert type(y) == int - assert type(+long2(5)) is int - assert type(long2(5) << 0) is int - assert type(long2(5) >> 0) is int - assert type(long2(5) + 0) is int - assert type(long2(5) - 0) is int - assert type(long2(5) * 1) is int - assert type(1 * long2(5)) is int - assert type(0 + long2(5)) is int - assert type(-long2(0)) is int - assert type(long2(5) // 1) is int - - def test_shift(self): - long = self._long - assert long(65) >> long(2) == long(16) - assert long(65) >> 2 == long(16) - assert 65 >> 
long(2) == long(16) - assert long(65) << long(2) == long(65) * 4 - assert long(65) << 2 == long(65) * 4 - assert 65 << long(2) == long(65) * 4 - raises(ValueError, "long(1) << long(-1)") - raises(ValueError, "long(1) << -1") - raises(OverflowError, "long(1) << (2 ** 100)") - raises(ValueError, "long(1) >> long(-1)") - raises(ValueError, "long(1) >> -1") - raises(OverflowError, "long(1) >> (2 ** 100)") - - def test_pow(self): - long = self._long - x = self._long(0) - assert pow(x, self._long(0), self._long(1)) == self._long(0) - assert pow(-self._long(1), -self._long(1)) == -1.0 - assert pow(2 ** 68, 0.5) == 2.0 ** 34 - assert pow(2 ** 68, 2) == 2 ** 136 - raises(ValueError, pow, long(2), -1, 3) - raises(ValueError, pow, long(2), 5, 0) - - # some rpow tests - assert pow(0, long(0), long(1)) == long(0) - assert pow(-1, long(-1)) == -1.0 - - def test_int_pow(self): - long = self._long - x = long(2) - assert pow(x, 2) == long(4) - assert pow(x, 2, 2) == long(0) - assert pow(x, 2, long(3)) == long(1) - - def test_getnewargs(self): - assert self._long(0) .__getnewargs__() == (self._long(0),) - assert (-self._long(1)) .__getnewargs__() == (-self._long(1),) - - def test_divmod(self): - long = self._long - def check_division(x, y): - q, r = divmod(x, y) - pab, pba = x*y, y*x - assert pab == pba - assert q == x // y - assert r == x % y - assert x == q*y + r - if y > 0: - assert 0 <= r < y - else: - assert y < r <= 0 - for x in [-self._long(1), self._long(0), self._long(1), self._long(2) ** 100 - 1, -self._long(2) ** 100 - 1]: - for y in [-self._long(105566530), -self._long(1), self._long(1), self._long(1034522340)]: - print("checking division for %s, %s" % (x, y)) - check_division(x, y) - check_division(x, int(y)) - check_division(int(x), y) - # special case from python tests: - s1 = 33 - s2 = 2 - x = 16565645174462751485571442763871865344588923363439663038777355323778298703228675004033774331442052275771343018700586987657790981527457655176938756028872904152013524821759375058141439 - x >>= s1*16 - y = 10953035502453784575 - y >>= s2*16 - x = 0x3FE0003FFFFC0001FFF - y = self._long(0x9800FFC1) - check_division(x, y) - raises(ZeroDivisionError, "x // self._long(0)") - divmod(3, self._long(4)) - raises(ZeroDivisionError, "x % long(0)") - raises(ZeroDivisionError, divmod, x, long(0)) - raises(ZeroDivisionError, "x // 0") - raises(ZeroDivisionError, "x % 0") - raises(ZeroDivisionError, divmod, x, 0) - - def test_int_divmod(self): - long = self._long - q, r = divmod(long(100), 11) - assert q == 9 - assert r == 1 - - def test_format(self): - assert repr(12345678901234567890) == '12345678901234567890' - assert str(12345678901234567890) == '12345678901234567890' - assert hex(self._long(0x1234567890ABCDEF)) == '0x1234567890abcdef' - assert oct(self._long(0o1234567012345670)) == '0o1234567012345670' - - def test_bits(self): - x = self._long(0xAAAAAAAA) - assert x | self._long(0x55555555) == self._long(0xFFFFFFFF) - assert x & self._long(0x55555555) == self._long(0x00000000) - assert x ^ self._long(0x55555555) == self._long(0xFFFFFFFF) - assert -x | self._long(0x55555555) == -self._long(0xAAAAAAA9) - assert x | self._long(0x555555555) == self._long(0x5FFFFFFFF) - assert x & self._long(0x555555555) == self._long(0x000000000) - assert x ^ self._long(0x555555555) == self._long(0x5FFFFFFFF) - - def test_hash(self): - import sys - modulus = sys.hash_info.modulus - def longhash(x): - return hash(self._long(x)) - for x in (list(range(200)) + - [1234567890123456789, 18446743523953737727, - 
987685321987685321987685321987685321987685321, - 10**50]): - y = x % modulus - assert longhash(x) == longhash(y) - assert longhash(-x) == longhash(-y) - assert longhash(modulus - 1) == modulus - 1 - assert longhash(modulus) == 0 - assert longhash(modulus + 1) == 1 - - assert longhash(-1) == -2 - value = -(modulus + 1) - assert longhash(value) == -2 - assert longhash(value * 2 + 1) == -2 - assert longhash(value * 4 + 3) == -2 - - def test_hash_2(self): - class AAA: - def __hash__(a): - return self._long(-1) - assert hash(AAA()) == -2 - - def test_math_log(self): - import math - raises(ValueError, math.log, self._long(0)) - raises(ValueError, math.log, -self._long(1)) - raises(ValueError, math.log, -self._long(2)) - raises(ValueError, math.log, -(self._long(1) << 10000)) - #raises(ValueError, math.log, 0) - raises(ValueError, math.log, -1) - raises(ValueError, math.log, -2) - - def test_long(self): - import sys - n = -sys.maxsize-1 - assert int(n) == n - assert str(int(n)) == str(n) - a = memoryview(b'123') - assert int(a) == self._long(123) - - def test_huge_longs(self): - import operator - x = self._long(1) - huge = x << self._long(40000) - raises(OverflowError, float, huge) - raises(OverflowError, operator.truediv, huge, 3) - raises(OverflowError, operator.truediv, huge, self._long(3)) - - def test_just_trunc(self): - class myint(object): - def __trunc__(self): - return 42 - assert int(myint()) == 42 - - def test_override___int__(self): - class myint(int): - def __int__(self): - return 42 - assert int(myint(21)) == 42 - class myotherint(int): - pass - assert int(myotherint(21)) == 21 - - def test___int__(self): - class A(object): - def __int__(self): - return 42 - assert int(A()) == 42 - - class IntSubclass(int): - pass - class ReturnsIntSubclass(object): - def __int__(self): - return IntSubclass(42) - n = int(ReturnsIntSubclass()) - assert n == 42 - # cpython 3.6 fixed behaviour to actually return type int here - assert type(n) is int - - def test_trunc_returns(self): - # but!: (blame CPython 2.7) - class Integral(object): - def __int__(self): - return 42 - class TruncReturnsNonInt(object): - def __trunc__(self): - return Integral() - n = int(TruncReturnsNonInt()) - assert type(n) is int - assert n == 42 - - class IntSubclass(int): - pass - class TruncReturnsNonInt(object): - def __trunc__(self): - return IntSubclass(42) - n = int(TruncReturnsNonInt()) - assert n == 42 - assert type(n) is int - - def test_long_before_string(self): - class A(str): - def __int__(self): - return 42 - assert int(A('abc')) == 42 - - def test_conjugate(self): - assert (self._long(7)).conjugate() == self._long(7) - assert (-self._long(7)).conjugate() == -self._long(7) - - class L(int): - pass - - assert type(L(7).conjugate()) is int - - class L(int): - def __pos__(self): - return 43 - assert L(7).conjugate() == self._long(7) - - def test_bit_length(self): - assert self._long(8).bit_length() == 4 - assert (-1<<40).bit_length() == 41 - assert ((2**31)-1).bit_length() == 31 - - def test_from_bytes(self): - assert int.from_bytes(b'c', 'little') == 99 - assert int.from_bytes(b'\x01\x01', 'little') == 257 - assert int.from_bytes(b'\x01\x00', 'big') == 256 - assert int.from_bytes(b'\x00\x80', 'little', signed=True) == -32768 - assert int.from_bytes([255, 0, 0], 'big', signed=True) == -65536 - raises(TypeError, int.from_bytes, 0, 'big') - raises(TypeError, int.from_bytes, '', 'big') - raises(ValueError, int.from_bytes, b'c', 'foo') - - def test_to_bytes(self): - assert 65535 .to_bytes(2, 'big') == b'\xff\xff' - assert 
(-8388608).to_bytes(3, 'little', signed=True) == b'\x00\x00\x80' - raises(OverflowError, (-5).to_bytes, 1, 'big') - raises(ValueError, (-5).to_bytes, 1, 'foo') - assert 65535 .to_bytes(length=2, byteorder='big') == b'\xff\xff' - - def test_negative_zero(self): - x = eval("-self._long(0)") - assert x == self._long(0) - - def test_long_real(self): - class A(int): pass - b = A(5).real - assert type(b) is int - - @py.test.mark.skipif("not config.option.runappdirect and sys.maxunicode == 0xffff") - def test_long_from_unicode(self): - raises(ValueError, int, '123L') - assert int('L', 22) == 21 - s = '\U0001D7CF\U0001D7CE' # 𝟏𝟎 - assert int(s) == 10 - - def test_long_from_bytes(self): - assert int(b'1234') == 1234 - - def test_invalid_literal_message(self): - try: - int('hello àèìò') - except ValueError as e: - assert 'hello àèìò' in str(e) - else: - assert False, 'did not raise' - - def test_base_overflow(self): - raises(ValueError, int, '42', 2**63) - - def test_long_real(self): - class A(int): pass - b = A(5).real - assert type(b) is int - - def test__int__(self): - class A(int): - def __int__(self): - return 42 - - assert int(int(3)) == int(3) - assert int(A(13)) == 42 - - def test_long_error_msg(self): - e = raises(TypeError, int, []) - assert str(e.value) == ( - "int() argument must be a string, a bytes-like object " - "or a number, not 'list'") - - def test_linear_long_base_16(self): - # never finishes if int(_, 16) is not linear-time - size = 100000 - n = "a" * size - expected = (2 << (size * 4)) // 3 - assert int(n, 16) == expected - - def test_large_identity(self): - import sys - if '__pypy__' not in sys.builtin_module_names: - skip('PyPy only') - a = sys.maxsize + 1 - b = sys.maxsize + 2 - assert a is not b - b -= 1 - assert a is b From pypy.commits at gmail.com Thu Aug 8 13:03:16 2019 From: pypy.commits at gmail.com (mattip) Date: Thu, 08 Aug 2019 10:03:16 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: test, fix raising SystemExit in atexit Message-ID: <5d4c55d4.1c69fb81.d9e5.3c3c@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97109:c85847cc3955 Date: 2019-08-08 20:02 +0300 http://bitbucket.org/pypy/pypy/changeset/c85847cc3955/ Log: test, fix raising SystemExit in atexit diff --git a/pypy/module/atexit/app_atexit.py b/pypy/module/atexit/app_atexit.py --- a/pypy/module/atexit/app_atexit.py +++ b/pypy/module/atexit/app_atexit.py @@ -26,13 +26,13 @@ try: func(*args, **kwargs) except BaseException as e: + last_exc = e + last_tb = e.__traceback__ if not isinstance(e, SystemExit): import traceback # obscure: we can't use sys.exc_info() here because this # function is an appleveldef which marks its frame as # hidden - last_exc = e - last_tb = e.__traceback__ traceback.print_exception(type(last_exc), last_exc, last_tb) clear() diff --git a/pypy/module/atexit/test/test_atexit.py b/pypy/module/atexit/test/test_atexit.py --- a/pypy/module/atexit/test/test_atexit.py +++ b/pypy/module/atexit/test/test_atexit.py @@ -25,3 +25,10 @@ import atexit atexit.register(lambda: 1, 0, 0, (x for x in (1,2)), 0, 0) raises(TypeError, atexit._run_exitfuncs) + + def test_systemexit(self): + import atexit + def f(): + raise SystemExit() + atexit.register(f) + raises(SystemExit, atexit._run_exitfuncs) From pypy.commits at gmail.com Thu Aug 8 15:33:55 2019 From: pypy.commits at gmail.com (mattip) Date: Thu, 08 Aug 2019 12:33:55 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: try to debug persistent buildbot failure that does not replicate locally Message-ID: <5d4c7923.1c69fb81.a4fe8.efb4@mx.google.com> 
Author: Matti Picus
Branch: py3.6
Changeset: r97110:29d097325970
Date: 2019-08-08 22:33 +0300
http://bitbucket.org/pypy/pypy/changeset/29d097325970/

Log:	try to debug persistent buildbot failure that does not replicate
	locally

diff --git a/pypy/module/__pypy__/test/test_signal.py b/pypy/module/__pypy__/test/test_signal.py
--- a/pypy/module/__pypy__/test/test_signal.py
+++ b/pypy/module/__pypy__/test/test_signal.py
@@ -28,17 +28,17 @@
         import __pypy__, _thread, signal, time, sys
 
         def subthread():
-            print('subthread started')
+            sys.stderr.write('subthread started')
             try:
                 with __pypy__.thread.signals_enabled:
                     _thread.interrupt_main()
                 for i in range(10):
-                    print('x')
+                    sys.stderr.write('x')
                     time.sleep(0.25)
             except BaseException as e:
                 interrupted.append(e)
             finally:
-                print('subthread stops, interrupted=%r' % (interrupted,))
+                sys.stderr.write('subthread stops, interrupted=%r' % (interrupted,))
                 done.append(None)
 
         # This is normally called by app_main.py
@@ -47,20 +47,20 @@
         if sys.platform.startswith('win'):
             # Windows seems to hang on _setmode when the first print comes from
             # a thread, so make sure we've initialized io
-            sys.stdout
+            sys.stderr
 
         for i in range(10):
            __pypy__.thread._signals_exit()
            try:
                done = []
                interrupted = []
-                print('--- start ---')
+                sys.stderr.write('--- start ---')
                _thread.start_new_thread(subthread, ())
                for j in range(30):
                    if len(done): break
-                    print('.')
+                    sys.stderr.write('.')
                    time.sleep(0.25)
-                print('main thread loop done')
+                sys.stderr.write('main thread loop done')
                assert len(done) == 1
                assert len(interrupted) == 1
                assert 'KeyboardInterrupt' in interrupted[0].__class__.__name__

From pypy.commits at gmail.com Fri Aug 9 04:58:36 2019
From: pypy.commits at gmail.com (arigo)
Date: Fri, 09 Aug 2019 01:58:36 -0700 (PDT)
Subject: [pypy-commit] pypy sandbox-2: Add a couple of modules to the sandbox
Message-ID: <5d4d35bc.1c69fb81.72a69.cc31@mx.google.com>

Author: Armin Rigo
Branch: sandbox-2
Changeset: r97112:3d0f6980ad49
Date: 2019-08-09 10:57 +0200
http://bitbucket.org/pypy/pypy/changeset/3d0f6980ad49/

Log:	Add a couple of modules to the sandbox

diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
--- a/pypy/config/pypyoption.py
+++ b/pypy/config/pypyoption.py
@@ -40,6 +40,12 @@
     #" _ssl", "_hashlib", "crypt"
     ])
 
+# --sandbox
+sandbox_modules = default_modules.copy()
+sandbox_modules.update([
+    "struct", "cStringIO", "itertools", "array", "binascii",
+])
+
 import rpython.rlib.rvmprof.cintf
 if rpython.rlib.rvmprof.cintf.IS_SUPPORTED:
     working_modules.add('_vmprof')
@@ -261,7 +267,7 @@
 def enable_allworkingmodules(config):
     modules = working_modules.copy()
     if config.translation.sandbox:
-        modules = default_modules
+        modules = sandbox_modules.copy()
     if config.translation.reverse_debugger:
         for mod in reverse_debugger_disable_modules:
             setattr(config.objspace.usemodules, mod, False)

From pypy.commits at gmail.com Fri Aug 9 06:44:41 2019
From: pypy.commits at gmail.com (arigo)
Date: Fri, 09 Aug 2019 03:44:41 -0700 (PDT)
Subject: [pypy-commit] pypy sandbox-2: Mark a few functions as sandboxsafe=True, because I don't think
Message-ID: <5d4d4e99.1c69fb81.d9e5.d932@mx.google.com>

Author: Armin Rigo
Branch: sandbox-2
Changeset: r97113:72768e4f4ec6
Date: 2019-08-09 12:44 +0200
http://bitbucket.org/pypy/pypy/changeset/72768e4f4ec6/

Log:	Mark a few functions as sandboxsafe=True, because I don't think
	they can cause any problem

diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py
--- a/pypy/module/time/interp_time.py
+++ b/pypy/module/time/interp_time.py
@@ -112,7 +112,6 @@
     )
     CLOCKS_PER_SEC = platform.ConstantInteger("CLOCKS_PER_SEC")
     clock_t = platform.SimpleType("clock_t", rffi.ULONG)
-    has_gettimeofday = platform.Has('gettimeofday')
 
 if _POSIX:
     calling_conv = 'c'
@@ -167,19 +166,17 @@
 tm = cConfig.tm
 glob_buf = lltype.malloc(tm, flavor='raw', zero=True, immortal=True)
 
-if cConfig.has_gettimeofday:
-    c_gettimeofday = external('gettimeofday', [rffi.VOIDP, rffi.VOIDP], rffi.INT)
 TM_P = lltype.Ptr(tm)
 c_time = external('time', [rffi.TIME_TP], rffi.TIME_T)
-c_ctime = external('ctime', [rffi.TIME_TP], rffi.CCHARP)
+c_ctime = external('ctime', [rffi.TIME_TP], rffi.CCHARP, sandboxsafe=True)
 c_gmtime = external('gmtime', [rffi.TIME_TP], TM_P,
-                    save_err=rffi.RFFI_SAVE_ERRNO)
-c_mktime = external('mktime', [TM_P], rffi.TIME_T)
-c_asctime = external('asctime', [TM_P], rffi.CCHARP)
+                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe=True)
+c_mktime = external('mktime', [TM_P], rffi.TIME_T, sandboxsafe=True)
+c_asctime = external('asctime', [TM_P], rffi.CCHARP, sandboxsafe=True)
 c_localtime = external('localtime', [rffi.TIME_TP], TM_P,
-                       save_err=rffi.RFFI_SAVE_ERRNO)
+                       save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe=True)
 if _POSIX:
-    c_tzset = external('tzset', [], lltype.Void)
+    c_tzset = external('tzset', [], lltype.Void, sandboxsafe=True)
 if _WIN:
     win_eci = ExternalCompilationInfo(
         includes = ["time.h"],
@@ -218,7 +215,7 @@
                               rffi.INT, win_eci, calling_conv='c')
 c_strftime = external('strftime', [rffi.CCHARP, rffi.SIZE_T, rffi.CCHARP, TM_P],
-                      rffi.SIZE_T)
+                      rffi.SIZE_T, sandboxsafe=True)
 
 
 def _init_accept2dyear(space):
     if os.environ.get("PYTHONY2K"):
diff --git a/rpython/rlib/entrypoint.py b/rpython/rlib/entrypoint.py
--- a/rpython/rlib/entrypoint.py
+++ b/rpython/rlib/entrypoint.py
@@ -41,7 +41,9 @@
     return deco
 
-pypy_debug_catch_fatal_exception = rffi.llexternal('pypy_debug_catch_fatal_exception', [], lltype.Void)
+pypy_debug_catch_fatal_exception = rffi.llexternal(
+    'pypy_debug_catch_fatal_exception', [], lltype.Void,
+    sandboxsafe=True)
 
 
 def entrypoint_highlevel(key, argtypes, c_name=None):
     """
diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
--- a/rpython/rlib/rposix.py
+++ b/rpython/rlib/rposix.py
@@ -1079,7 +1079,7 @@
    # for more details. If this get's fixed we can use lltype.Signed
    # again. (The exact same issue occurs on ppc64 big-endian.)
    c_func = external(name, [rffi.INT], lltype.Signed,
-                     macro=_MACRO_ON_POSIX)
+                     macro=_MACRO_ON_POSIX, sandboxsafe=True)
    returning_int = name in ('WEXITSTATUS', 'WSTOPSIG', 'WTERMSIG')
 
    @replace_os_function(name)
@@ -1990,9 +1990,12 @@
 
 if sys.platform != 'win32':
     # These are actually macros on some/most systems
-    c_makedev = external('makedev', [rffi.INT, rffi.INT], rffi.INT, macro=True)
-    c_major = external('major', [rffi.INT], rffi.INT, macro=True)
-    c_minor = external('minor', [rffi.INT], rffi.INT, macro=True)
+    c_makedev = external('makedev', [rffi.INT, rffi.INT], rffi.INT, macro=True,
+                         sandboxsafe=True)
+    c_major = external('major', [rffi.INT], rffi.INT, macro=True,
+                       sandboxsafe=True)
+    c_minor = external('minor', [rffi.INT], rffi.INT, macro=True,
+                       sandboxsafe=True)
 
 @replace_os_function('makedev')
 def makedev(maj, min):

From pypy.commits at gmail.com Fri Aug 9 08:51:59 2019
From: pypy.commits at gmail.com (mattip)
Date: Fri, 09 Aug 2019 05:51:59 -0700 (PDT)
Subject: [pypy-commit] pypy py3.6: revert 29d097325970, skip this test when untranslated
Message-ID: <5d4d6c6f.1c69fb81.2c41c.2c5c@mx.google.com>

Author: Matti Picus
Branch: py3.6
Changeset: r97114:f4c733706018
Date: 2019-08-09 15:51 +0300
http://bitbucket.org/pypy/pypy/changeset/f4c733706018/

Log:	revert 29d097325970, skip this test when untranslated

diff --git a/pypy/module/__pypy__/test/test_signal.py b/pypy/module/__pypy__/test/test_signal.py
--- a/pypy/module/__pypy__/test/test_signal.py
+++ b/pypy/module/__pypy__/test/test_signal.py
@@ -16,6 +16,9 @@
 class AppTestThreadSignal(GenericTestThread):
     spaceconfig = dict(usemodules=['__pypy__', 'thread', 'signal', 'time'])
 
+    def setup_class(cls):
+        cls.w_runappdirect = cls.space.wrap(cls.runappdirect)
+
     def test_exit_twice(self):
         import __pypy__, _thread
         __pypy__.thread._signals_exit()
@@ -25,20 +28,23 @@
         __pypy__.thread._signals_enter()
 
     def test_enable_signals(self):
+        if not self.runappdirect:
+            skip("test is flaky when untranslated on bencher4 and aarch64")
+
         import __pypy__, _thread, signal, time, sys
 
         def subthread():
-            sys.stderr.write('subthread started')
+            print('subthread started')
             try:
                 with __pypy__.thread.signals_enabled:
                     _thread.interrupt_main()
                 for i in range(10):
-                    sys.stderr.write('x')
+                    print('x')
                     time.sleep(0.25)
             except BaseException as e:
                 interrupted.append(e)
             finally:
-                sys.stderr.write('subthread stops, interrupted=%r' % (interrupted,))
+                print('subthread stops, interrupted=%r' % (interrupted,))
                 done.append(None)
 
         # This is normally called by app_main.py
@@ -54,13 +60,13 @@
            try:
                done = []
                interrupted = []
-                sys.stderr.write('--- start ---')
+                print('--- start ---')
                _thread.start_new_thread(subthread, ())
                for j in range(30):
                    if len(done): break
-                    sys.stderr.write('.')
+                    print('.')
                    time.sleep(0.25)
-                sys.stderr.write('main thread loop done')
+                print('main thread loop done')
                assert len(done) == 1
                assert len(interrupted) == 1
                assert 'KeyboardInterrupt' in interrupted[0].__class__.__name__

From pypy.commits at gmail.com Fri Aug 9 09:21:27 2019
From: pypy.commits at gmail.com (arigo)
Date: Fri, 09 Aug 2019 06:21:27 -0700 (PDT)
Subject: [pypy-commit] pypy default: Correctly wrap the I/O errors we can get when importing modules
Message-ID: <5d4d7357.1c69fb81.a6f5e.6945@mx.google.com>

Author: Armin Rigo
Branch: 
Changeset: r97115:7907cb768f09
Date: 2019-08-09 15:09 +0200
http://bitbucket.org/pypy/pypy/changeset/7907cb768f09/

Log:	Correctly wrap the I/O errors we can get when importing modules
	(hopefully)

diff --git a/pypy/module/imp/importing.py
b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -11,6 +11,7 @@ from pypy.interpreter.baseobjspace import W_Root, CannotHaveLock from pypy.interpreter.eval import Code from pypy.interpreter.pycode import PyCode +from pypy.interpreter.streamutil import wrap_streamerror from rpython.rlib import streamio, jit from rpython.rlib.streamio import StreamErrors from rpython.rlib.objectmodel import we_are_translated, specialize @@ -659,13 +660,14 @@ if find_info.modtype == PY_SOURCE: return load_source_module( space, w_modulename, w_mod, - find_info.filename, find_info.stream.readall(), + find_info.filename, _wrap_readall(space, find_info.stream), find_info.stream.try_to_find_file_descriptor()) elif find_info.modtype == PY_COMPILED: - magic = _r_long(find_info.stream) - timestamp = _r_long(find_info.stream) + magic = _wrap_r_long(space, find_info.stream) + timestamp = _wrap_r_long(space, find_info.stream) return load_compiled_module(space, w_modulename, w_mod, find_info.filename, - magic, timestamp, find_info.stream.readall()) + magic, timestamp, + _wrap_readall(space, find_info.stream)) elif find_info.modtype == PKG_DIRECTORY: w_path = space.newlist([space.newtext(find_info.filename)]) space.setattr(w_mod, space.newtext('__path__'), w_path) @@ -677,10 +679,7 @@ w_mod = load_module(space, w_modulename, find_info, reuse=True) finally: - try: - find_info.stream.close() - except StreamErrors: - pass + _close_ignore(find_info.stream) return w_mod elif find_info.modtype == C_EXTENSION and has_so_extension(space): return load_c_extension(space, find_info.filename, @@ -712,10 +711,7 @@ if find_info: stream = find_info.stream if stream: - try: - stream.close() - except StreamErrors: - pass + _close_ignore(stream) if tentative: return None @@ -770,7 +766,7 @@ return load_module(space, w_modulename, find_info, reuse=True) finally: if find_info.stream: - find_info.stream.close() + _wrap_close(space, find_info.stream) except: # load_module probably removed name from modules because of # the error. Put back the original module object. @@ -936,12 +932,10 @@ if stream: # existing and up-to-date .pyc file try: - code_w = read_compiled_module(space, cpathname, stream.readall()) + code_w = read_compiled_module(space, cpathname, + _wrap_readall(space, stream)) finally: - try: - stream.close() - except StreamErrors: - pass + _close_ignore(stream) space.setattr(w_mod, space.newtext('__file__'), space.newtext(cpathname)) else: code_w = parse_source_module(space, pathname, source) @@ -1007,6 +1001,35 @@ d = x & 0xff stream.write(chr(a) + chr(b) + chr(c) + chr(d)) +def _wrap_r_long(space, stream): + """like _r_long(), but raising app-level exceptions""" + try: + return _r_long(stream) + except StreamErrors as e: + raise wrap_streamerror(space, e) + +def _wrap_readall(space, stream): + """stream.readall(), but raising app-level exceptions""" + try: + return stream.readall() + except StreamErrors as e: + raise wrap_streamerror(space, e) + +def _wrap_close(space, stream): + """stream.close(), but raising app-level exceptions""" + try: + stream.close() + except StreamErrors as e: + raise wrap_streamerror(space, e) + +def _close_ignore(stream): + """stream.close(), but ignoring any stream exception""" + try: + stream.close() + except StreamErrors as e: + pass + + def check_compiled_module(space, pycfilename, expected_mtime): """ Check if a pyc file's magic number and mtime match. 
@@ -1025,10 +1048,7 @@ return stream except StreamErrors: if stream: - try: - stream.close() - except StreamErrors: - pass + _close_ignore(stream) return None # XXX! must not eat all exceptions, e.g. # Out of file descriptors. diff --git a/pypy/module/imp/interp_imp.py b/pypy/module/imp/interp_imp.py --- a/pypy/module/imp/interp_imp.py +++ b/pypy/module/imp/interp_imp.py @@ -104,9 +104,10 @@ w_mod = importing.load_source_module( space, w_modulename, w_mod, - filename, stream.readall(), stream.try_to_find_file_descriptor()) + filename, importing._wrap_readall(space, stream), + stream.try_to_find_file_descriptor()) if space.is_none(w_file): - stream.close() + importing._wrap_close(space, stream) return w_mod @unwrap_spec(filename='fsencode', check_afterwards=int) @@ -115,14 +116,15 @@ # the function 'imp._run_compiled_module' is a pypy-only extension stream = get_file(space, w_file, filename, 'rb') - magic = importing._r_long(stream) - timestamp = importing._r_long(stream) + magic = importing._wrap_r_long(space, stream) + timestamp = importing._wrap_r_long(space, stream) w_mod = importing.load_compiled_module( space, w_modulename, w_module, filename, magic, timestamp, - stream.readall(), check_afterwards=check_afterwards) + importing._wrap_readall(space, stream), + check_afterwards=check_afterwards) if space.is_none(w_file): - stream.close() + importing._wrap_close(space, stream) return w_mod @unwrap_spec(filename='fsencode') From pypy.commits at gmail.com Fri Aug 9 09:21:29 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 09 Aug 2019 06:21:29 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: hg merge default Message-ID: <5d4d7359.1c69fb81.ae52d.2fb3@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97116:3c789662afb2 Date: 2019-08-09 15:11 +0200 http://bitbucket.org/pypy/pypy/changeset/3c789662afb2/ Log: hg merge default diff --git a/pypy/interpreter/pyparser/test/unittest_samples.py b/pypy/interpreter/pyparser/test/unittest_samples.py deleted file mode 100644 --- a/pypy/interpreter/pyparser/test/unittest_samples.py +++ /dev/null @@ -1,95 +0,0 @@ -"""test module for CPython / PyPy nested tuples comparison""" - -import os, os.path as osp -import sys -from pypy.interpreter.pyparser.pythonutil import python_parse, pypy_parse -from pprint import pprint -from pypy.interpreter.pyparser import grammar -grammar.DEBUG = False -from symbol import sym_name - - -def name(elt): - return "%s[%s]"% (sym_name.get(elt,elt),elt) - -def read_samples_dir(): - return [osp.join('samples', fname) for fname in os.listdir('samples') if fname.endswith('.py')] - -def print_sym_tuple(nested, level=0, limit=15, names=False, trace=()): - buf = [] - if level <= limit: - buf.append("%s(" % (" "*level)) - else: - buf.append("(") - for index, elt in enumerate(nested): - # Test if debugging and if on last element of error path - if trace and not trace[1:] and index == trace[0]: - buf.append('\n----> ') - if type(elt) is int: - if names: - buf.append(name(elt)) - else: - buf.append(str(elt)) - buf.append(', ') - elif type(elt) is str: - buf.append(repr(elt)) - else: - if level < limit: - buf.append('\n') - buf.extend(print_sym_tuple(elt, level+1, limit, - names, trace[1:])) - buf.append(')') - return buf - -def assert_tuples_equal(tup1, tup2, curpos = ()): - for index, (elt1, elt2) in enumerate(zip(tup1, tup2)): - if elt1 != elt2: - if type(elt1) is tuple and type(elt2) is tuple: - assert_tuples_equal(elt1, elt2, curpos + (index,)) - raise AssertionError('Found difference at %s : %s != %s' % - (curpos, 
name(elt1), name(elt2) ), curpos) - -from time import time, clock -def test_samples( samples ): - time_reports = {} - for sample in samples: - print "testing", sample - tstart1, cstart1 = time(), clock() - pypy_tuples = pypy_parse(sample) - tstart2, cstart2 = time(), clock() - python_tuples = python_parse(sample) - time_reports[sample] = (time() - tstart2, tstart2-tstart1, clock() - cstart2, cstart2-cstart1 ) - #print "-"*10, "PyPy parse results", "-"*10 - #print ''.join(print_sym_tuple(pypy_tuples, names=True)) - #print "-"*10, "CPython parse results", "-"*10 - #print ''.join(print_sym_tuple(python_tuples, names=True)) - print - try: - assert_tuples_equal(pypy_tuples, python_tuples) - except AssertionError as e: - error_path = e.args[-1] - print "ERROR PATH =", error_path - print "="*80 - print file(sample).read() - print "="*80 - print "-"*10, "PyPy parse results", "-"*10 - print ''.join(print_sym_tuple(pypy_tuples, names=True, trace=error_path)) - print "-"*10, "CPython parse results", "-"*10 - print ''.join(print_sym_tuple(python_tuples, names=True, trace=error_path)) - print "Failed on (%s)" % sample - # raise - pprint(time_reports) - -if __name__=="__main__": - import getopt - opts, args = getopt.getopt( sys.argv[1:], "d:", [] ) - for opt, val in opts: - if opt == "-d": - pass -# set_debug(int(val)) - if args: - samples = args - else: - samples = read_samples_dir() - - test_samples( samples ) diff --git a/pypy/module/_socket/interp_socket.py b/pypy/module/_socket/interp_socket.py --- a/pypy/module/_socket/interp_socket.py +++ b/pypy/module/_socket/interp_socket.py @@ -312,7 +312,10 @@ raise converted_error(space, e) if buflen < 0 or buflen > 1024: raise explicit_socket_error(space, "getsockopt buflen out of range") - return space.newbytes(self.sock.getsockopt(level, optname, buflen)) + try: + return space.newbytes(self.sock.getsockopt(level, optname, buflen)) + except SocketError as e: + raise converted_error(space, e) def gettimeout_w(self, space): """gettimeout() -> timeout @@ -438,7 +441,10 @@ setblocking(True) is equivalent to settimeout(None); setblocking(False) is equivalent to settimeout(0.0). 
""" - self.sock.setblocking(flag) + try: + self.sock.setblocking(flag) + except SocketError as e: + pass # CPython 2 only: never raise anything here @unwrap_spec(level=int, optname=int) def setsockopt_w(self, space, level, optname, w_optval): @@ -477,7 +483,10 @@ timeout = space.float_w(w_timeout) if timeout < 0.0: raise oefmt(space.w_ValueError, "Timeout value out of range") - self.sock.settimeout(timeout) + try: + self.sock.settimeout(timeout) + except SocketError as e: + pass # CPython 2 only: never raise anything here @unwrap_spec(nbytes=int, flags=int) def recv_into_w(self, space, w_buffer, nbytes=0, flags=0): diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -872,6 +872,14 @@ cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM) assert cli.family == socket.AF_INET + def test_missing_error_catching(self): + from _socket import socket, error + s = socket() + s.close() + s.settimeout(1) # EBADF, but ignored on Python 2 + s.setblocking(True) # EBADF, but ignored on Python 2 + raises(error, s.getsockopt, 42, 84, 8) # EBADF + class AppTestErrno: spaceconfig = {'usemodules': ['_socket']} diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -11,6 +11,7 @@ from pypy.interpreter.baseobjspace import W_Root, CannotHaveLock from pypy.interpreter.eval import Code from pypy.interpreter.pycode import PyCode +from pypy.interpreter.streamutil import wrap_streamerror from rpython.rlib import streamio, jit from rpython.rlib.streamio import StreamErrors from rpython.rlib.objectmodel import we_are_translated, specialize @@ -659,13 +660,14 @@ if find_info.modtype == PY_SOURCE: return load_source_module( space, w_modulename, w_mod, - find_info.filename, find_info.stream.readall(), + find_info.filename, _wrap_readall(space, find_info.stream), find_info.stream.try_to_find_file_descriptor()) elif find_info.modtype == PY_COMPILED: - magic = _r_long(find_info.stream) - timestamp = _r_long(find_info.stream) + magic = _wrap_r_long(space, find_info.stream) + timestamp = _wrap_r_long(space, find_info.stream) return load_compiled_module(space, w_modulename, w_mod, find_info.filename, - magic, timestamp, find_info.stream.readall()) + magic, timestamp, + _wrap_readall(space, find_info.stream)) elif find_info.modtype == PKG_DIRECTORY: w_path = space.newlist([space.newtext(find_info.filename)]) space.setattr(w_mod, space.newtext('__path__'), w_path) @@ -677,10 +679,7 @@ w_mod = load_module(space, w_modulename, find_info, reuse=True) finally: - try: - find_info.stream.close() - except StreamErrors: - pass + _close_ignore(find_info.stream) return w_mod elif find_info.modtype == C_EXTENSION and has_so_extension(space): return load_c_extension(space, find_info.filename, @@ -712,10 +711,7 @@ if find_info: stream = find_info.stream if stream: - try: - stream.close() - except StreamErrors: - pass + _close_ignore(stream) if tentative: return None @@ -770,7 +766,7 @@ return load_module(space, w_modulename, find_info, reuse=True) finally: if find_info.stream: - find_info.stream.close() + _wrap_close(space, find_info.stream) except: # load_module probably removed name from modules because of # the error. Put back the original module object. 
@@ -936,12 +932,10 @@ if stream: # existing and up-to-date .pyc file try: - code_w = read_compiled_module(space, cpathname, stream.readall()) + code_w = read_compiled_module(space, cpathname, + _wrap_readall(space, stream)) finally: - try: - stream.close() - except StreamErrors: - pass + _close_ignore(stream) space.setattr(w_mod, space.newtext('__file__'), space.newtext(cpathname)) else: code_w = parse_source_module(space, pathname, source) @@ -1007,6 +1001,35 @@ d = x & 0xff stream.write(chr(a) + chr(b) + chr(c) + chr(d)) +def _wrap_r_long(space, stream): + """like _r_long(), but raising app-level exceptions""" + try: + return _r_long(stream) + except StreamErrors as e: + raise wrap_streamerror(space, e) + +def _wrap_readall(space, stream): + """stream.readall(), but raising app-level exceptions""" + try: + return stream.readall() + except StreamErrors as e: + raise wrap_streamerror(space, e) + +def _wrap_close(space, stream): + """stream.close(), but raising app-level exceptions""" + try: + stream.close() + except StreamErrors as e: + raise wrap_streamerror(space, e) + +def _close_ignore(stream): + """stream.close(), but ignoring any stream exception""" + try: + stream.close() + except StreamErrors as e: + pass + + def check_compiled_module(space, pycfilename, expected_mtime): """ Check if a pyc file's magic number and mtime match. @@ -1025,10 +1048,7 @@ return stream except StreamErrors: if stream: - try: - stream.close() - except StreamErrors: - pass + _close_ignore(stream) return None # XXX! must not eat all exceptions, e.g. # Out of file descriptors. diff --git a/pypy/module/imp/interp_imp.py b/pypy/module/imp/interp_imp.py --- a/pypy/module/imp/interp_imp.py +++ b/pypy/module/imp/interp_imp.py @@ -104,9 +104,10 @@ w_mod = importing.load_source_module( space, w_modulename, w_mod, - filename, stream.readall(), stream.try_to_find_file_descriptor()) + filename, importing._wrap_readall(space, stream), + stream.try_to_find_file_descriptor()) if space.is_none(w_file): - stream.close() + importing._wrap_close(space, stream) return w_mod @unwrap_spec(filename='fsencode', check_afterwards=int) @@ -115,14 +116,15 @@ # the function 'imp._run_compiled_module' is a pypy-only extension stream = get_file(space, w_file, filename, 'rb') - magic = importing._r_long(stream) - timestamp = importing._r_long(stream) + magic = importing._wrap_r_long(space, stream) + timestamp = importing._wrap_r_long(space, stream) w_mod = importing.load_compiled_module( space, w_modulename, w_module, filename, magic, timestamp, - stream.readall(), check_afterwards=check_afterwards) + importing._wrap_readall(space, stream), + check_afterwards=check_afterwards) if space.is_none(w_file): - stream.close() + importing._wrap_close(space, stream) return w_mod @unwrap_spec(filename='fsencode') From pypy.commits at gmail.com Fri Aug 9 09:21:30 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 09 Aug 2019 06:21:30 -0700 (PDT) Subject: [pypy-commit] pypy default: merge heads Message-ID: <5d4d735a.1c69fb81.d90d3.cc4d@mx.google.com> Author: Armin Rigo Branch: Changeset: r97117:b14e43faf847 Date: 2019-08-09 15:20 +0200 http://bitbucket.org/pypy/pypy/changeset/b14e43faf847/ Log: merge heads diff --git a/pypy/objspace/std/test/apptest_longobject.py b/pypy/objspace/std/test/apptest_longobject.py new file mode 100644 --- /dev/null +++ b/pypy/objspace/std/test/apptest_longobject.py @@ -0,0 +1,409 @@ +from pytest import raises +import sys +import math +import operator + +def test_trunc(): + assert math.trunc(1L) == 1L + assert 
math.trunc(-1L) == -1L + +def test_add(): + x = 123L + assert int(x + 12443L) == 123 + 12443 + x = -20 + assert x + 2 + 3L + True == -14L + +def test_sub(): + assert int(58543L - 12332L) == 58543 - 12332 + assert int(58543L - 12332) == 58543 - 12332 + assert int(58543 - 12332L) == 58543 - 12332 + x = 237123838281233L + assert x * 12 == x * 12L + +def test_mul(): + x = 363L + assert x * 2 ** 40 == x << 40 + +def test_truediv(): + exec "from __future__ import division; a = 31415926L / 10000000L" + assert a == 3.1415926 + +def test_floordiv(): + x = 31415926L + a = x // 10000000L + assert a == 3L + +def test_int_floordiv(): + x = 3000L + a = x // 1000 + assert a == 3L + + x = 3000L + a = x // -1000 + assert a == -3L + + x = 3000L + raises(ZeroDivisionError, "x // 0") + + n = sys.maxint+1 + assert n / int(-n) == -1L + +def test_numerator_denominator(): + assert (1L).numerator == 1L + assert (1L).denominator == 1L + assert (42L).numerator == 42L + assert (42L).denominator == 1L + +def test_compare(): + Z = 0 + ZL = 0L + + assert Z == ZL + assert not (Z != ZL) + assert ZL == Z + assert not (ZL != Z) + assert Z <= ZL + assert not (Z < ZL) + assert ZL <= ZL + assert not (ZL < ZL) + + for BIG in (1L, 1L << 62, 1L << 9999): + assert not (Z == BIG) + assert Z != BIG + assert not (BIG == Z) + assert BIG != Z + assert not (ZL == BIG) + assert ZL != BIG + assert Z <= BIG + assert Z < BIG + assert not (BIG <= Z) + assert not (BIG < Z) + assert ZL <= BIG + assert ZL < BIG + assert not (BIG <= ZL) + assert not (BIG < ZL) + assert not (Z <= -BIG) + assert not (Z < -BIG) + assert -BIG <= Z + assert -BIG < Z + assert not (ZL <= -BIG) + assert not (ZL < -BIG) + assert -BIG <= ZL + assert -BIG < ZL + # + assert not (BIG < int(BIG)) + assert (BIG <= int(BIG)) + assert (BIG == int(BIG)) + assert not (BIG != int(BIG)) + assert not (BIG > int(BIG)) + assert (BIG >= int(BIG)) + # + assert (BIG < int(BIG)+1) + assert (BIG <= int(BIG)+1) + assert not (BIG == int(BIG)+1) + assert (BIG != int(BIG)+1) + assert not (BIG > int(BIG)+1) + assert not (BIG >= int(BIG)+1) + # + assert not (BIG < int(BIG)-1) + assert not (BIG <= int(BIG)-1) + assert not (BIG == int(BIG)-1) + assert (BIG != int(BIG)-1) + assert (BIG > int(BIG)-1) + assert (BIG >= int(BIG)-1) + # + assert not (int(BIG) < BIG) + assert (int(BIG) <= BIG) + assert (int(BIG) == BIG) + assert not (int(BIG) != BIG) + assert not (int(BIG) > BIG) + assert (int(BIG) >= BIG) + # + assert not (int(BIG)+1 < BIG) + assert not (int(BIG)+1 <= BIG) + assert not (int(BIG)+1 == BIG) + assert (int(BIG)+1 != BIG) + assert (int(BIG)+1 > BIG) + assert (int(BIG)+1 >= BIG) + # + assert (int(BIG)-1 < BIG) + assert (int(BIG)-1 <= BIG) + assert not (int(BIG)-1 == BIG) + assert (int(BIG)-1 != BIG) + assert not (int(BIG)-1 > BIG) + assert not (int(BIG)-1 >= BIG) + +def test_conversion(): + class long2(long): + pass + x = 1L + x = long2(x<<100) + y = int(x) + assert type(y) == long + assert type(+long2(5)) is long + assert type(long2(5) << 0) is long + assert type(long2(5) >> 0) is long + assert type(long2(5) + 0) is long + assert type(long2(5) - 0) is long + assert type(long2(5) * 1) is long + assert type(1 * long2(5)) is long + assert type(0 + long2(5)) is long + assert type(-long2(0)) is long + assert type(long2(5) // 1) is long + +def test_shift(): + assert 65l >> 2l == 16l + assert 65l >> 2 == 16l + assert 65 >> 2l == 16l + assert 65l << 2l == 65l * 4 + assert 65l << 2 == 65l * 4 + assert 65 << 2l == 65l * 4 + raises(ValueError, "1L << -1L") + raises(ValueError, "1L << -1") + 
raises(OverflowError, "1L << (2 ** 100)") + raises(ValueError, "1L >> -1L") + raises(ValueError, "1L >> -1") + raises(OverflowError, "1L >> (2 ** 100)") + +def test_pow(): + x = 0L + assert pow(x, 0L, 1L) == 0L + assert pow(-1L, -1L) == -1.0 + assert pow(2 ** 68, 0.5) == 2.0 ** 34 + assert pow(2 ** 68, 2) == 2 ** 136 + raises(TypeError, pow, 2l, -1, 3) + raises(ValueError, pow, 2l, 5, 0) + + # some rpow tests + assert pow(0, 0L, 1L) == 0L + assert pow(-1, -1L) == -1.0 + +def test_int_pow(): + x = 2L + assert pow(x, 2) == 4L + assert pow(x, 2, 2) == 0L + assert pow(x, 2, 3L) == 1L + +def test_getnewargs(): + assert 0L .__getnewargs__() == (0L,) + assert (-1L) .__getnewargs__() == (-1L,) + +def test_divmod(): + def check_division(x, y): + q, r = divmod(x, y) + pab, pba = x*y, y*x + assert pab == pba + assert q == x // y + assert r == x % y + assert x == q*y + r + if y > 0: + assert 0 <= r < y + else: + assert y < r <= 0 + for x in [-1L, 0L, 1L, 2L ** 100 - 1, -2L ** 100 - 1]: + for y in [-105566530L, -1L, 1L, 1034522340L]: + print "checking division for %s, %s" % (x, y) + check_division(x, y) + check_division(x, int(y)) + check_division(int(x), y) + # special case from python tests: + s1 = 33 + s2 = 2 + x = 16565645174462751485571442763871865344588923363439663038777355323778298703228675004033774331442052275771343018700586987657790981527457655176938756028872904152013524821759375058141439 + x >>= s1*16 + y = 10953035502453784575 + y >>= s2*16 + x = 0x3FE0003FFFFC0001FFFL + y = 0x9800FFC1L + check_division(x, y) + raises(ZeroDivisionError, "x // 0L") + raises(ZeroDivisionError, "x % 0L") + raises(ZeroDivisionError, divmod, x, 0L) + raises(ZeroDivisionError, "x // 0") + raises(ZeroDivisionError, "x % 0") + raises(ZeroDivisionError, divmod, x, 0) + +def test_int_divmod(): + q, r = divmod(100L, 11) + assert q == 9L + assert r == 1L + +def test_format(): + assert repr(12345678901234567890) == '12345678901234567890L' + assert str(12345678901234567890) == '12345678901234567890' + assert hex(0x1234567890ABCDEFL) == '0x1234567890abcdefL' + assert oct(01234567012345670L) == '01234567012345670L' + +def test_bits(): + x = 0xAAAAAAAAL + assert x | 0x55555555L == 0xFFFFFFFFL + assert x & 0x55555555L == 0x00000000L + assert x ^ 0x55555555L == 0xFFFFFFFFL + assert -x | 0x55555555L == -0xAAAAAAA9L + assert x | 0x555555555L == 0x5FFFFFFFFL + assert x & 0x555555555L == 0x000000000L + assert x ^ 0x555555555L == 0x5FFFFFFFFL + +def test_hash(): + # ints have the same hash as equal longs + for i in range(-4, 14): + assert hash(i) == hash(long(i)) == long(i).__hash__() + # might check too much -- it's ok to change the hashing algorithm + assert hash(123456789L) == 123456789 + assert hash(1234567890123456789L) in ( + -1895067127, # with 32-bit platforms + 1234567890123456789) # with 64-bit platforms + +def test_math_log(): + raises(ValueError, math.log, 0L) + raises(ValueError, math.log, -1L) + raises(ValueError, math.log, -2L) + raises(ValueError, math.log, -(1L << 10000)) + #raises(ValueError, math.log, 0) + raises(ValueError, math.log, -1) + raises(ValueError, math.log, -2) + +def test_long(): + n = -sys.maxint-1 + assert long(n) == n + assert str(long(n)) == str(n) + a = buffer('123') + assert long(a) == 123L + +def test_huge_longs(): + x = 1L + huge = x << 40000L + raises(OverflowError, float, huge) + raises(OverflowError, operator.truediv, huge, 3) + raises(OverflowError, operator.truediv, huge, 3L) + +def test_just_trunc(): + class myint(object): + def __trunc__(self): + return 42 + assert long(myint()) == 42 
+ +def test_override___long__(): + class mylong(long): + def __long__(self): + return 42L + assert long(mylong(21)) == 42L + class myotherlong(long): + pass + assert long(myotherlong(21)) == 21L + +def test___long__(): + class A(object): + def __long__(self): + return 42 + assert long(A()) == 42L + class B(object): + def __int__(self): + return 42 + raises(TypeError, long, B()) + + class LongSubclass(long): + pass + class ReturnsLongSubclass(object): + def __long__(self): + return LongSubclass(42L) + n = long(ReturnsLongSubclass()) + assert n == 42 + assert type(n) is LongSubclass + +def test_trunc_returns(): + # but!: (blame CPython 2.7) + class Integral(object): + def __int__(self): + return 42 + class TruncReturnsNonLong(object): + def __trunc__(self): + return Integral() + n = long(TruncReturnsNonLong()) + assert type(n) is long + assert n == 42 + + class LongSubclass(long): + pass + class TruncReturnsNonInt(object): + def __trunc__(self): + return LongSubclass(42) + n = long(TruncReturnsNonInt()) + assert n == 42 + assert type(n) is LongSubclass + +def test_long_before_string(): + class A(str): + def __long__(self): + return 42 + assert long(A('abc')) == 42 + +def test_long_errors(): + raises(TypeError, long, 12, 12) + raises(ValueError, long, 'xxxxxx?', 12) + +def test_conjugate(): + assert (7L).conjugate() == 7L + assert (-7L).conjugate() == -7L + + class L(long): + pass + + assert type(L(7).conjugate()) is long + + class L(long): + def __pos__(self): + return 43 + assert L(7).conjugate() == 7L + +def test_bit_length(): + assert 8L.bit_length() == 4 + assert (-1<<40).bit_length() == 41 + assert ((2**31)-1).bit_length() == 31 + +def test_negative_zero(): + x = eval("-0L") + assert x == 0L + +def test_mix_int_and_long(): + class IntLongMixClass(object): + def __int__(self): + return 42L + + def __long__(self): + return 64 + + mixIntAndLong = IntLongMixClass() + as_long = long(mixIntAndLong) + assert type(as_long) is long + assert as_long == 64 + +def test_long_real(): + class A(long): pass + b = A(5).real + assert type(b) is long + +def test__int__(): + class A(long): + def __int__(self): + return 42 + + assert int(long(3)) == long(3) + assert int(A(13)) == 42 + +def test_long_error_msg(): + e = raises(TypeError, long, []) + assert str(e.value) == ( + "long() argument must be a string or a number, not 'list'") + +def test_coerce(): + assert 3L.__coerce__(4L) == (3L, 4L) + assert 3L.__coerce__(4) == (3, 4) + assert 3L.__coerce__(object()) == NotImplemented + +def test_linear_long_base_16(): + # never finishes if long(_, 16) is not linear-time + size = 100000 + n = "a" * size + expected = (2 << (size * 4)) // 3 + assert long(n, 16) == expected diff --git a/pypy/objspace/std/test/test_longobject.py b/pypy/objspace/std/test/test_longobject.py --- a/pypy/objspace/std/test/test_longobject.py +++ b/pypy/objspace/std/test/test_longobject.py @@ -1,4 +1,3 @@ -import py from pypy.objspace.std import longobject as lobj from rpython.rlib.rbigint import rbigint @@ -36,417 +35,3 @@ x &= r.MASK w_obj = space.newlong_from_rarith_int(r(x)) assert space.bigint_w(w_obj).eq(rbigint.fromlong(x)) - - -class AppTestLong: - def test_trunc(self): - import math - assert math.trunc(1L) == 1L - assert math.trunc(-1L) == -1L - - def test_add(self): - x = 123L - assert int(x + 12443L) == 123 + 12443 - x = -20 - assert x + 2 + 3L + True == -14L - - def test_sub(self): - assert int(58543L - 12332L) == 58543 - 12332 - assert int(58543L - 12332) == 58543 - 12332 - assert int(58543 - 12332L) == 58543 - 12332 - x = 
237123838281233L - assert x * 12 == x * 12L - - def test_mul(self): - x = 363L - assert x * 2 ** 40 == x << 40 - - def test_truediv(self): - exec "from __future__ import division; a = 31415926L / 10000000L" - assert a == 3.1415926 - - def test_floordiv(self): - x = 31415926L - a = x // 10000000L - assert a == 3L - - def test_int_floordiv(self): - import sys - - x = 3000L - a = x // 1000 - assert a == 3L - - x = 3000L - a = x // -1000 - assert a == -3L - - x = 3000L - raises(ZeroDivisionError, "x // 0") - - n = sys.maxint+1 - assert n / int(-n) == -1L - - def test_numerator_denominator(self): - assert (1L).numerator == 1L - assert (1L).denominator == 1L - assert (42L).numerator == 42L - assert (42L).denominator == 1L - - def test_compare(self): - Z = 0 - ZL = 0L - - assert Z == ZL - assert not (Z != ZL) - assert ZL == Z - assert not (ZL != Z) - assert Z <= ZL - assert not (Z < ZL) - assert ZL <= ZL - assert not (ZL < ZL) - - for BIG in (1L, 1L << 62, 1L << 9999): - assert not (Z == BIG) - assert Z != BIG - assert not (BIG == Z) - assert BIG != Z - assert not (ZL == BIG) - assert ZL != BIG - assert Z <= BIG - assert Z < BIG - assert not (BIG <= Z) - assert not (BIG < Z) - assert ZL <= BIG - assert ZL < BIG - assert not (BIG <= ZL) - assert not (BIG < ZL) - assert not (Z <= -BIG) - assert not (Z < -BIG) - assert -BIG <= Z - assert -BIG < Z - assert not (ZL <= -BIG) - assert not (ZL < -BIG) - assert -BIG <= ZL - assert -BIG < ZL - # - assert not (BIG < int(BIG)) - assert (BIG <= int(BIG)) - assert (BIG == int(BIG)) - assert not (BIG != int(BIG)) - assert not (BIG > int(BIG)) - assert (BIG >= int(BIG)) - # - assert (BIG < int(BIG)+1) - assert (BIG <= int(BIG)+1) - assert not (BIG == int(BIG)+1) - assert (BIG != int(BIG)+1) - assert not (BIG > int(BIG)+1) - assert not (BIG >= int(BIG)+1) - # - assert not (BIG < int(BIG)-1) - assert not (BIG <= int(BIG)-1) - assert not (BIG == int(BIG)-1) - assert (BIG != int(BIG)-1) - assert (BIG > int(BIG)-1) - assert (BIG >= int(BIG)-1) - # - assert not (int(BIG) < BIG) - assert (int(BIG) <= BIG) - assert (int(BIG) == BIG) - assert not (int(BIG) != BIG) - assert not (int(BIG) > BIG) - assert (int(BIG) >= BIG) - # - assert not (int(BIG)+1 < BIG) - assert not (int(BIG)+1 <= BIG) - assert not (int(BIG)+1 == BIG) - assert (int(BIG)+1 != BIG) - assert (int(BIG)+1 > BIG) - assert (int(BIG)+1 >= BIG) - # - assert (int(BIG)-1 < BIG) - assert (int(BIG)-1 <= BIG) - assert not (int(BIG)-1 == BIG) - assert (int(BIG)-1 != BIG) - assert not (int(BIG)-1 > BIG) - assert not (int(BIG)-1 >= BIG) - - def test_conversion(self): - class long2(long): - pass - x = 1L - x = long2(x<<100) - y = int(x) - assert type(y) == long - assert type(+long2(5)) is long - assert type(long2(5) << 0) is long - assert type(long2(5) >> 0) is long - assert type(long2(5) + 0) is long - assert type(long2(5) - 0) is long - assert type(long2(5) * 1) is long - assert type(1 * long2(5)) is long - assert type(0 + long2(5)) is long - assert type(-long2(0)) is long - assert type(long2(5) // 1) is long - - def test_shift(self): - assert 65l >> 2l == 16l - assert 65l >> 2 == 16l - assert 65 >> 2l == 16l - assert 65l << 2l == 65l * 4 - assert 65l << 2 == 65l * 4 - assert 65 << 2l == 65l * 4 - raises(ValueError, "1L << -1L") - raises(ValueError, "1L << -1") - raises(OverflowError, "1L << (2 ** 100)") - raises(ValueError, "1L >> -1L") - raises(ValueError, "1L >> -1") - raises(OverflowError, "1L >> (2 ** 100)") - - def test_pow(self): - x = 0L - assert pow(x, 0L, 1L) == 0L - assert pow(-1L, -1L) == -1.0 - assert 
pow(2 ** 68, 0.5) == 2.0 ** 34 - assert pow(2 ** 68, 2) == 2 ** 136 - raises(TypeError, pow, 2l, -1, 3) - raises(ValueError, pow, 2l, 5, 0) - - # some rpow tests - assert pow(0, 0L, 1L) == 0L - assert pow(-1, -1L) == -1.0 - - def test_int_pow(self): - x = 2L - assert pow(x, 2) == 4L - assert pow(x, 2, 2) == 0L - assert pow(x, 2, 3L) == 1L - - def test_getnewargs(self): - assert 0L .__getnewargs__() == (0L,) - assert (-1L) .__getnewargs__() == (-1L,) - - def test_divmod(self): - def check_division(x, y): - q, r = divmod(x, y) - pab, pba = x*y, y*x - assert pab == pba - assert q == x // y - assert r == x % y - assert x == q*y + r - if y > 0: - assert 0 <= r < y - else: - assert y < r <= 0 - for x in [-1L, 0L, 1L, 2L ** 100 - 1, -2L ** 100 - 1]: - for y in [-105566530L, -1L, 1L, 1034522340L]: - print "checking division for %s, %s" % (x, y) - check_division(x, y) - check_division(x, int(y)) - check_division(int(x), y) - # special case from python tests: - s1 = 33 - s2 = 2 - x = 16565645174462751485571442763871865344588923363439663038777355323778298703228675004033774331442052275771343018700586987657790981527457655176938756028872904152013524821759375058141439 - x >>= s1*16 - y = 10953035502453784575 - y >>= s2*16 - x = 0x3FE0003FFFFC0001FFFL - y = 0x9800FFC1L - check_division(x, y) - raises(ZeroDivisionError, "x // 0L") - raises(ZeroDivisionError, "x % 0L") - raises(ZeroDivisionError, divmod, x, 0L) - raises(ZeroDivisionError, "x // 0") - raises(ZeroDivisionError, "x % 0") - raises(ZeroDivisionError, divmod, x, 0) - - def test_int_divmod(self): - q, r = divmod(100L, 11) - assert q == 9L - assert r == 1L - - def test_format(self): - assert repr(12345678901234567890) == '12345678901234567890L' - assert str(12345678901234567890) == '12345678901234567890' - assert hex(0x1234567890ABCDEFL) == '0x1234567890abcdefL' - assert oct(01234567012345670L) == '01234567012345670L' - - def test_bits(self): - x = 0xAAAAAAAAL - assert x | 0x55555555L == 0xFFFFFFFFL - assert x & 0x55555555L == 0x00000000L - assert x ^ 0x55555555L == 0xFFFFFFFFL - assert -x | 0x55555555L == -0xAAAAAAA9L - assert x | 0x555555555L == 0x5FFFFFFFFL - assert x & 0x555555555L == 0x000000000L - assert x ^ 0x555555555L == 0x5FFFFFFFFL - - def test_hash(self): - # ints have the same hash as equal longs - for i in range(-4, 14): - assert hash(i) == hash(long(i)) == long(i).__hash__() - # might check too much -- it's ok to change the hashing algorithm - assert hash(123456789L) == 123456789 - assert hash(1234567890123456789L) in ( - -1895067127, # with 32-bit platforms - 1234567890123456789) # with 64-bit platforms - - def test_math_log(self): - import math - raises(ValueError, math.log, 0L) - raises(ValueError, math.log, -1L) - raises(ValueError, math.log, -2L) - raises(ValueError, math.log, -(1L << 10000)) - #raises(ValueError, math.log, 0) - raises(ValueError, math.log, -1) - raises(ValueError, math.log, -2) - - def test_long(self): - import sys - n = -sys.maxint-1 - assert long(n) == n - assert str(long(n)) == str(n) - a = buffer('123') - assert long(a) == 123L - - def test_huge_longs(self): - import operator - x = 1L - huge = x << 40000L - raises(OverflowError, float, huge) - raises(OverflowError, operator.truediv, huge, 3) - raises(OverflowError, operator.truediv, huge, 3L) - - def test_just_trunc(self): - class myint(object): - def __trunc__(self): - return 42 - assert long(myint()) == 42 - - def test_override___long__(self): - class mylong(long): - def __long__(self): - return 42L - assert long(mylong(21)) == 42L - class 
myotherlong(long): - pass - assert long(myotherlong(21)) == 21L - - def test___long__(self): - class A(object): - def __long__(self): - return 42 - assert long(A()) == 42L - class B(object): - def __int__(self): - return 42 - raises(TypeError, long, B()) - - class LongSubclass(long): - pass - class ReturnsLongSubclass(object): - def __long__(self): - return LongSubclass(42L) - n = long(ReturnsLongSubclass()) - assert n == 42 - assert type(n) is LongSubclass - - def test_trunc_returns(self): - # but!: (blame CPython 2.7) - class Integral(object): - def __int__(self): - return 42 - class TruncReturnsNonLong(object): - def __trunc__(self): - return Integral() - n = long(TruncReturnsNonLong()) - assert type(n) is long - assert n == 42 - - class LongSubclass(long): - pass - class TruncReturnsNonInt(object): - def __trunc__(self): - return LongSubclass(42) - n = long(TruncReturnsNonInt()) - assert n == 42 - assert type(n) is LongSubclass - - def test_long_before_string(self): - class A(str): - def __long__(self): - return 42 - assert long(A('abc')) == 42 - - def test_long_errors(self): - raises(TypeError, long, 12, 12) - raises(ValueError, long, 'xxxxxx?', 12) - - def test_conjugate(self): - assert (7L).conjugate() == 7L - assert (-7L).conjugate() == -7L - - class L(long): - pass - - assert type(L(7).conjugate()) is long - - class L(long): - def __pos__(self): - return 43 - assert L(7).conjugate() == 7L - - def test_bit_length(self): - assert 8L.bit_length() == 4 - assert (-1<<40).bit_length() == 41 - assert ((2**31)-1).bit_length() == 31 - - def test_negative_zero(self): - x = eval("-0L") - assert x == 0L - - def test_mix_int_and_long(self): - class IntLongMixClass(object): - def __int__(self): - return 42L - - def __long__(self): - return 64 - - mixIntAndLong = IntLongMixClass() - as_long = long(mixIntAndLong) - assert type(as_long) is long - assert as_long == 64 - - def test_long_real(self): - class A(long): pass - b = A(5).real - assert type(b) is long - - def test__int__(self): - class A(long): - def __int__(self): - return 42 - - assert int(long(3)) == long(3) - assert int(A(13)) == 42 - - def test_long_error_msg(self): - e = raises(TypeError, long, []) - assert str(e.value) == ( - "long() argument must be a string or a number, not 'list'") - - def test_coerce(self): - assert 3L.__coerce__(4L) == (3L, 4L) - assert 3L.__coerce__(4) == (3, 4) - assert 3L.__coerce__(object()) == NotImplemented - - def test_linear_long_base_16(self): - # never finishes if long(_, 16) is not linear-time - size = 100000 - n = "a" * size - expected = (2 << (size * 4)) // 3 - assert long(n, 16) == expected - From pypy.commits at gmail.com Fri Aug 9 09:21:56 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 09 Aug 2019 06:21:56 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: More sandboxsafe's Message-ID: <5d4d7374.1c69fb81.d0c16.2113@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97118:2bfca02e69de Date: 2019-08-09 15:21 +0200 http://bitbucket.org/pypy/pypy/changeset/2bfca02e69de/ Log: More sandboxsafe's diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py --- a/rpython/rtyper/lltypesystem/rffi.py +++ b/rpython/rtyper/lltypesystem/rffi.py @@ -1405,12 +1405,14 @@ lltype.Void, releasegil=False, calling_conv='c', + sandboxsafe=True, ) c_memset = llexternal("memset", [VOIDP, lltype.Signed, SIZE_T], lltype.Void, releasegil=False, calling_conv='c', + sandboxsafe=True, ) From pypy.commits at gmail.com Fri Aug 9 09:37:34 2019 From: pypy.commits at gmail.com 
(arigo) Date: Fri, 09 Aug 2019 06:37:34 -0700 (PDT) Subject: [pypy-commit] pypy default: Wrap the I/O errors we can get when importing modules (one more place) Message-ID: <5d4d771e.1c69fb81.4b1a9.035d@mx.google.com> Author: Armin Rigo Branch: Changeset: r97119:1f0f33b64b3b Date: 2019-08-09 15:32 +0200 http://bitbucket.org/pypy/pypy/changeset/1f0f33b64b3b/ Log: Wrap the I/O errors we can get when importing modules (one more place) diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -7,7 +7,7 @@ from pypy.interpreter.module import Module from pypy.interpreter.gateway import interp2app, unwrap_spec from pypy.interpreter.typedef import TypeDef, generic_new_descr -from pypy.interpreter.error import OperationError, oefmt +from pypy.interpreter.error import OperationError, oefmt, wrap_oserror from pypy.interpreter.baseobjspace import W_Root, CannotHaveLock from pypy.interpreter.eval import Code from pypy.interpreter.pycode import PyCode @@ -923,7 +923,10 @@ log_pyverbose(space, 1, "import %s # from %s\n" % (space.text_w(w_modulename), pathname)) - src_stat = os.fstat(fd) + try: + src_stat = os.fstat(fd) + except OSError as e: + raise wrap_oserror(space, e, pathname) # better report this error cpathname = pathname + 'c' mtime = int(src_stat[stat.ST_MTIME]) mode = src_stat[stat.ST_MODE] From pypy.commits at gmail.com Fri Aug 9 09:37:36 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 09 Aug 2019 06:37:36 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: hg merge default Message-ID: <5d4d7720.1c69fb81.87d0b.a344@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97120:d26ba88cc09d Date: 2019-08-09 15:36 +0200 http://bitbucket.org/pypy/pypy/changeset/d26ba88cc09d/ Log: hg merge default diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -7,7 +7,7 @@ from pypy.interpreter.module import Module from pypy.interpreter.gateway import interp2app, unwrap_spec from pypy.interpreter.typedef import TypeDef, generic_new_descr -from pypy.interpreter.error import OperationError, oefmt +from pypy.interpreter.error import OperationError, oefmt, wrap_oserror from pypy.interpreter.baseobjspace import W_Root, CannotHaveLock from pypy.interpreter.eval import Code from pypy.interpreter.pycode import PyCode @@ -923,7 +923,10 @@ log_pyverbose(space, 1, "import %s # from %s\n" % (space.text_w(w_modulename), pathname)) - src_stat = os.fstat(fd) + try: + src_stat = os.fstat(fd) + except OSError as e: + raise wrap_oserror(space, e, pathname) # better report this error cpathname = pathname + 'c' mtime = int(src_stat[stat.ST_MTIME]) mode = src_stat[stat.ST_MODE] diff --git a/pypy/objspace/std/test/apptest_longobject.py b/pypy/objspace/std/test/apptest_longobject.py new file mode 100644 --- /dev/null +++ b/pypy/objspace/std/test/apptest_longobject.py @@ -0,0 +1,409 @@ +from pytest import raises +import sys +import math +import operator + +def test_trunc(): + assert math.trunc(1L) == 1L + assert math.trunc(-1L) == -1L + +def test_add(): + x = 123L + assert int(x + 12443L) == 123 + 12443 + x = -20 + assert x + 2 + 3L + True == -14L + +def test_sub(): + assert int(58543L - 12332L) == 58543 - 12332 + assert int(58543L - 12332) == 58543 - 12332 + assert int(58543 - 12332L) == 58543 - 12332 + x = 237123838281233L + assert x * 12 == x * 12L + +def test_mul(): + x = 363L + assert x * 2 ** 40 == x << 40 + +def test_truediv(): + exec 
"from __future__ import division; a = 31415926L / 10000000L" + assert a == 3.1415926 + +def test_floordiv(): + x = 31415926L + a = x // 10000000L + assert a == 3L + +def test_int_floordiv(): + x = 3000L + a = x // 1000 + assert a == 3L + + x = 3000L + a = x // -1000 + assert a == -3L + + x = 3000L + raises(ZeroDivisionError, "x // 0") + + n = sys.maxint+1 + assert n / int(-n) == -1L + +def test_numerator_denominator(): + assert (1L).numerator == 1L + assert (1L).denominator == 1L + assert (42L).numerator == 42L + assert (42L).denominator == 1L + +def test_compare(): + Z = 0 + ZL = 0L + + assert Z == ZL + assert not (Z != ZL) + assert ZL == Z + assert not (ZL != Z) + assert Z <= ZL + assert not (Z < ZL) + assert ZL <= ZL + assert not (ZL < ZL) + + for BIG in (1L, 1L << 62, 1L << 9999): + assert not (Z == BIG) + assert Z != BIG + assert not (BIG == Z) + assert BIG != Z + assert not (ZL == BIG) + assert ZL != BIG + assert Z <= BIG + assert Z < BIG + assert not (BIG <= Z) + assert not (BIG < Z) + assert ZL <= BIG + assert ZL < BIG + assert not (BIG <= ZL) + assert not (BIG < ZL) + assert not (Z <= -BIG) + assert not (Z < -BIG) + assert -BIG <= Z + assert -BIG < Z + assert not (ZL <= -BIG) + assert not (ZL < -BIG) + assert -BIG <= ZL + assert -BIG < ZL + # + assert not (BIG < int(BIG)) + assert (BIG <= int(BIG)) + assert (BIG == int(BIG)) + assert not (BIG != int(BIG)) + assert not (BIG > int(BIG)) + assert (BIG >= int(BIG)) + # + assert (BIG < int(BIG)+1) + assert (BIG <= int(BIG)+1) + assert not (BIG == int(BIG)+1) + assert (BIG != int(BIG)+1) + assert not (BIG > int(BIG)+1) + assert not (BIG >= int(BIG)+1) + # + assert not (BIG < int(BIG)-1) + assert not (BIG <= int(BIG)-1) + assert not (BIG == int(BIG)-1) + assert (BIG != int(BIG)-1) + assert (BIG > int(BIG)-1) + assert (BIG >= int(BIG)-1) + # + assert not (int(BIG) < BIG) + assert (int(BIG) <= BIG) + assert (int(BIG) == BIG) + assert not (int(BIG) != BIG) + assert not (int(BIG) > BIG) + assert (int(BIG) >= BIG) + # + assert not (int(BIG)+1 < BIG) + assert not (int(BIG)+1 <= BIG) + assert not (int(BIG)+1 == BIG) + assert (int(BIG)+1 != BIG) + assert (int(BIG)+1 > BIG) + assert (int(BIG)+1 >= BIG) + # + assert (int(BIG)-1 < BIG) + assert (int(BIG)-1 <= BIG) + assert not (int(BIG)-1 == BIG) + assert (int(BIG)-1 != BIG) + assert not (int(BIG)-1 > BIG) + assert not (int(BIG)-1 >= BIG) + +def test_conversion(): + class long2(long): + pass + x = 1L + x = long2(x<<100) + y = int(x) + assert type(y) == long + assert type(+long2(5)) is long + assert type(long2(5) << 0) is long + assert type(long2(5) >> 0) is long + assert type(long2(5) + 0) is long + assert type(long2(5) - 0) is long + assert type(long2(5) * 1) is long + assert type(1 * long2(5)) is long + assert type(0 + long2(5)) is long + assert type(-long2(0)) is long + assert type(long2(5) // 1) is long + +def test_shift(): + assert 65l >> 2l == 16l + assert 65l >> 2 == 16l + assert 65 >> 2l == 16l + assert 65l << 2l == 65l * 4 + assert 65l << 2 == 65l * 4 + assert 65 << 2l == 65l * 4 + raises(ValueError, "1L << -1L") + raises(ValueError, "1L << -1") + raises(OverflowError, "1L << (2 ** 100)") + raises(ValueError, "1L >> -1L") + raises(ValueError, "1L >> -1") + raises(OverflowError, "1L >> (2 ** 100)") + +def test_pow(): + x = 0L + assert pow(x, 0L, 1L) == 0L + assert pow(-1L, -1L) == -1.0 + assert pow(2 ** 68, 0.5) == 2.0 ** 34 + assert pow(2 ** 68, 2) == 2 ** 136 + raises(TypeError, pow, 2l, -1, 3) + raises(ValueError, pow, 2l, 5, 0) + + # some rpow tests + assert pow(0, 0L, 1L) == 0L + 
assert pow(-1, -1L) == -1.0 + +def test_int_pow(): + x = 2L + assert pow(x, 2) == 4L + assert pow(x, 2, 2) == 0L + assert pow(x, 2, 3L) == 1L + +def test_getnewargs(): + assert 0L .__getnewargs__() == (0L,) + assert (-1L) .__getnewargs__() == (-1L,) + +def test_divmod(): + def check_division(x, y): + q, r = divmod(x, y) + pab, pba = x*y, y*x + assert pab == pba + assert q == x // y + assert r == x % y + assert x == q*y + r + if y > 0: + assert 0 <= r < y + else: + assert y < r <= 0 + for x in [-1L, 0L, 1L, 2L ** 100 - 1, -2L ** 100 - 1]: + for y in [-105566530L, -1L, 1L, 1034522340L]: + print "checking division for %s, %s" % (x, y) + check_division(x, y) + check_division(x, int(y)) + check_division(int(x), y) + # special case from python tests: + s1 = 33 + s2 = 2 + x = 16565645174462751485571442763871865344588923363439663038777355323778298703228675004033774331442052275771343018700586987657790981527457655176938756028872904152013524821759375058141439 + x >>= s1*16 + y = 10953035502453784575 + y >>= s2*16 + x = 0x3FE0003FFFFC0001FFFL + y = 0x9800FFC1L + check_division(x, y) + raises(ZeroDivisionError, "x // 0L") + raises(ZeroDivisionError, "x % 0L") + raises(ZeroDivisionError, divmod, x, 0L) + raises(ZeroDivisionError, "x // 0") + raises(ZeroDivisionError, "x % 0") + raises(ZeroDivisionError, divmod, x, 0) + +def test_int_divmod(): + q, r = divmod(100L, 11) + assert q == 9L + assert r == 1L + +def test_format(): + assert repr(12345678901234567890) == '12345678901234567890L' + assert str(12345678901234567890) == '12345678901234567890' + assert hex(0x1234567890ABCDEFL) == '0x1234567890abcdefL' + assert oct(01234567012345670L) == '01234567012345670L' + +def test_bits(): + x = 0xAAAAAAAAL + assert x | 0x55555555L == 0xFFFFFFFFL + assert x & 0x55555555L == 0x00000000L + assert x ^ 0x55555555L == 0xFFFFFFFFL + assert -x | 0x55555555L == -0xAAAAAAA9L + assert x | 0x555555555L == 0x5FFFFFFFFL + assert x & 0x555555555L == 0x000000000L + assert x ^ 0x555555555L == 0x5FFFFFFFFL + +def test_hash(): + # ints have the same hash as equal longs + for i in range(-4, 14): + assert hash(i) == hash(long(i)) == long(i).__hash__() + # might check too much -- it's ok to change the hashing algorithm + assert hash(123456789L) == 123456789 + assert hash(1234567890123456789L) in ( + -1895067127, # with 32-bit platforms + 1234567890123456789) # with 64-bit platforms + +def test_math_log(): + raises(ValueError, math.log, 0L) + raises(ValueError, math.log, -1L) + raises(ValueError, math.log, -2L) + raises(ValueError, math.log, -(1L << 10000)) + #raises(ValueError, math.log, 0) + raises(ValueError, math.log, -1) + raises(ValueError, math.log, -2) + +def test_long(): + n = -sys.maxint-1 + assert long(n) == n + assert str(long(n)) == str(n) + a = buffer('123') + assert long(a) == 123L + +def test_huge_longs(): + x = 1L + huge = x << 40000L + raises(OverflowError, float, huge) + raises(OverflowError, operator.truediv, huge, 3) + raises(OverflowError, operator.truediv, huge, 3L) + +def test_just_trunc(): + class myint(object): + def __trunc__(self): + return 42 + assert long(myint()) == 42 + +def test_override___long__(): + class mylong(long): + def __long__(self): + return 42L + assert long(mylong(21)) == 42L + class myotherlong(long): + pass + assert long(myotherlong(21)) == 21L + +def test___long__(): + class A(object): + def __long__(self): + return 42 + assert long(A()) == 42L + class B(object): + def __int__(self): + return 42 + raises(TypeError, long, B()) + + class LongSubclass(long): + pass + class 
ReturnsLongSubclass(object): + def __long__(self): + return LongSubclass(42L) + n = long(ReturnsLongSubclass()) + assert n == 42 + assert type(n) is LongSubclass + +def test_trunc_returns(): + # but!: (blame CPython 2.7) + class Integral(object): + def __int__(self): + return 42 + class TruncReturnsNonLong(object): + def __trunc__(self): + return Integral() + n = long(TruncReturnsNonLong()) + assert type(n) is long + assert n == 42 + + class LongSubclass(long): + pass + class TruncReturnsNonInt(object): + def __trunc__(self): + return LongSubclass(42) + n = long(TruncReturnsNonInt()) + assert n == 42 + assert type(n) is LongSubclass + +def test_long_before_string(): + class A(str): + def __long__(self): + return 42 + assert long(A('abc')) == 42 + +def test_long_errors(): + raises(TypeError, long, 12, 12) + raises(ValueError, long, 'xxxxxx?', 12) + +def test_conjugate(): + assert (7L).conjugate() == 7L + assert (-7L).conjugate() == -7L + + class L(long): + pass + + assert type(L(7).conjugate()) is long + + class L(long): + def __pos__(self): + return 43 + assert L(7).conjugate() == 7L + +def test_bit_length(): + assert 8L.bit_length() == 4 + assert (-1<<40).bit_length() == 41 + assert ((2**31)-1).bit_length() == 31 + +def test_negative_zero(): + x = eval("-0L") + assert x == 0L + +def test_mix_int_and_long(): + class IntLongMixClass(object): + def __int__(self): + return 42L + + def __long__(self): + return 64 + + mixIntAndLong = IntLongMixClass() + as_long = long(mixIntAndLong) + assert type(as_long) is long + assert as_long == 64 + +def test_long_real(): + class A(long): pass + b = A(5).real + assert type(b) is long + +def test__int__(): + class A(long): + def __int__(self): + return 42 + + assert int(long(3)) == long(3) + assert int(A(13)) == 42 + +def test_long_error_msg(): + e = raises(TypeError, long, []) + assert str(e.value) == ( + "long() argument must be a string or a number, not 'list'") + +def test_coerce(): + assert 3L.__coerce__(4L) == (3L, 4L) + assert 3L.__coerce__(4) == (3, 4) + assert 3L.__coerce__(object()) == NotImplemented + +def test_linear_long_base_16(): + # never finishes if long(_, 16) is not linear-time + size = 100000 + n = "a" * size + expected = (2 << (size * 4)) // 3 + assert long(n, 16) == expected diff --git a/pypy/objspace/std/test/test_longobject.py b/pypy/objspace/std/test/test_longobject.py --- a/pypy/objspace/std/test/test_longobject.py +++ b/pypy/objspace/std/test/test_longobject.py @@ -1,4 +1,3 @@ -import py from pypy.objspace.std import longobject as lobj from rpython.rlib.rbigint import rbigint @@ -36,417 +35,3 @@ x &= r.MASK w_obj = space.newlong_from_rarith_int(r(x)) assert space.bigint_w(w_obj).eq(rbigint.fromlong(x)) - - -class AppTestLong: - def test_trunc(self): - import math - assert math.trunc(1L) == 1L - assert math.trunc(-1L) == -1L - - def test_add(self): - x = 123L - assert int(x + 12443L) == 123 + 12443 - x = -20 - assert x + 2 + 3L + True == -14L - - def test_sub(self): - assert int(58543L - 12332L) == 58543 - 12332 - assert int(58543L - 12332) == 58543 - 12332 - assert int(58543 - 12332L) == 58543 - 12332 - x = 237123838281233L - assert x * 12 == x * 12L - - def test_mul(self): - x = 363L - assert x * 2 ** 40 == x << 40 - - def test_truediv(self): - exec "from __future__ import division; a = 31415926L / 10000000L" - assert a == 3.1415926 - - def test_floordiv(self): - x = 31415926L - a = x // 10000000L - assert a == 3L - - def test_int_floordiv(self): - import sys - - x = 3000L - a = x // 1000 - assert a == 3L - - x = 3000L - a = x // 
-1000 - assert a == -3L - - x = 3000L - raises(ZeroDivisionError, "x // 0") - - n = sys.maxint+1 - assert n / int(-n) == -1L - - def test_numerator_denominator(self): - assert (1L).numerator == 1L - assert (1L).denominator == 1L - assert (42L).numerator == 42L - assert (42L).denominator == 1L - - def test_compare(self): - Z = 0 - ZL = 0L - - assert Z == ZL - assert not (Z != ZL) - assert ZL == Z - assert not (ZL != Z) - assert Z <= ZL - assert not (Z < ZL) - assert ZL <= ZL - assert not (ZL < ZL) - - for BIG in (1L, 1L << 62, 1L << 9999): - assert not (Z == BIG) - assert Z != BIG - assert not (BIG == Z) - assert BIG != Z - assert not (ZL == BIG) - assert ZL != BIG - assert Z <= BIG - assert Z < BIG - assert not (BIG <= Z) - assert not (BIG < Z) - assert ZL <= BIG - assert ZL < BIG - assert not (BIG <= ZL) - assert not (BIG < ZL) - assert not (Z <= -BIG) - assert not (Z < -BIG) - assert -BIG <= Z - assert -BIG < Z - assert not (ZL <= -BIG) - assert not (ZL < -BIG) - assert -BIG <= ZL - assert -BIG < ZL - # - assert not (BIG < int(BIG)) - assert (BIG <= int(BIG)) - assert (BIG == int(BIG)) - assert not (BIG != int(BIG)) - assert not (BIG > int(BIG)) - assert (BIG >= int(BIG)) - # - assert (BIG < int(BIG)+1) - assert (BIG <= int(BIG)+1) - assert not (BIG == int(BIG)+1) - assert (BIG != int(BIG)+1) - assert not (BIG > int(BIG)+1) - assert not (BIG >= int(BIG)+1) - # - assert not (BIG < int(BIG)-1) - assert not (BIG <= int(BIG)-1) - assert not (BIG == int(BIG)-1) - assert (BIG != int(BIG)-1) - assert (BIG > int(BIG)-1) - assert (BIG >= int(BIG)-1) - # - assert not (int(BIG) < BIG) - assert (int(BIG) <= BIG) - assert (int(BIG) == BIG) - assert not (int(BIG) != BIG) - assert not (int(BIG) > BIG) - assert (int(BIG) >= BIG) - # - assert not (int(BIG)+1 < BIG) - assert not (int(BIG)+1 <= BIG) - assert not (int(BIG)+1 == BIG) - assert (int(BIG)+1 != BIG) - assert (int(BIG)+1 > BIG) - assert (int(BIG)+1 >= BIG) - # - assert (int(BIG)-1 < BIG) - assert (int(BIG)-1 <= BIG) - assert not (int(BIG)-1 == BIG) - assert (int(BIG)-1 != BIG) - assert not (int(BIG)-1 > BIG) - assert not (int(BIG)-1 >= BIG) - - def test_conversion(self): - class long2(long): - pass - x = 1L - x = long2(x<<100) - y = int(x) - assert type(y) == long - assert type(+long2(5)) is long - assert type(long2(5) << 0) is long - assert type(long2(5) >> 0) is long - assert type(long2(5) + 0) is long - assert type(long2(5) - 0) is long - assert type(long2(5) * 1) is long - assert type(1 * long2(5)) is long - assert type(0 + long2(5)) is long - assert type(-long2(0)) is long - assert type(long2(5) // 1) is long - - def test_shift(self): - assert 65l >> 2l == 16l - assert 65l >> 2 == 16l - assert 65 >> 2l == 16l - assert 65l << 2l == 65l * 4 - assert 65l << 2 == 65l * 4 - assert 65 << 2l == 65l * 4 - raises(ValueError, "1L << -1L") - raises(ValueError, "1L << -1") - raises(OverflowError, "1L << (2 ** 100)") - raises(ValueError, "1L >> -1L") - raises(ValueError, "1L >> -1") - raises(OverflowError, "1L >> (2 ** 100)") - - def test_pow(self): - x = 0L - assert pow(x, 0L, 1L) == 0L - assert pow(-1L, -1L) == -1.0 - assert pow(2 ** 68, 0.5) == 2.0 ** 34 - assert pow(2 ** 68, 2) == 2 ** 136 - raises(TypeError, pow, 2l, -1, 3) - raises(ValueError, pow, 2l, 5, 0) - - # some rpow tests - assert pow(0, 0L, 1L) == 0L - assert pow(-1, -1L) == -1.0 - - def test_int_pow(self): - x = 2L - assert pow(x, 2) == 4L - assert pow(x, 2, 2) == 0L - assert pow(x, 2, 3L) == 1L - - def test_getnewargs(self): - assert 0L .__getnewargs__() == (0L,) - assert (-1L) 
.__getnewargs__() == (-1L,) - - def test_divmod(self): - def check_division(x, y): - q, r = divmod(x, y) - pab, pba = x*y, y*x - assert pab == pba - assert q == x // y - assert r == x % y - assert x == q*y + r - if y > 0: - assert 0 <= r < y - else: - assert y < r <= 0 - for x in [-1L, 0L, 1L, 2L ** 100 - 1, -2L ** 100 - 1]: - for y in [-105566530L, -1L, 1L, 1034522340L]: - print "checking division for %s, %s" % (x, y) - check_division(x, y) - check_division(x, int(y)) - check_division(int(x), y) - # special case from python tests: - s1 = 33 - s2 = 2 - x = 16565645174462751485571442763871865344588923363439663038777355323778298703228675004033774331442052275771343018700586987657790981527457655176938756028872904152013524821759375058141439 - x >>= s1*16 - y = 10953035502453784575 - y >>= s2*16 - x = 0x3FE0003FFFFC0001FFFL - y = 0x9800FFC1L - check_division(x, y) - raises(ZeroDivisionError, "x // 0L") - raises(ZeroDivisionError, "x % 0L") - raises(ZeroDivisionError, divmod, x, 0L) - raises(ZeroDivisionError, "x // 0") - raises(ZeroDivisionError, "x % 0") - raises(ZeroDivisionError, divmod, x, 0) - - def test_int_divmod(self): - q, r = divmod(100L, 11) - assert q == 9L - assert r == 1L - - def test_format(self): - assert repr(12345678901234567890) == '12345678901234567890L' - assert str(12345678901234567890) == '12345678901234567890' - assert hex(0x1234567890ABCDEFL) == '0x1234567890abcdefL' - assert oct(01234567012345670L) == '01234567012345670L' - - def test_bits(self): - x = 0xAAAAAAAAL - assert x | 0x55555555L == 0xFFFFFFFFL - assert x & 0x55555555L == 0x00000000L - assert x ^ 0x55555555L == 0xFFFFFFFFL - assert -x | 0x55555555L == -0xAAAAAAA9L - assert x | 0x555555555L == 0x5FFFFFFFFL - assert x & 0x555555555L == 0x000000000L - assert x ^ 0x555555555L == 0x5FFFFFFFFL - - def test_hash(self): - # ints have the same hash as equal longs - for i in range(-4, 14): - assert hash(i) == hash(long(i)) == long(i).__hash__() - # might check too much -- it's ok to change the hashing algorithm - assert hash(123456789L) == 123456789 - assert hash(1234567890123456789L) in ( - -1895067127, # with 32-bit platforms - 1234567890123456789) # with 64-bit platforms - - def test_math_log(self): - import math - raises(ValueError, math.log, 0L) - raises(ValueError, math.log, -1L) - raises(ValueError, math.log, -2L) - raises(ValueError, math.log, -(1L << 10000)) - #raises(ValueError, math.log, 0) - raises(ValueError, math.log, -1) - raises(ValueError, math.log, -2) - - def test_long(self): - import sys - n = -sys.maxint-1 - assert long(n) == n - assert str(long(n)) == str(n) - a = buffer('123') - assert long(a) == 123L - - def test_huge_longs(self): - import operator - x = 1L - huge = x << 40000L - raises(OverflowError, float, huge) - raises(OverflowError, operator.truediv, huge, 3) - raises(OverflowError, operator.truediv, huge, 3L) - - def test_just_trunc(self): - class myint(object): - def __trunc__(self): - return 42 - assert long(myint()) == 42 - - def test_override___long__(self): - class mylong(long): - def __long__(self): - return 42L - assert long(mylong(21)) == 42L - class myotherlong(long): - pass - assert long(myotherlong(21)) == 21L - - def test___long__(self): - class A(object): - def __long__(self): - return 42 - assert long(A()) == 42L - class B(object): - def __int__(self): - return 42 - raises(TypeError, long, B()) - - class LongSubclass(long): - pass - class ReturnsLongSubclass(object): - def __long__(self): - return LongSubclass(42L) - n = long(ReturnsLongSubclass()) - assert n == 42 - assert 
type(n) is LongSubclass - - def test_trunc_returns(self): - # but!: (blame CPython 2.7) - class Integral(object): - def __int__(self): - return 42 - class TruncReturnsNonLong(object): - def __trunc__(self): - return Integral() - n = long(TruncReturnsNonLong()) - assert type(n) is long - assert n == 42 - - class LongSubclass(long): - pass - class TruncReturnsNonInt(object): - def __trunc__(self): - return LongSubclass(42) - n = long(TruncReturnsNonInt()) - assert n == 42 - assert type(n) is LongSubclass - - def test_long_before_string(self): - class A(str): - def __long__(self): - return 42 - assert long(A('abc')) == 42 - - def test_long_errors(self): - raises(TypeError, long, 12, 12) - raises(ValueError, long, 'xxxxxx?', 12) - - def test_conjugate(self): - assert (7L).conjugate() == 7L - assert (-7L).conjugate() == -7L - - class L(long): - pass - - assert type(L(7).conjugate()) is long - - class L(long): - def __pos__(self): - return 43 - assert L(7).conjugate() == 7L - - def test_bit_length(self): - assert 8L.bit_length() == 4 - assert (-1<<40).bit_length() == 41 - assert ((2**31)-1).bit_length() == 31 - - def test_negative_zero(self): - x = eval("-0L") - assert x == 0L - - def test_mix_int_and_long(self): - class IntLongMixClass(object): - def __int__(self): - return 42L - - def __long__(self): - return 64 - - mixIntAndLong = IntLongMixClass() - as_long = long(mixIntAndLong) - assert type(as_long) is long - assert as_long == 64 - - def test_long_real(self): - class A(long): pass - b = A(5).real - assert type(b) is long - - def test__int__(self): - class A(long): - def __int__(self): - return 42 - - assert int(long(3)) == long(3) - assert int(A(13)) == 42 - - def test_long_error_msg(self): - e = raises(TypeError, long, []) - assert str(e.value) == ( - "long() argument must be a string or a number, not 'list'") - - def test_coerce(self): - assert 3L.__coerce__(4L) == (3L, 4L) - assert 3L.__coerce__(4) == (3, 4) - assert 3L.__coerce__(object()) == NotImplemented - - def test_linear_long_base_16(self): - # never finishes if long(_, 16) is not linear-time - size = 100000 - n = "a" * size - expected = (2 << (size * 4)) // 3 - assert long(n, 16) == expected - From pypy.commits at gmail.com Fri Aug 9 09:59:50 2019 From: pypy.commits at gmail.com (mattip) Date: Fri, 09 Aug 2019 06:59:50 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: allow ssl tests to run without segfault, hang Message-ID: <5d4d7c56.1c69fb81.ccc56.d0e2@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97121:a405e4fc3800 Date: 2019-08-09 16:58 +0300 http://bitbucket.org/pypy/pypy/changeset/a405e4fc3800/ Log: allow ssl tests to run without segfault, hang diff --git a/lib-python/3/test/test_ssl.py b/lib-python/3/test/test_ssl.py --- a/lib-python/3/test/test_ssl.py +++ b/lib-python/3/test/test_ssl.py @@ -3013,12 +3013,15 @@ self.assertEqual(s.read(-1, buffer), len(data)) self.assertEqual(buffer, data) # sendall accepts bytes-like objects - if ctypes is not None: - ubyte = ctypes.c_ubyte * len(data) - byteslike = ubyte.from_buffer_copy(data) - s.sendall(byteslike) - self.assertEqual(s.read(), data) - + try: + if ctypes is not None: + ubyte = ctypes.c_ubyte * len(data) + byteslike = ubyte.from_buffer_copy(data) + s.sendall(byteslike) + self.assertEqual(s.read(), data) + except: + s.close() + raise # Make sure sendmsg et al are disallowed to avoid # inadvertent disclosure of data and/or corruption # of the encrypted data stream diff --git a/lib_pypy/_cffi_ssl/_stdssl/certificate.py b/lib_pypy/_cffi_ssl/_stdssl/certificate.py 
--- a/lib_pypy/_cffi_ssl/_stdssl/certificate.py +++ b/lib_pypy/_cffi_ssl/_stdssl/certificate.py @@ -265,6 +265,8 @@ count = lib.sk_DIST_POINT_num(dps) for i in range(count): dp = lib.sk_DIST_POINT_value(dps, i); + if not dp.distpoint: + return None gns = dp.distpoint.name.fullname; jcount = lib.sk_GENERAL_NAME_num(gns) From pypy.commits at gmail.com Fri Aug 9 11:17:50 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 09 Aug 2019 08:17:50 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: fix bad merge in idlelib Message-ID: <5d4d8e9e.1c69fb81.8c7d7.89ee@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97122:2ab7f3bbe84f Date: 2019-08-09 16:17 +0100 http://bitbucket.org/pypy/pypy/changeset/2ab7f3bbe84f/ Log: fix bad merge in idlelib diff too long, truncating to 2000 out of 2142 lines diff --git a/lib-python/3/idlelib/filelist.py b/lib-python/3/idlelib/filelist.py --- a/lib-python/3/idlelib/filelist.py +++ b/lib-python/3/idlelib/filelist.py @@ -120,7 +120,7 @@ fixwordbreaks(root) root.withdraw() flist = FileList(root) - flist.new() + flist.new() if flist.inversedict: root.mainloop() diff --git a/lib-python/3/idlelib/help.html b/lib-python/3/idlelib/help.html --- a/lib-python/3/idlelib/help.html +++ b/lib-python/3/idlelib/help.html @@ -66,14 +66,14 @@ previous | -
  [help.html diff garbled in extraction (breadcrumb HTML markup lost): hunks @@ -66,14 +66,14 @@ and @@ -862,14 +862,14 @@ touch the "Python »" / "3.8.0a0 Documentation »" navigation items, and hunk @@ -895,8 +895,8 @@ touches the footer text "The Python Software Foundation is a non-profit corporation. Please donate." just before]
    Last updated on Nov 12, 2018. diff --git a/lib-python/3/idlelib/help.txt b/lib-python/3/idlelib/help.txt deleted file mode 100644 --- a/lib-python/3/idlelib/help.txt +++ /dev/null @@ -1,372 +0,0 @@ -This file, idlelib/help.txt is out-of-date and no longer used by Idle. -It is deprecated and will be removed in the future, possibly in 3.6 ----------------------------------------------------------------------- - -[See the end of this file for ** TIPS ** on using IDLE !!] - -IDLE is the Python IDE built with the tkinter GUI toolkit. - -IDLE has the following features: --coded in 100% pure Python, using the tkinter GUI toolkit --cross-platform: works on Windows, Unix, and OS X --multi-window text editor with multiple undo, Python colorizing, smart indent, -call tips, and many other features --Python shell window (a.k.a interactive interpreter) --debugger (not complete, but you can set breakpoints, view and step) - -Menus: - -IDLE has two window types the Shell window and the Editor window. It is -possible to have multiple editor windows simultaneously. IDLE's -menus dynamically change based on which window is currently selected. Each menu -documented below indicates which window type it is associated with. - -File Menu (Shell and Editor): - - New File -- Create a new file editing window - Open... -- Open an existing file - Open Module... -- Open an existing module (searches sys.path) - Recent Files... -- Open a list of recent files - Class Browser -- Show classes and methods in current file - Path Browser -- Show sys.path directories, modules, classes, - and methods - --- - Save -- Save current window to the associated file (unsaved - windows have a * before and after the window title) - - Save As... -- Save current window to new file, which becomes - the associated file - Save Copy As... -- Save current window to different file - without changing the associated file - --- - Print Window -- Print the current window - --- - Close -- Close current window (asks to save if unsaved) - Exit -- Close all windows, quit (asks to save if unsaved) - -Edit Menu (Shell and Editor): - - Undo -- Undo last change to current window - (a maximum of 1000 changes may be undone) - Redo -- Redo last undone change to current window - --- - Cut -- Copy a selection into system-wide clipboard, - then delete the selection - Copy -- Copy selection into system-wide clipboard - Paste -- Insert system-wide clipboard into window - Select All -- Select the entire contents of the edit buffer - --- - Find... -- Open a search dialog box with many options - Find Again -- Repeat last search - Find Selection -- Search for the string in the selection - Find in Files... -- Open a search dialog box for searching files - Replace... -- Open a search-and-replace dialog box - Go to Line -- Ask for a line number and show that line - Expand Word -- Expand the word you have typed to match another - word in the same buffer; repeat to get a - different expansion - Show Calltip -- After an unclosed parenthesis for a function, open - a small window with function parameter hints - Show Parens -- Highlight the surrounding parenthesis - Show Completions -- Open a scroll window allowing selection keywords - and attributes. 
(see '*TIPS*', below) - -Format Menu (Editor window only): - - Indent Region -- Shift selected lines right by the indent width - (default 4 spaces) - Dedent Region -- Shift selected lines left by the indent width - (default 4 spaces) - Comment Out Region -- Insert ## in front of selected lines - Uncomment Region -- Remove leading # or ## from selected lines - Tabify Region -- Turns *leading* stretches of spaces into tabs. - (Note: We recommend using 4 space blocks to indent Python code.) - Untabify Region -- Turn *all* tabs into the corrent number of spaces - Toggle tabs -- Open a dialog to switch between indenting with - spaces and tabs. - New Indent Width... -- Open a dialog to change indent width. The - accepted default by the Python community is 4 - spaces. - Format Paragraph -- Reformat the current blank-line-separated - paragraph. All lines in the paragraph will be - formatted to less than 80 columns. - --- - Strip trailing whitespace -- Removed any space characters after the end - of the last non-space character - -Run Menu (Editor window only): - - Python Shell -- Open or wake up the Python shell window - --- - Check Module -- Check the syntax of the module currently open in the - Editor window. If the module has not been saved IDLE - will prompt the user to save the code. - Run Module -- Restart the shell to clean the environment, then - execute the currently open module. If the module has - not been saved IDLE will prompt the user to save the - code. - -Shell Menu (Shell window only): - - View Last Restart -- Scroll the shell window to the last Shell restart - Restart Shell -- Restart the shell to clean the environment - -Debug Menu (Shell window only): - - Go to File/Line -- Look around the insert point for a filename - and line number, open the file, and show the line. - Useful to view the source lines referenced in an - exception traceback. Available in the context - menu of the Shell window. - Debugger (toggle) -- This feature is not complete and considered - experimental. Run commands in the shell under the - debugger. - Stack Viewer -- Show the stack traceback of the last exception - Auto-open Stack Viewer (toggle) -- Toggle automatically opening the - stack viewer on unhandled - exception - -Options Menu (Shell and Editor): - - Configure IDLE -- Open a configuration dialog. Fonts, indentation, - keybindings, and color themes may be altered. - Startup Preferences may be set, and additional Help - sources can be specified. On OS X, open the - configuration dialog by selecting Preferences - in the application menu. - - --- - Code Context (toggle) -- Open a pane at the top of the edit window - which shows the block context of the section - of code which is scrolling off the top or the - window. This is not present in the Shell - window only the Editor window. - -Window Menu (Shell and Editor): - - Zoom Height -- Toggles the window between normal size (40x80 initial - setting) and maximum height. The initial size is in the Configure - IDLE dialog under the general tab. - --- - The rest of this menu lists the names of all open windows; - select one to bring it to the foreground (deiconifying it if - necessary). - -Help Menu: - - About IDLE -- Version, copyright, license, credits - --- - IDLE Help -- Display this file which is a help file for IDLE - detailing the menu options, basic editing and navigation, - and other tips. - Python Docs -- Access local Python documentation, if - installed. 
Or will start a web browser and open - docs.python.org showing the latest Python documentation. - --- - Additional help sources may be added here with the Configure IDLE - dialog under the General tab. - -Editor context menu (Right-click / Control-click on OS X in Edit window): - - Cut -- Copy a selection into system-wide clipboard, - then delete the selection - Copy -- Copy selection into system-wide clipboard - Paste -- Insert system-wide clipboard into window - Set Breakpoint -- Sets a breakpoint. Breakpoints are only enabled - when the debugger is open. - Clear Breakpoint -- Clears the breakpoint on that line - -Shell context menu (Right-click / Control-click on OS X in Shell window): - - Cut -- Copy a selection into system-wide clipboard, - then delete the selection - Copy -- Copy selection into system-wide clipboard - Paste -- Insert system-wide clipboard into window - --- - Go to file/line -- Same as in Debug menu - - -** TIPS ** -========== - -Additional Help Sources: - - Windows users can Google on zopeshelf.chm to access Zope help files in - the Windows help format. The Additional Help Sources feature of the - configuration GUI supports .chm, along with any other filetypes - supported by your browser. Supply a Menu Item title, and enter the - location in the Help File Path slot of the New Help Source dialog. Use - http:// and/or www. to identify external URLs, or download the file and - browse for its path on your machine using the Browse button. - - All users can access the extensive sources of help, including - tutorials, available at docs.python.org. Selected URLs can be added - or removed from the Help menu at any time using Configure IDLE. - -Basic editing and navigation: - - Backspace deletes char to the left; DEL deletes char to the right. - Control-backspace deletes word left, Control-DEL deletes word right. - Arrow keys and Page Up/Down move around. - Control-left/right Arrow moves by words in a strange but useful way. - Home/End go to begin/end of line. - Control-Home/End go to begin/end of file. - Some useful Emacs bindings are inherited from Tcl/Tk: - Control-a beginning of line - Control-e end of line - Control-k kill line (but doesn't put it in clipboard) - Control-l center window around the insertion point - Standard keybindings (like Control-c to copy and Control-v to - paste) may work. Keybindings are selected in the Configure IDLE - dialog. - -Automatic indentation: - - After a block-opening statement, the next line is indented by 4 spaces - (in the Python Shell window by one tab). After certain keywords - (break, return etc.) the next line is dedented. In leading - indentation, Backspace deletes up to 4 spaces if they are there. Tab - inserts spaces (in the Python Shell window one tab), number depends on - Indent Width. Currently tabs are restricted to four spaces due - to Tcl/Tk limitations. - - See also the indent/dedent region commands in the edit menu. - -Completions: - - Completions are supplied for functions, classes, and attributes of - classes, both built-in and user-defined. Completions are also provided - for filenames. - - The AutoCompleteWindow (ACW) will open after a predefined delay - (default is two seconds) after a '.' or (in a string) an os.sep is - typed. If after one of those characters (plus zero or more other - characters) a tab is typed the ACW will open immediately if a possible - continuation is found. - - If there is only one possible completion for the characters entered, a - tab will supply that completion without opening the ACW. 
- - 'Show Completions' will force open a completions window, by default the - Control-space keys will open a completions window. In an empty - string, this will contain the files in the current directory. On a - blank line, it will contain the built-in and user-defined functions and - classes in the current name spaces, plus any modules imported. If some - characters have been entered, the ACW will attempt to be more specific. - - If string of characters is typed, the ACW selection will jump to the - entry most closely matching those characters. Entering a tab will cause - the longest non-ambiguous match to be entered in the Edit window or - Shell. Two tabs in a row will supply the current ACW selection, as - will return or a double click. Cursor keys, Page Up/Down, mouse - selection, and the scroll wheel all operate on the ACW. - - "Hidden" attributes can be accessed by typing the beginning of hidden - name after a '.', e.g. '_'. This allows access to modules with - '__all__' set, or to class-private attributes. - - Completions and the 'Expand Word' facility can save a lot of typing! - - Completions are currently limited to those in the namespaces. Names in - an Editor window which are not via __main__ or sys.modules will not be - found. Run the module once with your imports to correct this - situation. Note that IDLE itself places quite a few modules in - sys.modules, so much can be found by default, e.g. the re module. - - If you don't like the ACW popping up unbidden, simply make the delay - longer or disable the extension. Or another option is the delay could - be set to zero. Another alternative to preventing ACW popups is to - disable the call tips extension. - -Python Shell window: - - Control-c interrupts executing command. - Control-d sends end-of-file; closes window if typed at >>> prompt. - Alt-/ expand word is also useful to reduce typing. - - Command history: - - Alt-p retrieves previous command matching what you have typed. On OS X - use Control-p. - Alt-n retrieves next. On OS X use Control-n. - Return while cursor is on a previous command retrieves that command. - - Syntax colors: - - The coloring is applied in a background "thread", so you may - occasionally see uncolorized text. To change the color - scheme, use the Configure IDLE / Highlighting dialog. - - Python default syntax colors: - - Keywords orange - Builtins royal purple - Strings green - Comments red - Definitions blue - - Shell default colors: - - Console output brown - stdout blue - stderr red - stdin black - -Other preferences: - - The font preferences, highlighting, keys, and general preferences can - be changed via the Configure IDLE menu option. Be sure to note that - keys can be user defined, IDLE ships with four built in key sets. In - addition a user can create a custom key set in the Configure IDLE - dialog under the keys tab. - -Command line usage: - - Enter idle -h at the command prompt to get a usage message. - - idle.py [-c command] [-d] [-e] [-s] [-t title] [arg] ... - - -c command run this command - -d enable debugger - -e edit mode; arguments are files to be edited - -s run $IDLESTARTUP or $PYTHONSTARTUP first - -t title set title of shell window - - If there are arguments: - 1. If -e is used, arguments are files opened for editing and sys.argv - reflects the arguments passed to IDLE itself. - 2. Otherwise, if -c is used, all arguments are placed in - sys.argv[1:...], with sys.argv[0] set to -c. - 3. 
Otherwise, if neither -e nor -c is used, the first argument is a - script which is executed with the remaining arguments in - sys.argv[1:...] and sys.argv[0] set to the script name. If the - script name is -, no script is executed but an interactive Python - session is started; the arguments are still available in sys.argv. - -Running without a subprocess: (DEPRECATED in Python 3.4 see Issue 16123) - - If IDLE is started with the -n command line switch it will run in a - single process and will not create the subprocess which runs the RPC - Python execution server. This can be useful if Python cannot create - the subprocess or the RPC socket interface on your platform. However, - in this mode user code is not isolated from IDLE itself. Also, the - environment is not restarted when Run/Run Module (F5) is selected. If - your code has been modified, you must reload() the affected modules and - re-import any specific items (e.g. from foo import baz) if the changes - are to take effect. For these reasons, it is preferable to run IDLE - with the default subprocess if at all possible. - -Extensions: - - IDLE contains an extension facility. See the beginning of - config-extensions.def in the idlelib directory for further information. - The default extensions are currently: - - FormatParagraph - AutoExpand - ZoomHeight - ScriptBinding - CallTips - ParenMatch - AutoComplete - CodeContext diff --git a/lib-python/3/idlelib/idle_test/test_config_help.py b/lib-python/3/idlelib/idle_test/test_config_help.py deleted file mode 100644 --- a/lib-python/3/idlelib/idle_test/test_config_help.py +++ /dev/null @@ -1,106 +0,0 @@ -"""Unittests for idlelib.configHelpSourceEdit""" -import unittest -from idlelib.idle_test.mock_tk import Var, Mbox, Entry -from idlelib import configHelpSourceEdit as help_dialog_module - -help_dialog = help_dialog_module.GetHelpSourceDialog - - -class Dummy_help_dialog: - # Mock for testing the following methods of help_dialog - menu_ok = help_dialog.menu_ok - path_ok = help_dialog.path_ok - ok = help_dialog.ok - cancel = help_dialog.cancel - # Attributes, constant or variable, needed for tests - menu = Var() - entryMenu = Entry() - path = Var() - entryPath = Entry() - result = None - destroyed = False - - def destroy(self): - self.destroyed = True - - -# menu_ok and path_ok call Mbox.showerror if menu and path are not ok. 
-orig_mbox = help_dialog_module.tkMessageBox -showerror = Mbox.showerror - - -class ConfigHelpTest(unittest.TestCase): - dialog = Dummy_help_dialog() - - @classmethod - def setUpClass(cls): - help_dialog_module.tkMessageBox = Mbox - - @classmethod - def tearDownClass(cls): - help_dialog_module.tkMessageBox = orig_mbox - - def test_blank_menu(self): - self.dialog.menu.set('') - self.assertFalse(self.dialog.menu_ok()) - self.assertEqual(showerror.title, 'Menu Item Error') - self.assertIn('No', showerror.message) - - def test_long_menu(self): - self.dialog.menu.set('hello' * 10) - self.assertFalse(self.dialog.menu_ok()) - self.assertEqual(showerror.title, 'Menu Item Error') - self.assertIn('long', showerror.message) - - def test_good_menu(self): - self.dialog.menu.set('help') - showerror.title = 'No Error' # should not be called - self.assertTrue(self.dialog.menu_ok()) - self.assertEqual(showerror.title, 'No Error') - - def test_blank_path(self): - self.dialog.path.set('') - self.assertFalse(self.dialog.path_ok()) - self.assertEqual(showerror.title, 'File Path Error') - self.assertIn('No', showerror.message) - - def test_invalid_file_path(self): - self.dialog.path.set('foobar' * 100) - self.assertFalse(self.dialog.path_ok()) - self.assertEqual(showerror.title, 'File Path Error') - self.assertIn('not exist', showerror.message) - - def test_invalid_url_path(self): - self.dialog.path.set('ww.foobar.com') - self.assertFalse(self.dialog.path_ok()) - self.assertEqual(showerror.title, 'File Path Error') - self.assertIn('not exist', showerror.message) - - self.dialog.path.set('htt.foobar.com') - self.assertFalse(self.dialog.path_ok()) - self.assertEqual(showerror.title, 'File Path Error') - self.assertIn('not exist', showerror.message) - - def test_good_path(self): - self.dialog.path.set('https://docs.python.org') - showerror.title = 'No Error' # should not be called - self.assertTrue(self.dialog.path_ok()) - self.assertEqual(showerror.title, 'No Error') - - def test_ok(self): - self.dialog.destroyed = False - self.dialog.menu.set('help') - self.dialog.path.set('https://docs.python.org') - self.dialog.ok() - self.assertEqual(self.dialog.result, ('help', - 'https://docs.python.org')) - self.assertTrue(self.dialog.destroyed) - - def test_cancel(self): - self.dialog.destroyed = False - self.dialog.cancel() - self.assertEqual(self.dialog.result, None) - self.assertTrue(self.dialog.destroyed) - -if __name__ == '__main__': - unittest.main(verbosity=2, exit=False) diff --git a/lib-python/3/idlelib/idle_test/test_config_name.py b/lib-python/3/idlelib/idle_test/test_config_name.py deleted file mode 100644 --- a/lib-python/3/idlelib/idle_test/test_config_name.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Unit tests for idlelib.configSectionNameDialog""" -import unittest -from idlelib.idle_test.mock_tk import Var, Mbox -from idlelib import configSectionNameDialog as name_dialog_module - -name_dialog = name_dialog_module.GetCfgSectionNameDialog - -class Dummy_name_dialog: - # Mock for testing the following methods of name_dialog - name_ok = name_dialog.name_ok - Ok = name_dialog.Ok - Cancel = name_dialog.Cancel - # Attributes, constant or variable, needed for tests - used_names = ['used'] - name = Var() - result = None - destroyed = False - def destroy(self): - self.destroyed = True - -# name_ok calls Mbox.showerror if name is not ok -orig_mbox = name_dialog_module.tkMessageBox -showerror = Mbox.showerror - -class ConfigNameTest(unittest.TestCase): - dialog = Dummy_name_dialog() - - @classmethod - def setUpClass(cls): - 
name_dialog_module.tkMessageBox = Mbox - - @classmethod - def tearDownClass(cls): - name_dialog_module.tkMessageBox = orig_mbox - - def test_blank_name(self): - self.dialog.name.set(' ') - self.assertEqual(self.dialog.name_ok(), '') - self.assertEqual(showerror.title, 'Name Error') - self.assertIn('No', showerror.message) - - def test_used_name(self): - self.dialog.name.set('used') - self.assertEqual(self.dialog.name_ok(), '') - self.assertEqual(showerror.title, 'Name Error') - self.assertIn('use', showerror.message) - - def test_long_name(self): - self.dialog.name.set('good'*8) - self.assertEqual(self.dialog.name_ok(), '') - self.assertEqual(showerror.title, 'Name Error') - self.assertIn('too long', showerror.message) - - def test_good_name(self): - self.dialog.name.set(' good ') - showerror.title = 'No Error' # should not be called - self.assertEqual(self.dialog.name_ok(), 'good') - self.assertEqual(showerror.title, 'No Error') - - def test_ok(self): - self.dialog.destroyed = False - self.dialog.name.set('good') - self.dialog.Ok() - self.assertEqual(self.dialog.result, 'good') - self.assertTrue(self.dialog.destroyed) - - def test_cancel(self): - self.dialog.destroyed = False - self.dialog.Cancel() - self.assertEqual(self.dialog.result, '') - self.assertTrue(self.dialog.destroyed) - - -if __name__ == '__main__': - unittest.main(verbosity=2, exit=False) diff --git a/lib-python/3/idlelib/idle_test/test_editmenu.py b/lib-python/3/idlelib/idle_test/test_editmenu.py --- a/lib-python/3/idlelib/idle_test/test_editmenu.py +++ b/lib-python/3/idlelib/idle_test/test_editmenu.py @@ -17,7 +17,6 @@ @classmethod def setUpClass(cls): cls.root = root = tk.Tk() - root.withdraw() cls.root.withdraw() pyshell.fix_x11_paste(root) cls.text = tk.Text(root) diff --git a/lib-python/3/idlelib/idle_test/test_formatparagraph.py b/lib-python/3/idlelib/idle_test/test_formatparagraph.py deleted file mode 100644 --- a/lib-python/3/idlelib/idle_test/test_formatparagraph.py +++ /dev/null @@ -1,376 +0,0 @@ -# Test the functions and main class method of FormatParagraph.py -import unittest -from idlelib import FormatParagraph as fp -from idlelib.EditorWindow import EditorWindow -from tkinter import Tk, Text -from test.support import requires - - -class Is_Get_Test(unittest.TestCase): - """Test the is_ and get_ functions""" - test_comment = '# This is a comment' - test_nocomment = 'This is not a comment' - trailingws_comment = '# This is a comment ' - leadingws_comment = ' # This is a comment' - leadingws_nocomment = ' This is not a comment' - - def test_is_all_white(self): - self.assertTrue(fp.is_all_white('')) - self.assertTrue(fp.is_all_white('\t\n\r\f\v')) - self.assertFalse(fp.is_all_white(self.test_comment)) - - def test_get_indent(self): - Equal = self.assertEqual - Equal(fp.get_indent(self.test_comment), '') - Equal(fp.get_indent(self.trailingws_comment), '') - Equal(fp.get_indent(self.leadingws_comment), ' ') - Equal(fp.get_indent(self.leadingws_nocomment), ' ') - - def test_get_comment_header(self): - Equal = self.assertEqual - # Test comment strings - Equal(fp.get_comment_header(self.test_comment), '#') - Equal(fp.get_comment_header(self.trailingws_comment), '#') - Equal(fp.get_comment_header(self.leadingws_comment), ' #') - # Test non-comment strings - Equal(fp.get_comment_header(self.leadingws_nocomment), ' ') - Equal(fp.get_comment_header(self.test_nocomment), '') - - -class FindTest(unittest.TestCase): - """Test the find_paragraph function in FormatParagraph. 
- - Using the runcase() function, find_paragraph() is called with 'mark' set at - multiple indexes before and inside the test paragraph. - - It appears that code with the same indentation as a quoted string is grouped - as part of the same paragraph, which is probably incorrect behavior. - """ - - @classmethod - def setUpClass(cls): - from idlelib.idle_test.mock_tk import Text - cls.text = Text() - - def runcase(self, inserttext, stopline, expected): - # Check that find_paragraph returns the expected paragraph when - # the mark index is set to beginning, middle, end of each line - # up to but not including the stop line - text = self.text - text.insert('1.0', inserttext) - for line in range(1, stopline): - linelength = int(text.index("%d.end" % line).split('.')[1]) - for col in (0, linelength//2, linelength): - tempindex = "%d.%d" % (line, col) - self.assertEqual(fp.find_paragraph(text, tempindex), expected) - text.delete('1.0', 'end') - - def test_find_comment(self): - comment = ( - "# Comment block with no blank lines before\n" - "# Comment line\n" - "\n") - self.runcase(comment, 3, ('1.0', '3.0', '#', comment[0:58])) - - comment = ( - "\n" - "# Comment block with whitespace line before and after\n" - "# Comment line\n" - "\n") - self.runcase(comment, 4, ('2.0', '4.0', '#', comment[1:70])) - - comment = ( - "\n" - " # Indented comment block with whitespace before and after\n" - " # Comment line\n" - "\n") - self.runcase(comment, 4, ('2.0', '4.0', ' #', comment[1:82])) - - comment = ( - "\n" - "# Single line comment\n" - "\n") - self.runcase(comment, 3, ('2.0', '3.0', '#', comment[1:23])) - - comment = ( - "\n" - " # Single line comment with leading whitespace\n" - "\n") - self.runcase(comment, 3, ('2.0', '3.0', ' #', comment[1:51])) - - comment = ( - "\n" - "# Comment immediately followed by code\n" - "x = 42\n" - "\n") - self.runcase(comment, 3, ('2.0', '3.0', '#', comment[1:40])) - - comment = ( - "\n" - " # Indented comment immediately followed by code\n" - "x = 42\n" - "\n") - self.runcase(comment, 3, ('2.0', '3.0', ' #', comment[1:53])) - - comment = ( - "\n" - "# Comment immediately followed by indented code\n" - " x = 42\n" - "\n") - self.runcase(comment, 3, ('2.0', '3.0', '#', comment[1:49])) - - def test_find_paragraph(self): - teststring = ( - '"""String with no blank lines before\n' - 'String line\n' - '"""\n' - '\n') - self.runcase(teststring, 4, ('1.0', '4.0', '', teststring[0:53])) - - teststring = ( - "\n" - '"""String with whitespace line before and after\n' - 'String line.\n' - '"""\n' - '\n') - self.runcase(teststring, 5, ('2.0', '5.0', '', teststring[1:66])) - - teststring = ( - '\n' - ' """Indented string with whitespace before and after\n' - ' Comment string.\n' - ' """\n' - '\n') - self.runcase(teststring, 5, ('2.0', '5.0', ' ', teststring[1:85])) - - teststring = ( - '\n' - '"""Single line string."""\n' - '\n') - self.runcase(teststring, 3, ('2.0', '3.0', '', teststring[1:27])) - - teststring = ( - '\n' - ' """Single line string with leading whitespace."""\n' - '\n') - self.runcase(teststring, 3, ('2.0', '3.0', ' ', teststring[1:55])) - - -class ReformatFunctionTest(unittest.TestCase): - """Test the reformat_paragraph function without the editor window.""" - - def test_reformat_paragraph(self): - Equal = self.assertEqual - reform = fp.reformat_paragraph - hw = "O hello world" - Equal(reform(' ', 1), ' ') - Equal(reform("Hello world", 20), "Hello world") - - # Test without leading newline - Equal(reform(hw, 1), "O\nhello\nworld") - Equal(reform(hw, 6), 
"O\nhello\nworld") - Equal(reform(hw, 7), "O hello\nworld") - Equal(reform(hw, 12), "O hello\nworld") - Equal(reform(hw, 13), "O hello world") - - # Test with leading newline - hw = "\nO hello world" - Equal(reform(hw, 1), "\nO\nhello\nworld") - Equal(reform(hw, 6), "\nO\nhello\nworld") - Equal(reform(hw, 7), "\nO hello\nworld") - Equal(reform(hw, 12), "\nO hello\nworld") - Equal(reform(hw, 13), "\nO hello world") - - -class ReformatCommentTest(unittest.TestCase): - """Test the reformat_comment function without the editor window.""" - - def test_reformat_comment(self): - Equal = self.assertEqual - - # reformat_comment formats to a minimum of 20 characters - test_string = ( - " \"\"\"this is a test of a reformat for a triple quoted string" - " will it reformat to less than 70 characters for me?\"\"\"") - result = fp.reformat_comment(test_string, 70, " ") - expected = ( - " \"\"\"this is a test of a reformat for a triple quoted string will it\n" - " reformat to less than 70 characters for me?\"\"\"") - Equal(result, expected) - - test_comment = ( - "# this is a test of a reformat for a triple quoted string will " - "it reformat to less than 70 characters for me?") - result = fp.reformat_comment(test_comment, 70, "#") - expected = ( - "# this is a test of a reformat for a triple quoted string will it\n" - "# reformat to less than 70 characters for me?") - Equal(result, expected) - - -class FormatClassTest(unittest.TestCase): - def test_init_close(self): - instance = fp.FormatParagraph('editor') - self.assertEqual(instance.editwin, 'editor') - instance.close() - self.assertEqual(instance.editwin, None) - - -# For testing format_paragraph_event, Initialize FormatParagraph with -# a mock Editor with .text and .get_selection_indices. The text must -# be a Text wrapper that adds two methods - -# A real EditorWindow creates unneeded, time-consuming baggage and -# sometimes emits shutdown warnings like this: -# "warning: callback failed in WindowList -# : invalid command name ".55131368.windows". -# Calling EditorWindow._close in tearDownClass prevents this but causes -# other problems (windows left open). - -class TextWrapper: - def __init__(self, master): - self.text = Text(master=master) - def __getattr__(self, name): - return getattr(self.text, name) - def undo_block_start(self): pass - def undo_block_stop(self): pass - -class Editor: - def __init__(self, root): - self.text = TextWrapper(root) - get_selection_indices = EditorWindow. get_selection_indices - -class FormatEventTest(unittest.TestCase): - """Test the formatting of text inside a Text widget. - - This is done with FormatParagraph.format.paragraph_event, - which calls functions in the module as appropriate. - """ - test_string = ( - " '''this is a test of a reformat for a triple " - "quoted string will it reformat to less than 70 " - "characters for me?'''\n") - multiline_test_string = ( - " '''The first line is under the max width.\n" - " The second line's length is way over the max width. It goes " - "on and on until it is over 100 characters long.\n" - " Same thing with the third line. It is also way over the max " - "width, but FormatParagraph will fix it.\n" - " '''\n") - multiline_test_comment = ( - "# The first line is under the max width.\n" - "# The second line's length is way over the max width. It goes on " - "and on until it is over 100 characters long.\n" - "# Same thing with the third line. 
It is also way over the max " - "width, but FormatParagraph will fix it.\n" - "# The fourth line is short like the first line.") - - @classmethod - def setUpClass(cls): - requires('gui') - cls.root = Tk() - editor = Editor(root=cls.root) - cls.text = editor.text.text # Test code does not need the wrapper. - cls.formatter = fp.FormatParagraph(editor).format_paragraph_event - # Sets the insert mark just after the re-wrapped and inserted text. - - @classmethod - def tearDownClass(cls): - del cls.text, cls.formatter - cls.root.destroy() - del cls.root - - def test_short_line(self): - self.text.insert('1.0', "Short line\n") - self.formatter("Dummy") - self.assertEqual(self.text.get('1.0', 'insert'), "Short line\n" ) - self.text.delete('1.0', 'end') - - def test_long_line(self): - text = self.text - - # Set cursor ('insert' mark) to '1.0', within text. - text.insert('1.0', self.test_string) - text.mark_set('insert', '1.0') - self.formatter('ParameterDoesNothing', limit=70) - result = text.get('1.0', 'insert') - # find function includes \n - expected = ( -" '''this is a test of a reformat for a triple quoted string will it\n" -" reformat to less than 70 characters for me?'''\n") # yes - self.assertEqual(result, expected) - text.delete('1.0', 'end') - - # Select from 1.11 to line end. - text.insert('1.0', self.test_string) - text.tag_add('sel', '1.11', '1.end') - self.formatter('ParameterDoesNothing', limit=70) - result = text.get('1.0', 'insert') - # selection excludes \n - expected = ( -" '''this is a test of a reformat for a triple quoted string will it reformat\n" -" to less than 70 characters for me?'''") # no - self.assertEqual(result, expected) - text.delete('1.0', 'end') - - def test_multiple_lines(self): - text = self.text - # Select 2 long lines. - text.insert('1.0', self.multiline_test_string) - text.tag_add('sel', '2.0', '4.0') - self.formatter('ParameterDoesNothing', limit=70) - result = text.get('2.0', 'insert') - expected = ( -" The second line's length is way over the max width. It goes on and\n" -" on until it is over 100 characters long. Same thing with the third\n" -" line. It is also way over the max width, but FormatParagraph will\n" -" fix it.\n") - self.assertEqual(result, expected) - text.delete('1.0', 'end') - - def test_comment_block(self): - text = self.text - - # Set cursor ('insert') to '1.0', within block. - text.insert('1.0', self.multiline_test_comment) - self.formatter('ParameterDoesNothing', limit=70) - result = text.get('1.0', 'insert') - expected = ( -"# The first line is under the max width. The second line's length is\n" -"# way over the max width. It goes on and on until it is over 100\n" -"# characters long. Same thing with the third line. It is also way over\n" -"# the max width, but FormatParagraph will fix it. The fourth line is\n" -"# short like the first line.\n") - self.assertEqual(result, expected) - text.delete('1.0', 'end') - - # Select line 2, verify line 1 unaffected. - text.insert('1.0', self.multiline_test_comment) - text.tag_add('sel', '2.0', '3.0') - self.formatter('ParameterDoesNothing', limit=70) - result = text.get('1.0', 'insert') - expected = ( -"# The first line is under the max width.\n" -"# The second line's length is way over the max width. It goes on and\n" -"# on until it is over 100 characters long.\n") - self.assertEqual(result, expected) - text.delete('1.0', 'end') - -# The following block worked with EditorWindow but fails with the mock. 
-# Lines 2 and 3 get pasted together even though the previous block left -# the previous line alone. More investigation is needed. -## # Select lines 3 and 4 -## text.insert('1.0', self.multiline_test_comment) -## text.tag_add('sel', '3.0', '5.0') -## self.formatter('ParameterDoesNothing') -## result = text.get('3.0', 'insert') -## expected = ( -##"# Same thing with the third line. It is also way over the max width,\n" -##"# but FormatParagraph will fix it. The fourth line is short like the\n" -##"# first line.\n") -## self.assertEqual(result, expected) -## text.delete('1.0', 'end') - - -if __name__ == '__main__': - unittest.main(verbosity=2, exit=2) diff --git a/lib-python/3/idlelib/idle_test/test_idlehistory.py b/lib-python/3/idlelib/idle_test/test_idlehistory.py deleted file mode 100644 --- a/lib-python/3/idlelib/idle_test/test_idlehistory.py +++ /dev/null @@ -1,168 +0,0 @@ -import unittest -from test.support import requires - -import tkinter as tk -from tkinter import Text as tkText -from idlelib.idle_test.mock_tk import Text as mkText -from idlelib.IdleHistory import History -from idlelib.configHandler import idleConf - -line1 = 'a = 7' -line2 = 'b = a' - -class StoreTest(unittest.TestCase): - '''Tests History.__init__ and History.store with mock Text''' - - @classmethod - def setUpClass(cls): - cls.text = mkText() - cls.history = History(cls.text) - - def tearDown(self): - self.text.delete('1.0', 'end') - self.history.history = [] - - def test_init(self): - self.assertIs(self.history.text, self.text) - self.assertEqual(self.history.history, []) - self.assertIsNone(self.history.prefix) - self.assertIsNone(self.history.pointer) - self.assertEqual(self.history.cyclic, - idleConf.GetOption("main", "History", "cyclic", 1, "bool")) - - def test_store_short(self): - self.history.store('a') - self.assertEqual(self.history.history, []) - self.history.store(' a ') - self.assertEqual(self.history.history, []) - - def test_store_dup(self): - self.history.store(line1) - self.assertEqual(self.history.history, [line1]) - self.history.store(line2) - self.assertEqual(self.history.history, [line1, line2]) - self.history.store(line1) - self.assertEqual(self.history.history, [line2, line1]) - - def test_store_reset(self): - self.history.prefix = line1 - self.history.pointer = 0 - self.history.store(line2) - self.assertIsNone(self.history.prefix) - self.assertIsNone(self.history.pointer) - - -class TextWrapper: - def __init__(self, master): - self.text = tkText(master=master) - self._bell = False - def __getattr__(self, name): - return getattr(self.text, name) - def bell(self): - self._bell = True - -class FetchTest(unittest.TestCase): - '''Test History.fetch with wrapped tk.Text. - ''' - @classmethod - def setUpClass(cls): - requires('gui') - cls.root = tk.Tk() - cls.root.withdraw() - - def setUp(self): - self.text = text = TextWrapper(self.root) - text.insert('1.0', ">>> ") - text.mark_set('iomark', '1.4') - text.mark_gravity('iomark', 'left') - self.history = History(text) - self.history.history = [line1, line2] - - @classmethod - def tearDownClass(cls): - cls.root.destroy() - del cls.root - - def fetch_test(self, reverse, line, prefix, index, *, bell=False): - # Perform one fetch as invoked by Alt-N or Alt-P - # Test the result. The line test is the most important. - # The last two are diagnostic of fetch internals. 
- History = self.history - History.fetch(reverse) - - Equal = self.assertEqual - Equal(self.text.get('iomark', 'end-1c'), line) - Equal(self.text._bell, bell) - if bell: - self.text._bell = False - Equal(History.prefix, prefix) - Equal(History.pointer, index) - Equal(self.text.compare("insert", '==', "end-1c"), 1) - - def test_fetch_prev_cyclic(self): - prefix = '' - test = self.fetch_test - test(True, line2, prefix, 1) - test(True, line1, prefix, 0) - test(True, prefix, None, None, bell=True) - - def test_fetch_next_cyclic(self): - prefix = '' - test = self.fetch_test - test(False, line1, prefix, 0) - test(False, line2, prefix, 1) - test(False, prefix, None, None, bell=True) - - # Prefix 'a' tests skip line2, which starts with 'b' - def test_fetch_prev_prefix(self): - prefix = 'a' - self.text.insert('iomark', prefix) - self.fetch_test(True, line1, prefix, 0) - self.fetch_test(True, prefix, None, None, bell=True) - - def test_fetch_next_prefix(self): - prefix = 'a' - self.text.insert('iomark', prefix) - self.fetch_test(False, line1, prefix, 0) - self.fetch_test(False, prefix, None, None, bell=True) - - def test_fetch_prev_noncyclic(self): - prefix = '' - self.history.cyclic = False - test = self.fetch_test - test(True, line2, prefix, 1) - test(True, line1, prefix, 0) - test(True, line1, prefix, 0, bell=True) - - def test_fetch_next_noncyclic(self): - prefix = '' - self.history.cyclic = False - test = self.fetch_test - test(False, prefix, None, None, bell=True) - test(True, line2, prefix, 1) - test(False, prefix, None, None, bell=True) - test(False, prefix, None, None, bell=True) - - def test_fetch_cursor_move(self): - # Move cursor after fetch - self.history.fetch(reverse=True) # initialization - self.text.mark_set('insert', 'iomark') - self.fetch_test(True, line2, None, None, bell=True) - - def test_fetch_edit(self): - # Edit after fetch - self.history.fetch(reverse=True) # initialization - self.text.delete('iomark', 'insert', ) - self.text.insert('iomark', 'a =') - self.fetch_test(True, line1, 'a =', 0) # prefix is reset - - def test_history_prev_next(self): - # Minimally test functions bound to events - self.history.history_prev('dummy event') - self.assertEqual(self.history.pointer, 1) - self.history.history_next('dummy event') - self.assertEqual(self.history.pointer, None) - - -if __name__ == '__main__': - unittest.main(verbosity=2, exit=2) diff --git a/lib-python/3/idlelib/idle_test/test_io.py b/lib-python/3/idlelib/idle_test/test_io.py deleted file mode 100644 --- a/lib-python/3/idlelib/idle_test/test_io.py +++ /dev/null @@ -1,233 +0,0 @@ -import unittest -import io -from idlelib.PyShell import PseudoInputFile, PseudoOutputFile - - -class S(str): - def __str__(self): - return '%s:str' % type(self).__name__ - def __unicode__(self): - return '%s:unicode' % type(self).__name__ - def __len__(self): - return 3 - def __iter__(self): - return iter('abc') - def __getitem__(self, *args): - return '%s:item' % type(self).__name__ - def __getslice__(self, *args): - return '%s:slice' % type(self).__name__ - -class MockShell: - def __init__(self): - self.reset() - - def write(self, *args): - self.written.append(args) - - def readline(self): - return self.lines.pop() - - def close(self): - pass - - def reset(self): - self.written = [] - - def push(self, lines): - self.lines = list(lines)[::-1] - - -class PseudeOutputFilesTest(unittest.TestCase): - def test_misc(self): - shell = MockShell() - f = PseudoOutputFile(shell, 'stdout', 'utf-8') - self.assertIsInstance(f, io.TextIOBase) - 
self.assertEqual(f.encoding, 'utf-8') - self.assertIsNone(f.errors) - self.assertIsNone(f.newlines) - self.assertEqual(f.name, '') - self.assertFalse(f.closed) - self.assertTrue(f.isatty()) - self.assertFalse(f.readable()) - self.assertTrue(f.writable()) - self.assertFalse(f.seekable()) - - def test_unsupported(self): - shell = MockShell() - f = PseudoOutputFile(shell, 'stdout', 'utf-8') - self.assertRaises(OSError, f.fileno) - self.assertRaises(OSError, f.tell) - self.assertRaises(OSError, f.seek, 0) - self.assertRaises(OSError, f.read, 0) - self.assertRaises(OSError, f.readline, 0) - - def test_write(self): - shell = MockShell() - f = PseudoOutputFile(shell, 'stdout', 'utf-8') - f.write('test') - self.assertEqual(shell.written, [('test', 'stdout')]) - shell.reset() - f.write('t\xe8st') - self.assertEqual(shell.written, [('t\xe8st', 'stdout')]) - shell.reset() - - f.write(S('t\xe8st')) - self.assertEqual(shell.written, [('t\xe8st', 'stdout')]) - self.assertEqual(type(shell.written[0][0]), str) - shell.reset() - - self.assertRaises(TypeError, f.write) - self.assertEqual(shell.written, []) - self.assertRaises(TypeError, f.write, b'test') - self.assertRaises(TypeError, f.write, 123) - self.assertEqual(shell.written, []) - self.assertRaises(TypeError, f.write, 'test', 'spam') - self.assertEqual(shell.written, []) - - def test_writelines(self): - shell = MockShell() - f = PseudoOutputFile(shell, 'stdout', 'utf-8') - f.writelines([]) - self.assertEqual(shell.written, []) - shell.reset() - f.writelines(['one\n', 'two']) - self.assertEqual(shell.written, - [('one\n', 'stdout'), ('two', 'stdout')]) - shell.reset() - f.writelines(['on\xe8\n', 'tw\xf2']) - self.assertEqual(shell.written, - [('on\xe8\n', 'stdout'), ('tw\xf2', 'stdout')]) - shell.reset() - - f.writelines([S('t\xe8st')]) - self.assertEqual(shell.written, [('t\xe8st', 'stdout')]) - self.assertEqual(type(shell.written[0][0]), str) - shell.reset() - - self.assertRaises(TypeError, f.writelines) - self.assertEqual(shell.written, []) - self.assertRaises(TypeError, f.writelines, 123) - self.assertEqual(shell.written, []) - self.assertRaises(TypeError, f.writelines, [b'test']) - self.assertRaises(TypeError, f.writelines, [123]) - self.assertEqual(shell.written, []) - self.assertRaises(TypeError, f.writelines, [], []) - self.assertEqual(shell.written, []) - - def test_close(self): - shell = MockShell() - f = PseudoOutputFile(shell, 'stdout', 'utf-8') - self.assertFalse(f.closed) - f.write('test') - f.close() - self.assertTrue(f.closed) - self.assertRaises(ValueError, f.write, 'x') - self.assertEqual(shell.written, [('test', 'stdout')]) - f.close() - self.assertRaises(TypeError, f.close, 1) - - -class PseudeInputFilesTest(unittest.TestCase): - def test_misc(self): - shell = MockShell() - f = PseudoInputFile(shell, 'stdin', 'utf-8') - self.assertIsInstance(f, io.TextIOBase) - self.assertEqual(f.encoding, 'utf-8') - self.assertIsNone(f.errors) - self.assertIsNone(f.newlines) - self.assertEqual(f.name, '') - self.assertFalse(f.closed) - self.assertTrue(f.isatty()) - self.assertTrue(f.readable()) - self.assertFalse(f.writable()) - self.assertFalse(f.seekable()) - - def test_unsupported(self): - shell = MockShell() - f = PseudoInputFile(shell, 'stdin', 'utf-8') - self.assertRaises(OSError, f.fileno) - self.assertRaises(OSError, f.tell) - self.assertRaises(OSError, f.seek, 0) - self.assertRaises(OSError, f.write, 'x') - self.assertRaises(OSError, f.writelines, ['x']) - - def test_read(self): - shell = MockShell() - f = PseudoInputFile(shell, 'stdin', 
'utf-8') - shell.push(['one\n', 'two\n', '']) - self.assertEqual(f.read(), 'one\ntwo\n') - shell.push(['one\n', 'two\n', '']) - self.assertEqual(f.read(-1), 'one\ntwo\n') - shell.push(['one\n', 'two\n', '']) - self.assertEqual(f.read(None), 'one\ntwo\n') - shell.push(['one\n', 'two\n', 'three\n', '']) - self.assertEqual(f.read(2), 'on') - self.assertEqual(f.read(3), 'e\nt') - self.assertEqual(f.read(10), 'wo\nthree\n') - - shell.push(['one\n', 'two\n']) - self.assertEqual(f.read(0), '') - self.assertRaises(TypeError, f.read, 1.5) - self.assertRaises(TypeError, f.read, '1') - self.assertRaises(TypeError, f.read, 1, 1) - - def test_readline(self): - shell = MockShell() - f = PseudoInputFile(shell, 'stdin', 'utf-8') - shell.push(['one\n', 'two\n', 'three\n', 'four\n']) - self.assertEqual(f.readline(), 'one\n') - self.assertEqual(f.readline(-1), 'two\n') - self.assertEqual(f.readline(None), 'three\n') - shell.push(['one\ntwo\n']) - self.assertEqual(f.readline(), 'one\n') - self.assertEqual(f.readline(), 'two\n') - shell.push(['one', 'two', 'three']) - self.assertEqual(f.readline(), 'one') - self.assertEqual(f.readline(), 'two') - shell.push(['one\n', 'two\n', 'three\n']) - self.assertEqual(f.readline(2), 'on') - self.assertEqual(f.readline(1), 'e') - self.assertEqual(f.readline(1), '\n') - self.assertEqual(f.readline(10), 'two\n') - - shell.push(['one\n', 'two\n']) - self.assertEqual(f.readline(0), '') - self.assertRaises(TypeError, f.readlines, 1.5) - self.assertRaises(TypeError, f.readlines, '1') - self.assertRaises(TypeError, f.readlines, 1, 1) - - def test_readlines(self): - shell = MockShell() - f = PseudoInputFile(shell, 'stdin', 'utf-8') - shell.push(['one\n', 'two\n', '']) - self.assertEqual(f.readlines(), ['one\n', 'two\n']) - shell.push(['one\n', 'two\n', '']) - self.assertEqual(f.readlines(-1), ['one\n', 'two\n']) - shell.push(['one\n', 'two\n', '']) - self.assertEqual(f.readlines(None), ['one\n', 'two\n']) - shell.push(['one\n', 'two\n', '']) - self.assertEqual(f.readlines(0), ['one\n', 'two\n']) - shell.push(['one\n', 'two\n', '']) - self.assertEqual(f.readlines(3), ['one\n']) - shell.push(['one\n', 'two\n', '']) - self.assertEqual(f.readlines(4), ['one\n', 'two\n']) - - shell.push(['one\n', 'two\n', '']) - self.assertRaises(TypeError, f.readlines, 1.5) - self.assertRaises(TypeError, f.readlines, '1') - self.assertRaises(TypeError, f.readlines, 1, 1) - - def test_close(self): - shell = MockShell() - f = PseudoInputFile(shell, 'stdin', 'utf-8') - shell.push(['one\n', 'two\n', '']) - self.assertFalse(f.closed) - self.assertEqual(f.readline(), 'one\n') - f.close() - self.assertFalse(f.closed) - self.assertEqual(f.readline(), 'two\n') - self.assertRaises(TypeError, f.close, 1) - - -if __name__ == '__main__': - unittest.main() diff --git a/lib-python/3/idlelib/idle_test/test_replacedialog.py b/lib-python/3/idlelib/idle_test/test_replacedialog.py deleted file mode 100644 --- a/lib-python/3/idlelib/idle_test/test_replacedialog.py +++ /dev/null @@ -1,293 +0,0 @@ -"""Unittest for idlelib.ReplaceDialog""" -from test.support import requires -requires('gui') - -import unittest -from unittest.mock import Mock -from tkinter import Tk, Text -from idlelib.idle_test.mock_tk import Mbox -import idlelib.SearchEngine as se -import idlelib.ReplaceDialog as rd - -orig_mbox = se.tkMessageBox -showerror = Mbox.showerror - - -class ReplaceDialogTest(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.root = Tk() - cls.root.withdraw() - se.tkMessageBox = Mbox - cls.engine = 
se.SearchEngine(cls.root) - cls.dialog = rd.ReplaceDialog(cls.root, cls.engine) - cls.dialog.ok = Mock() - cls.text = Text(cls.root) - cls.text.undo_block_start = Mock() - cls.text.undo_block_stop = Mock() - cls.dialog.text = cls.text - - @classmethod - def tearDownClass(cls): - se.tkMessageBox = orig_mbox - del cls.text, cls.dialog, cls.engine - cls.root.destroy() - del cls.root - - def setUp(self): - self.text.insert('insert', 'This is a sample sTring') - - def tearDown(self): - self.engine.patvar.set('') - self.dialog.replvar.set('') - self.engine.wordvar.set(False) - self.engine.casevar.set(False) - self.engine.revar.set(False) - self.engine.wrapvar.set(True) - self.engine.backvar.set(False) - showerror.title = '' - showerror.message = '' - self.text.delete('1.0', 'end') - - def test_replace_simple(self): - # Test replace function with all options at default setting. - # Wrap around - True - # Regular Expression - False - # Match case - False - # Match word - False - # Direction - Forwards - text = self.text - equal = self.assertEqual - pv = self.engine.patvar - rv = self.dialog.replvar - replace = self.dialog.replace_it - - # test accessor method - self.engine.setpat('asdf') - equal(self.engine.getpat(), pv.get()) - - # text found and replaced - pv.set('a') - rv.set('asdf') - self.dialog.open(self.text) - replace() - equal(text.get('1.8', '1.12'), 'asdf') - - # dont "match word" case - text.mark_set('insert', '1.0') - pv.set('is') - rv.set('hello') - replace() - equal(text.get('1.2', '1.7'), 'hello') - - # dont "match case" case - pv.set('string') - rv.set('world') - replace() - equal(text.get('1.23', '1.28'), 'world') - - # without "regular expression" case - text.mark_set('insert', 'end') - text.insert('insert', '\nline42:') - before_text = text.get('1.0', 'end') - pv.set('[a-z][\d]+') - replace() - after_text = text.get('1.0', 'end') - equal(before_text, after_text) - - # test with wrap around selected and complete a cycle - text.mark_set('insert', '1.9') - pv.set('i') - rv.set('j') - replace() - equal(text.get('1.8'), 'i') - equal(text.get('2.1'), 'j') - replace() - equal(text.get('2.1'), 'j') - equal(text.get('1.8'), 'j') - before_text = text.get('1.0', 'end') - replace() - after_text = text.get('1.0', 'end') - equal(before_text, after_text) - - # text not found - before_text = text.get('1.0', 'end') - pv.set('foobar') - replace() - after_text = text.get('1.0', 'end') - equal(before_text, after_text) - - # test access method - self.dialog.find_it(0) - - def test_replace_wrap_around(self): - text = self.text - equal = self.assertEqual - pv = self.engine.patvar - rv = self.dialog.replvar - replace = self.dialog.replace_it - self.engine.wrapvar.set(False) - - # replace candidate found both after and before 'insert' - text.mark_set('insert', '1.4') - pv.set('i') - rv.set('j') - replace() - equal(text.get('1.2'), 'i') - equal(text.get('1.5'), 'j') - replace() - equal(text.get('1.2'), 'i') - equal(text.get('1.20'), 'j') - replace() - equal(text.get('1.2'), 'i') - - # replace candidate found only before 'insert' - text.mark_set('insert', '1.8') - pv.set('is') - before_text = text.get('1.0', 'end') - replace() - after_text = text.get('1.0', 'end') - equal(before_text, after_text) - - def test_replace_whole_word(self): - text = self.text - equal = self.assertEqual - pv = self.engine.patvar - rv = self.dialog.replvar - replace = self.dialog.replace_it - self.engine.wordvar.set(True) - - pv.set('is') - rv.set('hello') - replace() - equal(text.get('1.0', '1.4'), 'This') - 
equal(text.get('1.5', '1.10'), 'hello') - - def test_replace_match_case(self): - equal = self.assertEqual - text = self.text - pv = self.engine.patvar - rv = self.dialog.replvar - replace = self.dialog.replace_it - self.engine.casevar.set(True) - - before_text = self.text.get('1.0', 'end') - pv.set('this') - rv.set('that') - replace() - after_text = self.text.get('1.0', 'end') - equal(before_text, after_text) - - pv.set('This') - replace() - equal(text.get('1.0', '1.4'), 'that') - - def test_replace_regex(self): - equal = self.assertEqual - text = self.text - pv = self.engine.patvar - rv = self.dialog.replvar - replace = self.dialog.replace_it - self.engine.revar.set(True) - - before_text = text.get('1.0', 'end') - pv.set('[a-z][\d]+') - rv.set('hello') - replace() - after_text = text.get('1.0', 'end') - equal(before_text, after_text) - - text.insert('insert', '\nline42') - replace() - equal(text.get('2.0', '2.8'), 'linhello') - - pv.set('') - replace() - self.assertIn('error', showerror.title) - self.assertIn('Empty', showerror.message) - - pv.set('[\d') - replace() - self.assertIn('error', showerror.title) - self.assertIn('Pattern', showerror.message) - - showerror.title = '' - showerror.message = '' - pv.set('[a]') - rv.set('test\\') - replace() - self.assertIn('error', showerror.title) - self.assertIn('Invalid Replace Expression', showerror.message) - - # test access method - self.engine.setcookedpat("\'") - equal(pv.get(), "\\'") - - def test_replace_backwards(self): - equal = self.assertEqual - text = self.text - pv = self.engine.patvar - rv = self.dialog.replvar - replace = self.dialog.replace_it - self.engine.backvar.set(True) - - text.insert('insert', '\nis as ') - - pv.set('is') - rv.set('was') - replace() - equal(text.get('1.2', '1.4'), 'is') - equal(text.get('2.0', '2.3'), 'was') - replace() - equal(text.get('1.5', '1.8'), 'was') - replace() - equal(text.get('1.2', '1.5'), 'was') - - def test_replace_all(self): - text = self.text - pv = self.engine.patvar - rv = self.dialog.replvar - replace_all = self.dialog.replace_all - - text.insert('insert', '\n') - text.insert('insert', text.get('1.0', 'end')*100) - pv.set('is') - rv.set('was') - replace_all() - self.assertNotIn('is', text.get('1.0', 'end')) - - self.engine.revar.set(True) - pv.set('') - replace_all() - self.assertIn('error', showerror.title) - self.assertIn('Empty', showerror.message) - - pv.set('[s][T]') - rv.set('\\') - replace_all() - - self.engine.revar.set(False) - pv.set('text which is not present') - rv.set('foobar') - replace_all() - - def test_default_command(self): - text = self.text - pv = self.engine.patvar - rv = self.dialog.replvar - replace_find = self.dialog.default_command - equal = self.assertEqual - - pv.set('This') - rv.set('was') - replace_find() - equal(text.get('sel.first', 'sel.last'), 'was') - - self.engine.revar.set(True) - pv.set('') - replace_find() - - -if __name__ == '__main__': - unittest.main(verbosity=2) diff --git a/lib-python/3/idlelib/idle_test/test_searchdialog.py b/lib-python/3/idlelib/idle_test/test_searchdialog.py deleted file mode 100644 --- a/lib-python/3/idlelib/idle_test/test_searchdialog.py +++ /dev/null @@ -1,80 +0,0 @@ -"""Test SearchDialog class in SearchDialogue.py""" - -# Does not currently test the event handler wrappers. -# A usage test should simulate clicks and check hilighting. -# Tests need to be coordinated with SearchDialogBase tests -# to avoid duplication. 
- -from test.support import requires -requires('gui') - -import unittest -import tkinter as tk -from tkinter import BooleanVar -import idlelib.SearchEngine as se -import idlelib.SearchDialog as sd - - -class SearchDialogTest(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.root = tk.Tk() - - @classmethod - def tearDownClass(cls): - cls.root.destroy() - del cls.root - - def setUp(self): - self.engine = se.SearchEngine(self.root) - self.dialog = sd.SearchDialog(self.root, self.engine) - self.text = tk.Text(self.root) - self.text.insert('1.0', 'Hello World!') - - def test_find_again(self): - # Search for various expressions - text = self.text - - self.engine.setpat('') - self.assertFalse(self.dialog.find_again(text)) - - self.engine.setpat('Hello') - self.assertTrue(self.dialog.find_again(text)) - - self.engine.setpat('Goodbye') - self.assertFalse(self.dialog.find_again(text)) - - self.engine.setpat('World!') - self.assertTrue(self.dialog.find_again(text)) - - self.engine.setpat('Hello World!') - self.assertTrue(self.dialog.find_again(text)) - - # Regular expression - self.engine.revar = BooleanVar(self.root, True) - self.engine.setpat('W[aeiouy]r') - self.assertTrue(self.dialog.find_again(text)) - - def test_find_selection(self): - # Select some text and make sure it's found - text = self.text - # Add additional line to find - self.text.insert('2.0', 'Hello World!') - - text.tag_add('sel', '1.0', '1.4') # Select 'Hello' - self.assertTrue(self.dialog.find_selection(text)) - - text.tag_remove('sel', '1.0', 'end') - text.tag_add('sel', '1.6', '1.11') # Select 'World!' - self.assertTrue(self.dialog.find_selection(text)) - - text.tag_remove('sel', '1.0', 'end') - text.tag_add('sel', '1.0', '1.11') # Select 'Hello World!' - self.assertTrue(self.dialog.find_selection(text)) - - # Remove additional line - text.delete('2.0', 'end') - -if __name__ == '__main__': - unittest.main(verbosity=2, exit=2) diff --git a/lib-python/3/idlelib/idle_test/test_searchdialogbase.py b/lib-python/3/idlelib/idle_test/test_searchdialogbase.py deleted file mode 100644 --- a/lib-python/3/idlelib/idle_test/test_searchdialogbase.py +++ /dev/null @@ -1,165 +0,0 @@ -'''Unittests for idlelib/SearchDialogBase.py - -Coverage: 99%. The only thing not covered is inconsequential -- -testing skipping of suite when self.needwrapbutton is false. - -''' -import unittest -from test.support import requires -from tkinter import Tk, Toplevel, Frame ##, BooleanVar, StringVar -from idlelib import SearchEngine as se -from idlelib import SearchDialogBase as sdb -from idlelib.idle_test.mock_idle import Func -## from idlelib.idle_test.mock_tk import Var - -# The ## imports above & following could help make some tests gui-free. -# However, they currently make radiobutton tests fail. -##def setUpModule(): -## # Replace tk objects used to initialize se.SearchEngine. 
-## se.BooleanVar = Var -## se.StringVar = Var -## -##def tearDownModule(): -## se.BooleanVar = BooleanVar -## se.StringVar = StringVar - -class SearchDialogBaseTest(unittest.TestCase): - - @classmethod - def setUpClass(cls): - requires('gui') - cls.root = Tk() - - @classmethod - def tearDownClass(cls): - cls.root.destroy() - del cls.root - - def setUp(self): - self.engine = se.SearchEngine(self.root) # None also seems to work - self.dialog = sdb.SearchDialogBase(root=self.root, engine=self.engine) - - def tearDown(self): - self.dialog.close() - - def test_open_and_close(self): - # open calls create_widgets, which needs default_command - self.dialog.default_command = None - - # Since text parameter of .open is not used in base class, - # pass dummy 'text' instead of tk.Text(). - self.dialog.open('text') - self.assertEqual(self.dialog.top.state(), 'normal') - self.dialog.close() - self.assertEqual(self.dialog.top.state(), 'withdrawn') - - self.dialog.open('text', searchphrase="hello") - self.assertEqual(self.dialog.ent.get(), 'hello') - self.dialog.close() - - def test_create_widgets(self): - self.dialog.create_entries = Func() - self.dialog.create_option_buttons = Func() - self.dialog.create_other_buttons = Func() - self.dialog.create_command_buttons = Func() - - self.dialog.default_command = None - self.dialog.create_widgets() - - self.assertTrue(self.dialog.create_entries.called) - self.assertTrue(self.dialog.create_option_buttons.called) - self.assertTrue(self.dialog.create_other_buttons.called) - self.assertTrue(self.dialog.create_command_buttons.called) - - def test_make_entry(self): - equal = self.assertEqual - self.dialog.row = 0 - self.dialog.top = Toplevel(self.root) - entry, label = self.dialog.make_entry("Test:", 'hello') - equal(label['text'], 'Test:') - - self.assertIn(entry.get(), 'hello') - egi = entry.grid_info() - equal(int(egi['row']), 0) - equal(int(egi['column']), 1) - equal(int(egi['rowspan']), 1) - equal(int(egi['columnspan']), 1) - equal(self.dialog.row, 1) - - def test_create_entries(self): - self.dialog.row = 0 - self.engine.setpat('hello') - self.dialog.create_entries() - self.assertIn(self.dialog.ent.get(), 'hello') - - def test_make_frame(self): - self.dialog.row = 0 - self.dialog.top = Toplevel(self.root) - frame, label = self.dialog.make_frame() - self.assertEqual(label, '') - self.assertIsInstance(frame, Frame) - - frame, label = self.dialog.make_frame('testlabel') - self.assertEqual(label['text'], 'testlabel') - self.assertIsInstance(frame, Frame) - - def btn_test_setup(self, meth): - self.dialog.top = Toplevel(self.root) - self.dialog.row = 0 - return meth() - - def test_create_option_buttons(self): - e = self.engine - for state in (0, 1): - for var in (e.revar, e.casevar, e.wordvar, e.wrapvar): - var.set(state) - frame, options = self.btn_test_setup( - self.dialog.create_option_buttons) - for spec, button in zip (options, frame.pack_slaves()): - var, label = spec - self.assertEqual(button['text'], label) - self.assertEqual(var.get(), state) - if state == 1: - button.deselect() - else: - button.select() - self.assertEqual(var.get(), 1 - state) - - def test_create_other_buttons(self): - for state in (False, True): - var = self.engine.backvar - var.set(state) - frame, others = self.btn_test_setup( - self.dialog.create_other_buttons) - buttons = frame.pack_slaves() - for spec, button in zip(others, buttons): - val, label = spec - self.assertEqual(button['text'], label) - if val == state: - # hit other button, then this one - # indexes depend on button order - 
                    self.assertEqual(var.get(), state)
-                    buttons[val].select()
-                    self.assertEqual(var.get(), 1 - state)
-                    buttons[1-val].select()
-                    self.assertEqual(var.get(), state)
-
-    def test_make_button(self):
-        self.dialog.top = Toplevel(self.root)
-        self.dialog.buttonframe = Frame(self.dialog.top)
-        btn = self.dialog.make_button('Test', self.dialog.close)
-        self.assertEqual(btn['text'], 'Test')
-
-    def test_create_command_buttons(self):
-        self.dialog.create_command_buttons()
-        # Look for close button command in buttonframe
-        closebuttoncommand = ''
-        for child in self.dialog.buttonframe.winfo_children():
-            if child['text'] == 'close':
-                closebuttoncommand = child['command']
-        self.assertIn('close', closebuttoncommand)
-
-
-
-if __name__ == '__main__':
-    unittest.main(verbosity=2, exit=2)
diff --git a/lib-python/3/idlelib/idle_test/test_undodelegator.py b/lib-python/3/idlelib/idle_test/test_undodelegator.py
deleted file mode 100644
--- a/lib-python/3/idlelib/idle_test/test_undodelegator.py
+++ /dev/null
@@ -1,135 +0,0 @@
-"""Unittest for UndoDelegator in idlelib.UndoDelegator.
-
-Coverage about 80% (retest).
-"""
-from test.support import requires
-requires('gui')
-

From pypy.commits at gmail.com Fri Aug 9 11:28:28 2019
From: pypy.commits at gmail.com (rlamy)
Date: Fri, 09 Aug 2019 08:28:28 -0700 (PDT)
Subject: [pypy-commit] pypy py3.6: Don't ignore fold parameter in (date, )time.replace()
Message-ID: <5d4d911c.1c69fb81.3af82.6618@mx.google.com>

Author: Ronan Lamy
Branch: py3.6
Changeset: r97123:adf1da3800af
Date: 2019-08-09 16:27 +0100
http://bitbucket.org/pypy/pypy/changeset/adf1da3800af/

Log: Don't ignore fold parameter in (date,)time.replace()

diff --git a/lib-python/3/datetime.py b/lib-python/3/datetime.py
--- a/lib-python/3/datetime.py
+++ b/lib-python/3/datetime.py
@@ -1347,8 +1347,8 @@
             tzinfo = self.tzinfo
         if fold is None:
             fold = self._fold
-        return time.__new__(type(self),
-                            hour, minute, second, microsecond, tzinfo)
+        return time.__new__(
+            type(self), hour, minute, second, microsecond, tzinfo, fold=fold)
 
     # Pickle support.
@@ -1647,8 +1647,9 @@
             tzinfo = self.tzinfo
         if fold is None:
             fold = self.fold
-        return datetime.__new__(type(self), year, month, day, hour, minute,
-                                second, microsecond, tzinfo)
+        return datetime.__new__(
+            type(self), year, month, day, hour, minute, second, microsecond,
+            tzinfo, fold=fold)
 
     def _local_timezone(self):
         if self.tzinfo is None:

From pypy.commits at gmail.com Fri Aug 9 11:51:07 2019
From: pypy.commits at gmail.com (rlamy)
Date: Fri, 09 Aug 2019 08:51:07 -0700 (PDT)
Subject: [pypy-commit] pypy py3.6: Backed out changeset d6fbbd74e9d4 (breaks test_smalllongobject.py)
Message-ID: <5d4d966b.1c69fb81.ad2a5.c492@mx.google.com>

Author: Ronan Lamy
Branch: py3.6
Changeset: r97124:1097e21c2af6
Date: 2019-08-09 16:49 +0100
http://bitbucket.org/pypy/pypy/changeset/1097e21c2af6/

Log: Backed out changeset d6fbbd74e9d4 (breaks test_smalllongobject.py)

diff --git a/pypy/objspace/std/test/apptest_longobject.py b/pypy/objspace/std/test/apptest_longobject.py
deleted file mode 100644
--- a/pypy/objspace/std/test/apptest_longobject.py
+++ /dev/null
@@ -1,472 +0,0 @@
-from pytest import raises
-
-def _long(obj):
-    # XXX: currently returns a W_LongObject but might return
-    # W_IntObject in the future
-    huge = 1 << 65
-    return obj + huge - huge
-
-def test_trunc():
-    import math
-    assert math.trunc(_long(1)) == _long(1)
-    assert math.trunc(-_long(1)) == -_long(1)
-
-def test_add():
-    x = _long(123)
-    assert int(x + _long(12443)) == 123 + 12443
-    x = -20
-    assert x + 2 + _long(3) + True == -_long(14)
-
-def test_sub():
-    assert int(_long(58543) - _long(12332)) == 58543 - 12332
-    assert int(_long(58543) - 12332) == 58543 - 12332
-    assert int(58543 - _long(12332)) == 58543 - 12332
-    x = _long(237123838281233)
-    assert x * 12 == x * _long(12)
-
-def test_mul():
-    x = _long(363)
-    assert x * 2 ** 40 == x << 40
-
-def test_truediv():
-    a = _long(31415926) / _long(10000000)
-    assert a == 3.1415926
-
-def test_floordiv():
-    x = _long(31415926)
-    a = x // _long(10000000)
-    assert a == _long(3)
-
-def test_int_floordiv():
-    import sys
-    long = _long
-
-    x = long(3000)
-    a = x // 1000
-    assert a == 3
-
-    x = long(3000)
-    a = x // -1000
-    assert a == -3
-
-    x = long(3000)
-    raises(ZeroDivisionError, "x // 0")
-
-    n = sys.maxsize + 1
-    assert n / int(-n) == long(-1)
-
-def test_numerator_denominator():
-    assert (_long(1)).numerator == _long(1)
-    assert (_long(1)).denominator == _long(1)
-    assert (_long(42)).numerator == _long(42)
-    assert (_long(42)).denominator == _long(1)
-
-def test_compare():
-    Z = 0
-    ZL = _long(0)
-
-    assert Z == ZL
-    assert not (Z != ZL)
-    assert ZL == Z
-    assert not (ZL != Z)
-    assert Z <= ZL
-    assert not (Z < ZL)
-    assert ZL <= ZL
-    assert not (ZL < ZL)
-
-    for BIG in (_long(1), _long(1) << 62, _long(1) << 9999):
-        assert not (Z == BIG)
-        assert Z != BIG
-        assert not (BIG == Z)
-        assert BIG != Z
-        assert not (ZL == BIG)
-        assert ZL != BIG
-        assert Z <= BIG
-        assert Z < BIG
-        assert not (BIG <= Z)
-        assert not (BIG < Z)
-        assert ZL <= BIG
-        assert ZL < BIG
-        assert not (BIG <= ZL)
-        assert not (BIG < ZL)
-        assert not (Z <= -BIG)
-        assert not (Z < -BIG)
-        assert -BIG <= Z
-        assert -BIG < Z
-        assert not (ZL <= -BIG)
-        assert not (ZL < -BIG)
-        assert -BIG <= ZL
-        assert -BIG < ZL
-        #
-        assert not (BIG < int(BIG))
-        assert (BIG <= int(BIG))
-        assert (BIG == int(BIG))
-        assert not (BIG != int(BIG))
-        assert not (BIG > int(BIG))
-        assert (BIG >= int(BIG))
-        #
-        assert (BIG < int(BIG)+1)
-        assert (BIG <= int(BIG)+1)
-        assert not (BIG == int(BIG)+1)
-        assert (BIG != int(BIG)+1)
-        assert not (BIG
> int(BIG)+1) - assert not (BIG >= int(BIG)+1) - # - assert not (BIG < int(BIG)-1) - assert not (BIG <= int(BIG)-1) - assert not (BIG == int(BIG)-1) - assert (BIG != int(BIG)-1) - assert (BIG > int(BIG)-1) - assert (BIG >= int(BIG)-1) - # - assert not (int(BIG) < BIG) - assert (int(BIG) <= BIG) - assert (int(BIG) == BIG) - assert not (int(BIG) != BIG) - assert not (int(BIG) > BIG) - assert (int(BIG) >= BIG) - # - assert not (int(BIG)+1 < BIG) - assert not (int(BIG)+1 <= BIG) - assert not (int(BIG)+1 == BIG) - assert (int(BIG)+1 != BIG) - assert (int(BIG)+1 > BIG) - assert (int(BIG)+1 >= BIG) - # - assert (int(BIG)-1 < BIG) - assert (int(BIG)-1 <= BIG) - assert not (int(BIG)-1 == BIG) - assert (int(BIG)-1 != BIG) - assert not (int(BIG)-1 > BIG) - assert not (int(BIG)-1 >= BIG) - -def test_conversion(): - class long2(int): - pass - x = _long(1) - x = long2(x<<100) - y = int(x) - assert type(y) == int - assert type(+long2(5)) is int - assert type(long2(5) << 0) is int - assert type(long2(5) >> 0) is int - assert type(long2(5) + 0) is int - assert type(long2(5) - 0) is int - assert type(long2(5) * 1) is int - assert type(1 * long2(5)) is int - assert type(0 + long2(5)) is int - assert type(-long2(0)) is int - assert type(long2(5) // 1) is int - -def test_shift(): - long = _long - assert long(65) >> long(2) == long(16) - assert long(65) >> 2 == long(16) - assert 65 >> long(2) == long(16) - assert long(65) << long(2) == long(65) * 4 - assert long(65) << 2 == long(65) * 4 - assert 65 << long(2) == long(65) * 4 - raises(ValueError, "long(1) << long(-1)") - raises(ValueError, "long(1) << -1") - raises(OverflowError, "long(1) << (2 ** 100)") - raises(ValueError, "long(1) >> long(-1)") - raises(ValueError, "long(1) >> -1") - raises(OverflowError, "long(1) >> (2 ** 100)") - -def test_pow(): - long = _long - x = _long(0) - assert pow(x, _long(0), _long(1)) == _long(0) - assert pow(-_long(1), -_long(1)) == -1.0 - assert pow(2 ** 68, 0.5) == 2.0 ** 34 - assert pow(2 ** 68, 2) == 2 ** 136 - raises(ValueError, pow, long(2), -1, 3) - raises(ValueError, pow, long(2), 5, 0) - - # some rpow tests - assert pow(0, long(0), long(1)) == long(0) - assert pow(-1, long(-1)) == -1.0 - -def test_int_pow(): - long = _long - x = long(2) - assert pow(x, 2) == long(4) - assert pow(x, 2, 2) == long(0) - assert pow(x, 2, long(3)) == long(1) - -def test_getnewargs(): - assert _long(0) .__getnewargs__() == (_long(0),) - assert (-_long(1)) .__getnewargs__() == (-_long(1),) - -def test_divmod(): - long = _long - def check_division(x, y): - q, r = divmod(x, y) - pab, pba = x*y, y*x - assert pab == pba - assert q == x // y - assert r == x % y - assert x == q*y + r - if y > 0: - assert 0 <= r < y - else: - assert y < r <= 0 - for x in [-_long(1), _long(0), _long(1), _long(2) ** 100 - 1, -_long(2) ** 100 - 1]: - for y in [-_long(105566530), -_long(1), _long(1), _long(1034522340)]: - print("checking division for %s, %s" % (x, y)) - check_division(x, y) - check_division(x, int(y)) - check_division(int(x), y) - # special case from python tests: - s1 = 33 - s2 = 2 - x = 16565645174462751485571442763871865344588923363439663038777355323778298703228675004033774331442052275771343018700586987657790981527457655176938756028872904152013524821759375058141439 - x >>= s1*16 - y = 10953035502453784575 - y >>= s2*16 - x = 0x3FE0003FFFFC0001FFF - y = _long(0x9800FFC1) - check_division(x, y) - raises(ZeroDivisionError, "x // _long(0)") - divmod(3, _long(4)) - raises(ZeroDivisionError, "x % long(0)") - raises(ZeroDivisionError, divmod, x, long(0)) - 
raises(ZeroDivisionError, "x // 0") - raises(ZeroDivisionError, "x % 0") - raises(ZeroDivisionError, divmod, x, 0) - -def test_int_divmod(): - long = _long - q, r = divmod(long(100), 11) - assert q == 9 - assert r == 1 - -def test_format(): - assert repr(12345678901234567890) == '12345678901234567890' - assert str(12345678901234567890) == '12345678901234567890' - assert hex(_long(0x1234567890ABCDEF)) == '0x1234567890abcdef' - assert oct(_long(0o1234567012345670)) == '0o1234567012345670' - -def test_bits(): - x = _long(0xAAAAAAAA) - assert x | _long(0x55555555) == _long(0xFFFFFFFF) - assert x & _long(0x55555555) == _long(0x00000000) - assert x ^ _long(0x55555555) == _long(0xFFFFFFFF) - assert -x | _long(0x55555555) == -_long(0xAAAAAAA9) - assert x | _long(0x555555555) == _long(0x5FFFFFFFF) - assert x & _long(0x555555555) == _long(0x000000000) - assert x ^ _long(0x555555555) == _long(0x5FFFFFFFF) - -def test_hash(): - import sys - modulus = sys.hash_info.modulus - def longhash(x): - return hash(_long(x)) - for x in (list(range(200)) + - [1234567890123456789, 18446743523953737727, - 987685321987685321987685321987685321987685321, - 10**50]): - y = x % modulus - assert longhash(x) == longhash(y) - assert longhash(-x) == longhash(-y) - assert longhash(modulus - 1) == modulus - 1 - assert longhash(modulus) == 0 - assert longhash(modulus + 1) == 1 - - assert longhash(-1) == -2 - value = -(modulus + 1) - assert longhash(value) == -2 - assert longhash(value * 2 + 1) == -2 - assert longhash(value * 4 + 3) == -2 - -def test_hash_2(): - class AAA: - def __hash__(a): - return _long(-1) - assert hash(AAA()) == -2 - -def test_math_log(): - import math - raises(ValueError, math.log, _long(0)) - raises(ValueError, math.log, -_long(1)) - raises(ValueError, math.log, -_long(2)) - raises(ValueError, math.log, -(_long(1) << 10000)) - #raises(ValueError, math.log, 0) - raises(ValueError, math.log, -1) - raises(ValueError, math.log, -2) - -def test_long(): - import sys - n = -sys.maxsize-1 - assert int(n) == n - assert str(int(n)) == str(n) - a = memoryview(b'123') - assert int(a) == _long(123) - -def test_huge_longs(): - import operator - x = _long(1) - huge = x << _long(40000) - raises(OverflowError, float, huge) - raises(OverflowError, operator.truediv, huge, 3) - raises(OverflowError, operator.truediv, huge, _long(3)) - -def test_just_trunc(): - class myint(object): - def __trunc__(self): - return 42 - assert int(myint()) == 42 - -def test_override___int__(): - class myint(int): - def __int__(self): - return 42 - assert int(myint(21)) == 42 - class myotherint(int): - pass - assert int(myotherint(21)) == 21 - -def test___int__(): - class A(object): - def __int__(self): - return 42 - assert int(A()) == 42 - - class IntSubclass(int): - pass - class ReturnsIntSubclass(object): - def __int__(self): - return IntSubclass(42) - n = int(ReturnsIntSubclass()) - assert n == 42 - # cpython 3.6 fixed behaviour to actually return type int here - assert type(n) is int - -def test_trunc_returns(): - # but!: (blame CPython 2.7) - class Integral(object): - def __int__(self): - return 42 - class TruncReturnsNonInt(object): - def __trunc__(self): - return Integral() - n = int(TruncReturnsNonInt()) - assert type(n) is int - assert n == 42 - - class IntSubclass(int): - pass - class TruncReturnsNonInt(object): - def __trunc__(self): - return IntSubclass(42) - n = int(TruncReturnsNonInt()) - assert n == 42 - assert type(n) is int - -def test_long_before_string(): - class A(str): - def __int__(self): - return 42 - assert 
int(A('abc')) == 42 - -def test_conjugate(): - assert (_long(7)).conjugate() == _long(7) - assert (-_long(7)).conjugate() == -_long(7) - - class L(int): - pass - - assert type(L(7).conjugate()) is int - - class L(int): - def __pos__(self): - return 43 - assert L(7).conjugate() == _long(7) - -def test_bit_length(): - assert _long(8).bit_length() == 4 - assert (-1<<40).bit_length() == 41 - assert ((2**31)-1).bit_length() == 31 - -def test_from_bytes(): - assert int.from_bytes(b'c', 'little') == 99 - assert int.from_bytes(b'\x01\x01', 'little') == 257 - assert int.from_bytes(b'\x01\x00', 'big') == 256 - assert int.from_bytes(b'\x00\x80', 'little', signed=True) == -32768 - assert int.from_bytes([255, 0, 0], 'big', signed=True) == -65536 - raises(TypeError, int.from_bytes, 0, 'big') - raises(TypeError, int.from_bytes, '', 'big') - raises(ValueError, int.from_bytes, b'c', 'foo') - -def test_to_bytes(): - assert 65535 .to_bytes(2, 'big') == b'\xff\xff' - assert (-8388608).to_bytes(3, 'little', signed=True) == b'\x00\x00\x80' - raises(OverflowError, (-5).to_bytes, 1, 'big') - raises(ValueError, (-5).to_bytes, 1, 'foo') - assert 65535 .to_bytes(length=2, byteorder='big') == b'\xff\xff' - -def test_negative_zero(): - x = eval("-_long(0)") - assert x == _long(0) - -def test_long_real(): - class A(int): pass - b = A(5).real - assert type(b) is int - -#@py.test.mark.skipif("not config.option.runappdirect and sys.maxunicode == 0xffff") -def test_long_from_unicode(): - raises(ValueError, int, '123L') - assert int('L', 22) == 21 - s = '\U0001D7CF\U0001D7CE' # 𝟏𝟎 - assert int(s) == 10 - -def test_long_from_bytes(): - assert int(b'1234') == 1234 - -def test_invalid_literal_message(): - try: - int('hello àèìò') - except ValueError as e: - assert 'hello àèìò' in str(e) - else: - assert False, 'did not raise' - -def test_base_overflow(): - raises(ValueError, int, '42', 2**63) - -def test_long_real(): - class A(int): pass - b = A(5).real - assert type(b) is int - -def test__int__(): - class A(int): - def __int__(self): - return 42 - - assert int(int(3)) == int(3) - assert int(A(13)) == 42 - -def test_long_error_msg(): - e = raises(TypeError, int, []) - assert str(e.value) == ( - "int() argument must be a string, a bytes-like object " - "or a number, not 'list'") - -def test_linear_long_base_16(): - # never finishes if int(_, 16) is not linear-time - size = 100000 - n = "a" * size - expected = (2 << (size * 4)) // 3 - assert int(n, 16) == expected - -def test_large_identity(): - import sys - if '__pypy__' not in sys.builtin_module_names: - skip('PyPy only') - a = sys.maxsize + 1 - b = sys.maxsize + 2 - assert a is not b - b -= 1 - assert a is b diff --git a/pypy/objspace/std/test/test_longobject.py b/pypy/objspace/std/test/test_longobject.py --- a/pypy/objspace/std/test/test_longobject.py +++ b/pypy/objspace/std/test/test_longobject.py @@ -1,4 +1,5 @@ # -*- encoding: utf-8 -*- +import py from pypy.objspace.std import longobject as lobj from rpython.rlib.rbigint import rbigint @@ -36,3 +37,477 @@ x &= r.MASK w_obj = space.newlong_from_rarith_int(r(x)) assert space.bigint_w(w_obj).eq(rbigint.fromlong(x)) + + +class AppTestLong: + + def w__long(self, obj): + # XXX: currently returns a W_LongObject but might return + # W_IntObject in the future + huge = 1 << 65 + return obj + huge - huge + + def test_trunc(self): + import math + assert math.trunc(self._long(1)) == self._long(1) + assert math.trunc(-self._long(1)) == -self._long(1) + + def test_add(self): + x = self._long(123) + assert int(x + self._long(12443)) == 
123 + 12443 + x = -20 + assert x + 2 + self._long(3) + True == -self._long(14) + + def test_sub(self): + assert int(self._long(58543) - self._long(12332)) == 58543 - 12332 + assert int(self._long(58543) - 12332) == 58543 - 12332 + assert int(58543 - self._long(12332)) == 58543 - 12332 + x = self._long(237123838281233) + assert x * 12 == x * self._long(12) + + def test_mul(self): + x = self._long(363) + assert x * 2 ** 40 == x << 40 + + def test_truediv(self): + a = self._long(31415926) / self._long(10000000) + assert a == 3.1415926 + + def test_floordiv(self): + x = self._long(31415926) + a = x // self._long(10000000) + assert a == self._long(3) + + def test_int_floordiv(self): + import sys + long = self._long + + x = long(3000) + a = x // 1000 + assert a == 3 + + x = long(3000) + a = x // -1000 + assert a == -3 + + x = long(3000) + raises(ZeroDivisionError, "x // 0") + + n = sys.maxsize + 1 + assert n / int(-n) == long(-1) + + def test_numerator_denominator(self): + assert (self._long(1)).numerator == self._long(1) + assert (self._long(1)).denominator == self._long(1) + assert (self._long(42)).numerator == self._long(42) + assert (self._long(42)).denominator == self._long(1) + + def test_compare(self): + Z = 0 + ZL = self._long(0) + + assert Z == ZL + assert not (Z != ZL) + assert ZL == Z + assert not (ZL != Z) + assert Z <= ZL + assert not (Z < ZL) + assert ZL <= ZL + assert not (ZL < ZL) + + for BIG in (self._long(1), self._long(1) << 62, self._long(1) << 9999): + assert not (Z == BIG) + assert Z != BIG + assert not (BIG == Z) + assert BIG != Z + assert not (ZL == BIG) + assert ZL != BIG + assert Z <= BIG + assert Z < BIG + assert not (BIG <= Z) + assert not (BIG < Z) + assert ZL <= BIG + assert ZL < BIG + assert not (BIG <= ZL) + assert not (BIG < ZL) + assert not (Z <= -BIG) + assert not (Z < -BIG) + assert -BIG <= Z + assert -BIG < Z + assert not (ZL <= -BIG) + assert not (ZL < -BIG) + assert -BIG <= ZL + assert -BIG < ZL + # + assert not (BIG < int(BIG)) + assert (BIG <= int(BIG)) + assert (BIG == int(BIG)) + assert not (BIG != int(BIG)) + assert not (BIG > int(BIG)) + assert (BIG >= int(BIG)) + # + assert (BIG < int(BIG)+1) + assert (BIG <= int(BIG)+1) + assert not (BIG == int(BIG)+1) + assert (BIG != int(BIG)+1) + assert not (BIG > int(BIG)+1) + assert not (BIG >= int(BIG)+1) + # + assert not (BIG < int(BIG)-1) + assert not (BIG <= int(BIG)-1) + assert not (BIG == int(BIG)-1) + assert (BIG != int(BIG)-1) + assert (BIG > int(BIG)-1) + assert (BIG >= int(BIG)-1) + # + assert not (int(BIG) < BIG) + assert (int(BIG) <= BIG) + assert (int(BIG) == BIG) + assert not (int(BIG) != BIG) + assert not (int(BIG) > BIG) + assert (int(BIG) >= BIG) + # + assert not (int(BIG)+1 < BIG) + assert not (int(BIG)+1 <= BIG) + assert not (int(BIG)+1 == BIG) + assert (int(BIG)+1 != BIG) + assert (int(BIG)+1 > BIG) + assert (int(BIG)+1 >= BIG) + # + assert (int(BIG)-1 < BIG) + assert (int(BIG)-1 <= BIG) + assert not (int(BIG)-1 == BIG) + assert (int(BIG)-1 != BIG) + assert not (int(BIG)-1 > BIG) + assert not (int(BIG)-1 >= BIG) + + def test_conversion(self): + class long2(int): + pass + x = self._long(1) + x = long2(x<<100) + y = int(x) + assert type(y) == int + assert type(+long2(5)) is int + assert type(long2(5) << 0) is int + assert type(long2(5) >> 0) is int + assert type(long2(5) + 0) is int + assert type(long2(5) - 0) is int + assert type(long2(5) * 1) is int + assert type(1 * long2(5)) is int + assert type(0 + long2(5)) is int + assert type(-long2(0)) is int + assert type(long2(5) // 1) is int + + 
def test_shift(self): + long = self._long + assert long(65) >> long(2) == long(16) + assert long(65) >> 2 == long(16) + assert 65 >> long(2) == long(16) + assert long(65) << long(2) == long(65) * 4 + assert long(65) << 2 == long(65) * 4 + assert 65 << long(2) == long(65) * 4 + raises(ValueError, "long(1) << long(-1)") + raises(ValueError, "long(1) << -1") + raises(OverflowError, "long(1) << (2 ** 100)") + raises(ValueError, "long(1) >> long(-1)") + raises(ValueError, "long(1) >> -1") + raises(OverflowError, "long(1) >> (2 ** 100)") + + def test_pow(self): + long = self._long + x = self._long(0) + assert pow(x, self._long(0), self._long(1)) == self._long(0) + assert pow(-self._long(1), -self._long(1)) == -1.0 + assert pow(2 ** 68, 0.5) == 2.0 ** 34 + assert pow(2 ** 68, 2) == 2 ** 136 + raises(ValueError, pow, long(2), -1, 3) + raises(ValueError, pow, long(2), 5, 0) + + # some rpow tests + assert pow(0, long(0), long(1)) == long(0) + assert pow(-1, long(-1)) == -1.0 + + def test_int_pow(self): + long = self._long + x = long(2) + assert pow(x, 2) == long(4) + assert pow(x, 2, 2) == long(0) + assert pow(x, 2, long(3)) == long(1) + + def test_getnewargs(self): + assert self._long(0) .__getnewargs__() == (self._long(0),) + assert (-self._long(1)) .__getnewargs__() == (-self._long(1),) + + def test_divmod(self): + long = self._long + def check_division(x, y): + q, r = divmod(x, y) + pab, pba = x*y, y*x + assert pab == pba + assert q == x // y + assert r == x % y + assert x == q*y + r + if y > 0: + assert 0 <= r < y + else: + assert y < r <= 0 + for x in [-self._long(1), self._long(0), self._long(1), self._long(2) ** 100 - 1, -self._long(2) ** 100 - 1]: + for y in [-self._long(105566530), -self._long(1), self._long(1), self._long(1034522340)]: + print("checking division for %s, %s" % (x, y)) + check_division(x, y) + check_division(x, int(y)) + check_division(int(x), y) + # special case from python tests: + s1 = 33 + s2 = 2 + x = 16565645174462751485571442763871865344588923363439663038777355323778298703228675004033774331442052275771343018700586987657790981527457655176938756028872904152013524821759375058141439 + x >>= s1*16 + y = 10953035502453784575 + y >>= s2*16 + x = 0x3FE0003FFFFC0001FFF + y = self._long(0x9800FFC1) + check_division(x, y) + raises(ZeroDivisionError, "x // self._long(0)") + divmod(3, self._long(4)) + raises(ZeroDivisionError, "x % long(0)") + raises(ZeroDivisionError, divmod, x, long(0)) + raises(ZeroDivisionError, "x // 0") + raises(ZeroDivisionError, "x % 0") + raises(ZeroDivisionError, divmod, x, 0) + + def test_int_divmod(self): + long = self._long + q, r = divmod(long(100), 11) + assert q == 9 + assert r == 1 + + def test_format(self): + assert repr(12345678901234567890) == '12345678901234567890' + assert str(12345678901234567890) == '12345678901234567890' + assert hex(self._long(0x1234567890ABCDEF)) == '0x1234567890abcdef' + assert oct(self._long(0o1234567012345670)) == '0o1234567012345670' + + def test_bits(self): + x = self._long(0xAAAAAAAA) + assert x | self._long(0x55555555) == self._long(0xFFFFFFFF) + assert x & self._long(0x55555555) == self._long(0x00000000) + assert x ^ self._long(0x55555555) == self._long(0xFFFFFFFF) + assert -x | self._long(0x55555555) == -self._long(0xAAAAAAA9) + assert x | self._long(0x555555555) == self._long(0x5FFFFFFFF) + assert x & self._long(0x555555555) == self._long(0x000000000) + assert x ^ self._long(0x555555555) == self._long(0x5FFFFFFFF) + + def test_hash(self): + import sys + modulus = sys.hash_info.modulus + def longhash(x): + 
return hash(self._long(x)) + for x in (list(range(200)) + + [1234567890123456789, 18446743523953737727, + 987685321987685321987685321987685321987685321, + 10**50]): + y = x % modulus + assert longhash(x) == longhash(y) + assert longhash(-x) == longhash(-y) + assert longhash(modulus - 1) == modulus - 1 + assert longhash(modulus) == 0 + assert longhash(modulus + 1) == 1 + + assert longhash(-1) == -2 + value = -(modulus + 1) + assert longhash(value) == -2 + assert longhash(value * 2 + 1) == -2 + assert longhash(value * 4 + 3) == -2 + + def test_hash_2(self): + class AAA: + def __hash__(a): + return self._long(-1) + assert hash(AAA()) == -2 + + def test_math_log(self): + import math + raises(ValueError, math.log, self._long(0)) + raises(ValueError, math.log, -self._long(1)) + raises(ValueError, math.log, -self._long(2)) + raises(ValueError, math.log, -(self._long(1) << 10000)) + #raises(ValueError, math.log, 0) + raises(ValueError, math.log, -1) + raises(ValueError, math.log, -2) + + def test_long(self): + import sys + n = -sys.maxsize-1 + assert int(n) == n + assert str(int(n)) == str(n) + a = memoryview(b'123') + assert int(a) == self._long(123) + + def test_huge_longs(self): + import operator + x = self._long(1) + huge = x << self._long(40000) + raises(OverflowError, float, huge) + raises(OverflowError, operator.truediv, huge, 3) + raises(OverflowError, operator.truediv, huge, self._long(3)) + + def test_just_trunc(self): + class myint(object): + def __trunc__(self): + return 42 + assert int(myint()) == 42 + + def test_override___int__(self): + class myint(int): + def __int__(self): + return 42 + assert int(myint(21)) == 42 + class myotherint(int): + pass + assert int(myotherint(21)) == 21 + + def test___int__(self): + class A(object): + def __int__(self): + return 42 + assert int(A()) == 42 + + class IntSubclass(int): + pass + class ReturnsIntSubclass(object): + def __int__(self): + return IntSubclass(42) + n = int(ReturnsIntSubclass()) + assert n == 42 + # cpython 3.6 fixed behaviour to actually return type int here + assert type(n) is int + + def test_trunc_returns(self): + # but!: (blame CPython 2.7) + class Integral(object): + def __int__(self): + return 42 + class TruncReturnsNonInt(object): + def __trunc__(self): + return Integral() + n = int(TruncReturnsNonInt()) + assert type(n) is int + assert n == 42 + + class IntSubclass(int): + pass + class TruncReturnsNonInt(object): + def __trunc__(self): + return IntSubclass(42) + n = int(TruncReturnsNonInt()) + assert n == 42 + assert type(n) is int + + def test_long_before_string(self): + class A(str): + def __int__(self): + return 42 + assert int(A('abc')) == 42 + + def test_conjugate(self): + assert (self._long(7)).conjugate() == self._long(7) + assert (-self._long(7)).conjugate() == -self._long(7) + + class L(int): + pass + + assert type(L(7).conjugate()) is int + + class L(int): + def __pos__(self): + return 43 + assert L(7).conjugate() == self._long(7) + + def test_bit_length(self): + assert self._long(8).bit_length() == 4 + assert (-1<<40).bit_length() == 41 + assert ((2**31)-1).bit_length() == 31 + + def test_from_bytes(self): + assert int.from_bytes(b'c', 'little') == 99 + assert int.from_bytes(b'\x01\x01', 'little') == 257 + assert int.from_bytes(b'\x01\x00', 'big') == 256 + assert int.from_bytes(b'\x00\x80', 'little', signed=True) == -32768 + assert int.from_bytes([255, 0, 0], 'big', signed=True) == -65536 + raises(TypeError, int.from_bytes, 0, 'big') + raises(TypeError, int.from_bytes, '', 'big') + raises(ValueError, 
int.from_bytes, b'c', 'foo') + + def test_to_bytes(self): + assert 65535 .to_bytes(2, 'big') == b'\xff\xff' + assert (-8388608).to_bytes(3, 'little', signed=True) == b'\x00\x00\x80' + raises(OverflowError, (-5).to_bytes, 1, 'big') + raises(ValueError, (-5).to_bytes, 1, 'foo') + assert 65535 .to_bytes(length=2, byteorder='big') == b'\xff\xff' + + def test_negative_zero(self): + x = eval("-self._long(0)") + assert x == self._long(0) + + def test_long_real(self): + class A(int): pass + b = A(5).real + assert type(b) is int + + @py.test.mark.skipif("not config.option.runappdirect and sys.maxunicode == 0xffff") + def test_long_from_unicode(self): + raises(ValueError, int, '123L') + assert int('L', 22) == 21 + s = '\U0001D7CF\U0001D7CE' # 𝟏𝟎 + assert int(s) == 10 + + def test_long_from_bytes(self): + assert int(b'1234') == 1234 + + def test_invalid_literal_message(self): + try: + int('hello àèìò') + except ValueError as e: + assert 'hello àèìò' in str(e) + else: + assert False, 'did not raise' + + def test_base_overflow(self): + raises(ValueError, int, '42', 2**63) + + def test_long_real(self): + class A(int): pass + b = A(5).real + assert type(b) is int + + def test__int__(self): + class A(int): + def __int__(self): + return 42 + + assert int(int(3)) == int(3) + assert int(A(13)) == 42 + + def test_long_error_msg(self): + e = raises(TypeError, int, []) + assert str(e.value) == ( + "int() argument must be a string, a bytes-like object " + "or a number, not 'list'") + + def test_linear_long_base_16(self): + # never finishes if int(_, 16) is not linear-time + size = 100000 + n = "a" * size + expected = (2 << (size * 4)) // 3 + assert int(n, 16) == expected + + def test_large_identity(self): + import sys + if '__pypy__' not in sys.builtin_module_names: + skip('PyPy only') + a = sys.maxsize + 1 + b = sys.maxsize + 2 + assert a is not b + b -= 1 + assert a is b From pypy.commits at gmail.com Fri Aug 9 11:52:31 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 09 Aug 2019 08:52:31 -0700 (PDT) Subject: [pypy-commit] pypy default: Backed out changeset 5a0b5a0945e0 (breaks test_smalllongobject.py) Message-ID: <5d4d96bf.1c69fb81.1518f.deb1@mx.google.com> Author: Ronan Lamy Branch: Changeset: r97125:3d00647f3dc5 Date: 2019-08-09 16:51 +0100 http://bitbucket.org/pypy/pypy/changeset/3d00647f3dc5/ Log: Backed out changeset 5a0b5a0945e0 (breaks test_smalllongobject.py) diff --git a/pypy/objspace/std/test/apptest_longobject.py b/pypy/objspace/std/test/apptest_longobject.py deleted file mode 100644 --- a/pypy/objspace/std/test/apptest_longobject.py +++ /dev/null @@ -1,409 +0,0 @@ -from pytest import raises -import sys -import math -import operator - -def test_trunc(): - assert math.trunc(1L) == 1L - assert math.trunc(-1L) == -1L - -def test_add(): - x = 123L - assert int(x + 12443L) == 123 + 12443 - x = -20 - assert x + 2 + 3L + True == -14L - -def test_sub(): - assert int(58543L - 12332L) == 58543 - 12332 - assert int(58543L - 12332) == 58543 - 12332 - assert int(58543 - 12332L) == 58543 - 12332 - x = 237123838281233L - assert x * 12 == x * 12L - -def test_mul(): - x = 363L - assert x * 2 ** 40 == x << 40 - -def test_truediv(): - exec "from __future__ import division; a = 31415926L / 10000000L" - assert a == 3.1415926 - -def test_floordiv(): - x = 31415926L - a = x // 10000000L - assert a == 3L - -def test_int_floordiv(): - x = 3000L - a = x // 1000 - assert a == 3L - - x = 3000L - a = x // -1000 - assert a == -3L - - x = 3000L - raises(ZeroDivisionError, "x // 0") - - n = sys.maxint+1 - assert n / 
int(-n) == -1L - -def test_numerator_denominator(): - assert (1L).numerator == 1L - assert (1L).denominator == 1L - assert (42L).numerator == 42L - assert (42L).denominator == 1L - -def test_compare(): - Z = 0 - ZL = 0L - - assert Z == ZL - assert not (Z != ZL) - assert ZL == Z - assert not (ZL != Z) - assert Z <= ZL - assert not (Z < ZL) - assert ZL <= ZL - assert not (ZL < ZL) - - for BIG in (1L, 1L << 62, 1L << 9999): - assert not (Z == BIG) - assert Z != BIG - assert not (BIG == Z) - assert BIG != Z - assert not (ZL == BIG) - assert ZL != BIG - assert Z <= BIG - assert Z < BIG - assert not (BIG <= Z) - assert not (BIG < Z) - assert ZL <= BIG - assert ZL < BIG - assert not (BIG <= ZL) - assert not (BIG < ZL) - assert not (Z <= -BIG) - assert not (Z < -BIG) - assert -BIG <= Z - assert -BIG < Z - assert not (ZL <= -BIG) - assert not (ZL < -BIG) - assert -BIG <= ZL - assert -BIG < ZL - # - assert not (BIG < int(BIG)) - assert (BIG <= int(BIG)) - assert (BIG == int(BIG)) - assert not (BIG != int(BIG)) - assert not (BIG > int(BIG)) - assert (BIG >= int(BIG)) - # - assert (BIG < int(BIG)+1) - assert (BIG <= int(BIG)+1) - assert not (BIG == int(BIG)+1) - assert (BIG != int(BIG)+1) - assert not (BIG > int(BIG)+1) - assert not (BIG >= int(BIG)+1) - # - assert not (BIG < int(BIG)-1) - assert not (BIG <= int(BIG)-1) - assert not (BIG == int(BIG)-1) - assert (BIG != int(BIG)-1) - assert (BIG > int(BIG)-1) - assert (BIG >= int(BIG)-1) - # - assert not (int(BIG) < BIG) - assert (int(BIG) <= BIG) - assert (int(BIG) == BIG) - assert not (int(BIG) != BIG) - assert not (int(BIG) > BIG) - assert (int(BIG) >= BIG) - # - assert not (int(BIG)+1 < BIG) - assert not (int(BIG)+1 <= BIG) - assert not (int(BIG)+1 == BIG) - assert (int(BIG)+1 != BIG) - assert (int(BIG)+1 > BIG) - assert (int(BIG)+1 >= BIG) - # - assert (int(BIG)-1 < BIG) - assert (int(BIG)-1 <= BIG) - assert not (int(BIG)-1 == BIG) - assert (int(BIG)-1 != BIG) - assert not (int(BIG)-1 > BIG) - assert not (int(BIG)-1 >= BIG) - -def test_conversion(): - class long2(long): - pass - x = 1L - x = long2(x<<100) - y = int(x) - assert type(y) == long - assert type(+long2(5)) is long - assert type(long2(5) << 0) is long - assert type(long2(5) >> 0) is long - assert type(long2(5) + 0) is long - assert type(long2(5) - 0) is long - assert type(long2(5) * 1) is long - assert type(1 * long2(5)) is long - assert type(0 + long2(5)) is long - assert type(-long2(0)) is long - assert type(long2(5) // 1) is long - -def test_shift(): - assert 65l >> 2l == 16l - assert 65l >> 2 == 16l - assert 65 >> 2l == 16l - assert 65l << 2l == 65l * 4 - assert 65l << 2 == 65l * 4 - assert 65 << 2l == 65l * 4 - raises(ValueError, "1L << -1L") - raises(ValueError, "1L << -1") - raises(OverflowError, "1L << (2 ** 100)") - raises(ValueError, "1L >> -1L") - raises(ValueError, "1L >> -1") - raises(OverflowError, "1L >> (2 ** 100)") - -def test_pow(): - x = 0L - assert pow(x, 0L, 1L) == 0L - assert pow(-1L, -1L) == -1.0 - assert pow(2 ** 68, 0.5) == 2.0 ** 34 - assert pow(2 ** 68, 2) == 2 ** 136 - raises(TypeError, pow, 2l, -1, 3) - raises(ValueError, pow, 2l, 5, 0) - - # some rpow tests - assert pow(0, 0L, 1L) == 0L - assert pow(-1, -1L) == -1.0 - -def test_int_pow(): - x = 2L - assert pow(x, 2) == 4L - assert pow(x, 2, 2) == 0L - assert pow(x, 2, 3L) == 1L - -def test_getnewargs(): - assert 0L .__getnewargs__() == (0L,) - assert (-1L) .__getnewargs__() == (-1L,) - -def test_divmod(): - def check_division(x, y): - q, r = divmod(x, y) - pab, pba = x*y, y*x - assert pab == pba - assert q 
== x // y - assert r == x % y - assert x == q*y + r - if y > 0: - assert 0 <= r < y - else: - assert y < r <= 0 - for x in [-1L, 0L, 1L, 2L ** 100 - 1, -2L ** 100 - 1]: - for y in [-105566530L, -1L, 1L, 1034522340L]: - print "checking division for %s, %s" % (x, y) - check_division(x, y) - check_division(x, int(y)) - check_division(int(x), y) - # special case from python tests: - s1 = 33 - s2 = 2 - x = 16565645174462751485571442763871865344588923363439663038777355323778298703228675004033774331442052275771343018700586987657790981527457655176938756028872904152013524821759375058141439 - x >>= s1*16 - y = 10953035502453784575 - y >>= s2*16 - x = 0x3FE0003FFFFC0001FFFL - y = 0x9800FFC1L - check_division(x, y) - raises(ZeroDivisionError, "x // 0L") - raises(ZeroDivisionError, "x % 0L") - raises(ZeroDivisionError, divmod, x, 0L) - raises(ZeroDivisionError, "x // 0") - raises(ZeroDivisionError, "x % 0") - raises(ZeroDivisionError, divmod, x, 0) - -def test_int_divmod(): - q, r = divmod(100L, 11) - assert q == 9L - assert r == 1L - -def test_format(): - assert repr(12345678901234567890) == '12345678901234567890L' - assert str(12345678901234567890) == '12345678901234567890' - assert hex(0x1234567890ABCDEFL) == '0x1234567890abcdefL' - assert oct(01234567012345670L) == '01234567012345670L' - -def test_bits(): - x = 0xAAAAAAAAL - assert x | 0x55555555L == 0xFFFFFFFFL - assert x & 0x55555555L == 0x00000000L - assert x ^ 0x55555555L == 0xFFFFFFFFL - assert -x | 0x55555555L == -0xAAAAAAA9L - assert x | 0x555555555L == 0x5FFFFFFFFL - assert x & 0x555555555L == 0x000000000L - assert x ^ 0x555555555L == 0x5FFFFFFFFL - -def test_hash(): - # ints have the same hash as equal longs - for i in range(-4, 14): - assert hash(i) == hash(long(i)) == long(i).__hash__() - # might check too much -- it's ok to change the hashing algorithm - assert hash(123456789L) == 123456789 - assert hash(1234567890123456789L) in ( - -1895067127, # with 32-bit platforms - 1234567890123456789) # with 64-bit platforms - -def test_math_log(): - raises(ValueError, math.log, 0L) - raises(ValueError, math.log, -1L) - raises(ValueError, math.log, -2L) - raises(ValueError, math.log, -(1L << 10000)) - #raises(ValueError, math.log, 0) - raises(ValueError, math.log, -1) - raises(ValueError, math.log, -2) - -def test_long(): - n = -sys.maxint-1 - assert long(n) == n - assert str(long(n)) == str(n) - a = buffer('123') - assert long(a) == 123L - -def test_huge_longs(): - x = 1L - huge = x << 40000L - raises(OverflowError, float, huge) - raises(OverflowError, operator.truediv, huge, 3) - raises(OverflowError, operator.truediv, huge, 3L) - -def test_just_trunc(): - class myint(object): - def __trunc__(self): - return 42 - assert long(myint()) == 42 - -def test_override___long__(): - class mylong(long): - def __long__(self): - return 42L - assert long(mylong(21)) == 42L - class myotherlong(long): - pass - assert long(myotherlong(21)) == 21L - -def test___long__(): - class A(object): - def __long__(self): - return 42 - assert long(A()) == 42L - class B(object): - def __int__(self): - return 42 - raises(TypeError, long, B()) - - class LongSubclass(long): - pass - class ReturnsLongSubclass(object): - def __long__(self): - return LongSubclass(42L) - n = long(ReturnsLongSubclass()) - assert n == 42 - assert type(n) is LongSubclass - -def test_trunc_returns(): - # but!: (blame CPython 2.7) - class Integral(object): - def __int__(self): - return 42 - class TruncReturnsNonLong(object): - def __trunc__(self): - return Integral() - n = long(TruncReturnsNonLong()) 
- assert type(n) is long - assert n == 42 - - class LongSubclass(long): - pass - class TruncReturnsNonInt(object): - def __trunc__(self): - return LongSubclass(42) - n = long(TruncReturnsNonInt()) - assert n == 42 - assert type(n) is LongSubclass - -def test_long_before_string(): - class A(str): - def __long__(self): - return 42 - assert long(A('abc')) == 42 - -def test_long_errors(): - raises(TypeError, long, 12, 12) - raises(ValueError, long, 'xxxxxx?', 12) - -def test_conjugate(): - assert (7L).conjugate() == 7L - assert (-7L).conjugate() == -7L - - class L(long): - pass - - assert type(L(7).conjugate()) is long - - class L(long): - def __pos__(self): - return 43 - assert L(7).conjugate() == 7L - -def test_bit_length(): - assert 8L.bit_length() == 4 - assert (-1<<40).bit_length() == 41 - assert ((2**31)-1).bit_length() == 31 - -def test_negative_zero(): - x = eval("-0L") - assert x == 0L - -def test_mix_int_and_long(): - class IntLongMixClass(object): - def __int__(self): - return 42L - - def __long__(self): - return 64 - - mixIntAndLong = IntLongMixClass() - as_long = long(mixIntAndLong) - assert type(as_long) is long - assert as_long == 64 - -def test_long_real(): - class A(long): pass - b = A(5).real - assert type(b) is long - -def test__int__(): - class A(long): - def __int__(self): - return 42 - - assert int(long(3)) == long(3) - assert int(A(13)) == 42 - -def test_long_error_msg(): - e = raises(TypeError, long, []) - assert str(e.value) == ( - "long() argument must be a string or a number, not 'list'") - -def test_coerce(): - assert 3L.__coerce__(4L) == (3L, 4L) - assert 3L.__coerce__(4) == (3, 4) - assert 3L.__coerce__(object()) == NotImplemented - -def test_linear_long_base_16(): - # never finishes if long(_, 16) is not linear-time - size = 100000 - n = "a" * size - expected = (2 << (size * 4)) // 3 - assert long(n, 16) == expected diff --git a/pypy/objspace/std/test/test_longobject.py b/pypy/objspace/std/test/test_longobject.py --- a/pypy/objspace/std/test/test_longobject.py +++ b/pypy/objspace/std/test/test_longobject.py @@ -1,3 +1,4 @@ +import py from pypy.objspace.std import longobject as lobj from rpython.rlib.rbigint import rbigint @@ -35,3 +36,417 @@ x &= r.MASK w_obj = space.newlong_from_rarith_int(r(x)) assert space.bigint_w(w_obj).eq(rbigint.fromlong(x)) + + +class AppTestLong: + def test_trunc(self): + import math + assert math.trunc(1L) == 1L + assert math.trunc(-1L) == -1L + + def test_add(self): + x = 123L + assert int(x + 12443L) == 123 + 12443 + x = -20 + assert x + 2 + 3L + True == -14L + + def test_sub(self): + assert int(58543L - 12332L) == 58543 - 12332 + assert int(58543L - 12332) == 58543 - 12332 + assert int(58543 - 12332L) == 58543 - 12332 + x = 237123838281233L + assert x * 12 == x * 12L + + def test_mul(self): + x = 363L + assert x * 2 ** 40 == x << 40 + + def test_truediv(self): + exec "from __future__ import division; a = 31415926L / 10000000L" + assert a == 3.1415926 + + def test_floordiv(self): + x = 31415926L + a = x // 10000000L + assert a == 3L + + def test_int_floordiv(self): + import sys + + x = 3000L + a = x // 1000 + assert a == 3L + + x = 3000L + a = x // -1000 + assert a == -3L + + x = 3000L + raises(ZeroDivisionError, "x // 0") + + n = sys.maxint+1 + assert n / int(-n) == -1L + + def test_numerator_denominator(self): + assert (1L).numerator == 1L + assert (1L).denominator == 1L + assert (42L).numerator == 42L + assert (42L).denominator == 1L + + def test_compare(self): + Z = 0 + ZL = 0L + + assert Z == ZL + assert not (Z != ZL) + assert ZL 
== Z + assert not (ZL != Z) + assert Z <= ZL + assert not (Z < ZL) + assert ZL <= ZL + assert not (ZL < ZL) + + for BIG in (1L, 1L << 62, 1L << 9999): + assert not (Z == BIG) + assert Z != BIG + assert not (BIG == Z) + assert BIG != Z + assert not (ZL == BIG) + assert ZL != BIG + assert Z <= BIG + assert Z < BIG + assert not (BIG <= Z) + assert not (BIG < Z) + assert ZL <= BIG + assert ZL < BIG + assert not (BIG <= ZL) + assert not (BIG < ZL) + assert not (Z <= -BIG) + assert not (Z < -BIG) + assert -BIG <= Z + assert -BIG < Z + assert not (ZL <= -BIG) + assert not (ZL < -BIG) + assert -BIG <= ZL + assert -BIG < ZL + # + assert not (BIG < int(BIG)) + assert (BIG <= int(BIG)) + assert (BIG == int(BIG)) + assert not (BIG != int(BIG)) + assert not (BIG > int(BIG)) + assert (BIG >= int(BIG)) + # + assert (BIG < int(BIG)+1) + assert (BIG <= int(BIG)+1) + assert not (BIG == int(BIG)+1) + assert (BIG != int(BIG)+1) + assert not (BIG > int(BIG)+1) + assert not (BIG >= int(BIG)+1) + # + assert not (BIG < int(BIG)-1) + assert not (BIG <= int(BIG)-1) + assert not (BIG == int(BIG)-1) + assert (BIG != int(BIG)-1) + assert (BIG > int(BIG)-1) + assert (BIG >= int(BIG)-1) + # + assert not (int(BIG) < BIG) + assert (int(BIG) <= BIG) + assert (int(BIG) == BIG) + assert not (int(BIG) != BIG) + assert not (int(BIG) > BIG) + assert (int(BIG) >= BIG) + # + assert not (int(BIG)+1 < BIG) + assert not (int(BIG)+1 <= BIG) + assert not (int(BIG)+1 == BIG) + assert (int(BIG)+1 != BIG) + assert (int(BIG)+1 > BIG) + assert (int(BIG)+1 >= BIG) + # + assert (int(BIG)-1 < BIG) + assert (int(BIG)-1 <= BIG) + assert not (int(BIG)-1 == BIG) + assert (int(BIG)-1 != BIG) + assert not (int(BIG)-1 > BIG) + assert not (int(BIG)-1 >= BIG) + + def test_conversion(self): + class long2(long): + pass + x = 1L + x = long2(x<<100) + y = int(x) + assert type(y) == long + assert type(+long2(5)) is long + assert type(long2(5) << 0) is long + assert type(long2(5) >> 0) is long + assert type(long2(5) + 0) is long + assert type(long2(5) - 0) is long + assert type(long2(5) * 1) is long + assert type(1 * long2(5)) is long + assert type(0 + long2(5)) is long + assert type(-long2(0)) is long + assert type(long2(5) // 1) is long + + def test_shift(self): + assert 65l >> 2l == 16l + assert 65l >> 2 == 16l + assert 65 >> 2l == 16l + assert 65l << 2l == 65l * 4 + assert 65l << 2 == 65l * 4 + assert 65 << 2l == 65l * 4 + raises(ValueError, "1L << -1L") + raises(ValueError, "1L << -1") + raises(OverflowError, "1L << (2 ** 100)") + raises(ValueError, "1L >> -1L") + raises(ValueError, "1L >> -1") + raises(OverflowError, "1L >> (2 ** 100)") + + def test_pow(self): + x = 0L + assert pow(x, 0L, 1L) == 0L + assert pow(-1L, -1L) == -1.0 + assert pow(2 ** 68, 0.5) == 2.0 ** 34 + assert pow(2 ** 68, 2) == 2 ** 136 + raises(TypeError, pow, 2l, -1, 3) + raises(ValueError, pow, 2l, 5, 0) + + # some rpow tests + assert pow(0, 0L, 1L) == 0L + assert pow(-1, -1L) == -1.0 + + def test_int_pow(self): + x = 2L + assert pow(x, 2) == 4L + assert pow(x, 2, 2) == 0L + assert pow(x, 2, 3L) == 1L + + def test_getnewargs(self): + assert 0L .__getnewargs__() == (0L,) + assert (-1L) .__getnewargs__() == (-1L,) + + def test_divmod(self): + def check_division(x, y): + q, r = divmod(x, y) + pab, pba = x*y, y*x + assert pab == pba + assert q == x // y + assert r == x % y + assert x == q*y + r + if y > 0: + assert 0 <= r < y + else: + assert y < r <= 0 + for x in [-1L, 0L, 1L, 2L ** 100 - 1, -2L ** 100 - 1]: + for y in [-105566530L, -1L, 1L, 1034522340L]: + print "checking division 
for %s, %s" % (x, y) + check_division(x, y) + check_division(x, int(y)) + check_division(int(x), y) + # special case from python tests: + s1 = 33 + s2 = 2 + x = 16565645174462751485571442763871865344588923363439663038777355323778298703228675004033774331442052275771343018700586987657790981527457655176938756028872904152013524821759375058141439 + x >>= s1*16 + y = 10953035502453784575 + y >>= s2*16 + x = 0x3FE0003FFFFC0001FFFL + y = 0x9800FFC1L + check_division(x, y) + raises(ZeroDivisionError, "x // 0L") + raises(ZeroDivisionError, "x % 0L") + raises(ZeroDivisionError, divmod, x, 0L) + raises(ZeroDivisionError, "x // 0") + raises(ZeroDivisionError, "x % 0") + raises(ZeroDivisionError, divmod, x, 0) + + def test_int_divmod(self): + q, r = divmod(100L, 11) + assert q == 9L + assert r == 1L + + def test_format(self): + assert repr(12345678901234567890) == '12345678901234567890L' + assert str(12345678901234567890) == '12345678901234567890' + assert hex(0x1234567890ABCDEFL) == '0x1234567890abcdefL' + assert oct(01234567012345670L) == '01234567012345670L' + + def test_bits(self): + x = 0xAAAAAAAAL + assert x | 0x55555555L == 0xFFFFFFFFL + assert x & 0x55555555L == 0x00000000L + assert x ^ 0x55555555L == 0xFFFFFFFFL + assert -x | 0x55555555L == -0xAAAAAAA9L + assert x | 0x555555555L == 0x5FFFFFFFFL + assert x & 0x555555555L == 0x000000000L + assert x ^ 0x555555555L == 0x5FFFFFFFFL + + def test_hash(self): + # ints have the same hash as equal longs + for i in range(-4, 14): + assert hash(i) == hash(long(i)) == long(i).__hash__() + # might check too much -- it's ok to change the hashing algorithm + assert hash(123456789L) == 123456789 + assert hash(1234567890123456789L) in ( + -1895067127, # with 32-bit platforms + 1234567890123456789) # with 64-bit platforms + + def test_math_log(self): + import math + raises(ValueError, math.log, 0L) + raises(ValueError, math.log, -1L) + raises(ValueError, math.log, -2L) + raises(ValueError, math.log, -(1L << 10000)) + #raises(ValueError, math.log, 0) + raises(ValueError, math.log, -1) + raises(ValueError, math.log, -2) + + def test_long(self): + import sys + n = -sys.maxint-1 + assert long(n) == n + assert str(long(n)) == str(n) + a = buffer('123') + assert long(a) == 123L + + def test_huge_longs(self): + import operator + x = 1L + huge = x << 40000L + raises(OverflowError, float, huge) + raises(OverflowError, operator.truediv, huge, 3) + raises(OverflowError, operator.truediv, huge, 3L) + + def test_just_trunc(self): + class myint(object): + def __trunc__(self): + return 42 + assert long(myint()) == 42 + + def test_override___long__(self): + class mylong(long): + def __long__(self): + return 42L + assert long(mylong(21)) == 42L + class myotherlong(long): + pass + assert long(myotherlong(21)) == 21L + + def test___long__(self): + class A(object): + def __long__(self): + return 42 + assert long(A()) == 42L + class B(object): + def __int__(self): + return 42 + raises(TypeError, long, B()) + + class LongSubclass(long): + pass + class ReturnsLongSubclass(object): + def __long__(self): + return LongSubclass(42L) + n = long(ReturnsLongSubclass()) + assert n == 42 + assert type(n) is LongSubclass + + def test_trunc_returns(self): + # but!: (blame CPython 2.7) + class Integral(object): + def __int__(self): + return 42 + class TruncReturnsNonLong(object): + def __trunc__(self): + return Integral() + n = long(TruncReturnsNonLong()) + assert type(n) is long + assert n == 42 + + class LongSubclass(long): + pass + class TruncReturnsNonInt(object): + def __trunc__(self): + 
return LongSubclass(42) + n = long(TruncReturnsNonInt()) + assert n == 42 + assert type(n) is LongSubclass + + def test_long_before_string(self): + class A(str): + def __long__(self): + return 42 + assert long(A('abc')) == 42 + + def test_long_errors(self): + raises(TypeError, long, 12, 12) + raises(ValueError, long, 'xxxxxx?', 12) + + def test_conjugate(self): + assert (7L).conjugate() == 7L + assert (-7L).conjugate() == -7L + + class L(long): + pass + + assert type(L(7).conjugate()) is long + + class L(long): + def __pos__(self): + return 43 + assert L(7).conjugate() == 7L + + def test_bit_length(self): + assert 8L.bit_length() == 4 + assert (-1<<40).bit_length() == 41 + assert ((2**31)-1).bit_length() == 31 + + def test_negative_zero(self): + x = eval("-0L") + assert x == 0L + + def test_mix_int_and_long(self): + class IntLongMixClass(object): + def __int__(self): + return 42L + + def __long__(self): + return 64 + + mixIntAndLong = IntLongMixClass() + as_long = long(mixIntAndLong) + assert type(as_long) is long + assert as_long == 64 + + def test_long_real(self): + class A(long): pass + b = A(5).real + assert type(b) is long + + def test__int__(self): + class A(long): + def __int__(self): + return 42 + + assert int(long(3)) == long(3) + assert int(A(13)) == 42 + + def test_long_error_msg(self): + e = raises(TypeError, long, []) + assert str(e.value) == ( + "long() argument must be a string or a number, not 'list'") + + def test_coerce(self): + assert 3L.__coerce__(4L) == (3L, 4L) + assert 3L.__coerce__(4) == (3, 4) + assert 3L.__coerce__(object()) == NotImplemented + + def test_linear_long_base_16(self): + # never finishes if long(_, 16) is not linear-time + size = 100000 + n = "a" * size + expected = (2 << (size * 4)) // 3 + assert long(n, 16) == expected + From pypy.commits at gmail.com Fri Aug 9 12:08:53 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 09 Aug 2019 09:08:53 -0700 (PDT) Subject: [pypy-commit] pypy default: kill overly complicated apptest that duplicates lib-python Message-ID: <5d4d9a95.1c69fb81.948b2.9d30@mx.google.com> Author: Ronan Lamy Branch: Changeset: r97126:9f3e40bec8d3 Date: 2019-08-09 17:08 +0100 http://bitbucket.org/pypy/pypy/changeset/9f3e40bec8d3/ Log: kill overly complicated apptest that duplicates lib-python diff --git a/pypy/module/_lsprof/test/test_cprofile.py b/pypy/module/_lsprof/test/test_cprofile.py --- a/pypy/module/_lsprof/test/test_cprofile.py +++ b/pypy/module/_lsprof/test/test_cprofile.py @@ -4,7 +4,6 @@ } def setup_class(cls): - cls.w_expected_output = cls.space.wrap(expected_output) cls.w_file = cls.space.wrap(__file__) def test_repr(self): @@ -167,153 +166,3 @@ prof.disable() stats = prof.getstats() assert len(stats) == 2 - - def test_use_cprofile(self): - import sys, os - # XXX this is evil trickery to walk around the fact that we don't - # have __file__ at app-level here - sys.path.insert(0, os.path.dirname(self.file)) - try: - import re - from cProfile import Profile - from profilee import testfunc, timer - - methodnames = ['print_stats', 'print_callers', 'print_callees'] - - def do_profiling(cls): - results = [] - prof = cls(timer, 0.001) - start_timer = timer() - prof.runctx("testfunc()", {'testfunc':testfunc}, locals()) - results.append(timer() - start_timer) - for methodname in methodnames: - import pstats - from StringIO import StringIO - s = StringIO() - stats = pstats.Stats(prof, stream=s) - stats.strip_dirs().sort_stats("stdname") - getattr(stats, methodname)() - results.append(s.getvalue()) - return results, prof - - res, 
prof = do_profiling(Profile) - assert res[0] == 1000 - for i, method in enumerate(methodnames): - got = res[i + 1] - expected = self.expected_output[method] - patterns = expected.splitlines() - lines = set(got.splitlines()) - lines.remove('') # ignore blank lines - for pattern in patterns: - if not pattern: - continue # ignore blank lines - pattern = pattern.replace('(', '\\(') - pattern = pattern.replace(')', '\\)') - pattern = pattern.replace('?', '\\?') - pattern = pattern.replace(r'\\?', '?') - pattern = pattern.replace(r'\\(', '(') - pattern = pattern.replace(r'\\)', ')') - repattern = re.compile('^' + pattern + '$') - for line in lines: - if repattern.match(line): - lines.remove(line) - break - else: - print('NOT FOUND: %s' % pattern.rstrip('\n')) - print('--- GOT ---') - print(got) - print('') - print('--- EXPECTED ---') - print(expected) - assert False - assert not lines - finally: - sys.path.pop(0) - - -expected_output = {} -expected_output['print_stats'] = """\ - 126 function calls (106 primitive calls) in 1.000 seconds - - Ordered by: standard name - - ncalls tottime percall cumtime percall filename:lineno(function) - 1 0.000 0.000 1.000 1.000 :1() - 28 0.028 0.001 0.028 0.001 profilee.py:110(__getattr__) - 1 0.270 0.270 1.000 1.000 profilee.py:25(testfunc) - 23/3 0.150 0.007 0.170 0.057 profilee.py:35(factorial) - 20 0.020 0.001 0.020 0.001 profilee.py:48(mul) - 2 0.040 0.020 0.600 0.300 profilee.py:55(helper) - 4 0.116 0.029 0.120 0.030 profilee.py:73(helper1) - 2 0.000 0.000 0.140 0.070 profilee.py:84(helper2_indirect) - 8 0.312 0.039 0.400 0.050 profilee.py:88(helper2) - 8 0.064 0.008 0.080 0.010 profilee.py:98(subhelper) - 4 0.000 0.000 0.000 0.000 {method 'append' of 'list' objects} - 1 0.000 0.000 0.000 0.000 {method 'disable' of '_lsprof.Profiler' objects} - 12 0.000 0.000 0.012 0.001 {hasattr} - 8 0.000 0.000 0.000 0.000 {range} - 4 0.000 0.000 0.000 0.000 {sys.exc_info} - - -""" - -expected_output['print_callers'] = """\ - Ordered by: standard name - -Function * was called by... - * ncalls tottime cumtime -:1() * <- -profilee.py:110(__getattr__) * <- 16 0.016 0.016 profilee.py:98(subhelper) - * 12 0.012 0.012 {hasattr.*} -profilee.py:25(testfunc) * <- 1 0.270 1.000 :1() -profilee.py:35(factorial) * <- 1 0.014 0.130 profilee.py:25(testfunc) - * 20/3 0.130 0.147 profilee.py:35(factorial) - * 2 0.006 0.040 profilee.py:84(helper2_indirect) -profilee.py:48(mul) * <- 20 0.020 0.020 profilee.py:35(factorial) -profilee.py:55(helper) * <- 2 0.040 0.600 profilee.py:25(testfunc) -profilee.py:73(helper1) * <- 4 0.116 0.120 profilee.py:55(helper) -profilee.py:84(helper2_indirect) *<- 2 0.000 0.140 profilee.py:55(helper) -profilee.py:88(helper2) * <- 6 0.234 0.300 profilee.py:55(helper) - * 2 0.078 0.100 profilee.py:84(helper2_indirect) -profilee.py:98(subhelper) * <- 8 0.064 0.080 profilee.py:88(helper2) -{.*append.*} * <- 4 0.000 0.000 profilee.py:73(helper1) -{.*disable.*} * <- -{hasattr.*} * <- 4 0.000 0.004 profilee.py:73(helper1) - * 8 0.000 0.008 profilee.py:88(helper2) -{range.*} * <- 8 0.000 0.000 profilee.py:98(subhelper) -{sys.exc_info.*} * <- 4 0.000 0.000 profilee.py:73(helper1) - - -""" -expected_output['print_callees'] = """\ - Ordered by: standard name - -Function * called... 
- * ncalls tottime cumtime -:1() * -> 1 0.270 1.000 profilee.py:25(testfunc) -profilee.py:110(__getattr__) * -> -profilee.py:25(testfunc) * -> 1 0.014 0.130 profilee.py:35(factorial) - * 2 0.040 0.600 profilee.py:55(helper) -profilee.py:35(factorial) * -> 20/3 0.130 0.147 profilee.py:35(factorial) - * 20 0.020 0.020 profilee.py:48(mul) -profilee.py:48(mul) * -> -profilee.py:55(helper) * -> 4 0.116 0.120 profilee.py:73(helper1) - * 2 0.000 0.140 profilee.py:84(helper2_indirect) - * 6 0.234 0.300 profilee.py:88(helper2) -\\(profilee.py:73(helper1)\\)\\? * .. 4 0.000 0.000 {.*append.*} -\\(profilee.py:73(helper1)\\)\\? * .. 4 0.000 0.004 {.*hasattr.*} - * 4 0.000 0.000 {sys.exc_info.*} -profilee.py:84(helper2_indirect) * -> 2 0.006 0.040 profilee.py:35(factorial) - * 2 0.078 0.100 profilee.py:88(helper2) -profilee.py:88(helper2) * -> 8 0.064 0.080 profilee.py:98(subhelper) - * 8 0.000 0.008 {hasattr.*} -profilee.py:98(subhelper) * -> 16 0.016 0.016 profilee.py:110(__getattr__) - * 8 0.000 0.000 {range.*} -{.*append.*} * -> -{.*disable.*} * -> -{hasattr.*} * -> 12 0.012 0.012 profilee.py:110(__getattr__) -{range.*} * -> -{sys.exc_info.*} * -> - - -""" From pypy.commits at gmail.com Fri Aug 9 12:14:05 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 09 Aug 2019 09:14:05 -0700 (PDT) Subject: [pypy-commit] pypy default: kill flaky test Message-ID: <5d4d9bcd.1c69fb81.9694e.7aef@mx.google.com> Author: Ronan Lamy Branch: Changeset: r97127:a46bede6925d Date: 2019-08-09 17:13 +0100 http://bitbucket.org/pypy/pypy/changeset/a46bede6925d/ Log: kill flaky test diff --git a/pypy/objspace/std/test/test_methodcache.py b/pypy/objspace/std/test/test_methodcache.py --- a/pypy/objspace/std/test/test_methodcache.py +++ b/pypy/objspace/std/test/test_methodcache.py @@ -72,46 +72,6 @@ assert cache_counter[1] >= 2 # should be (18, 2) assert sum(cache_counter) == 20 - def test_change_methods(self): - @self.retry - def run(): - import __pypy__ - class A(object): - def f(self): - return 42 - l = [A()] * 10 - __pypy__.reset_method_cache_counter() - for i, a in enumerate(l): - assert a.f() == 42 + i - A.f = eval("lambda self: %s" % (42 + i + 1, )) - cache_counter = __pypy__.method_cache_counter("f") - # - # a bit of explanation about what's going on. (1) is the line "a.f()" - # and (2) is "A.f = ...". - # - # at line (1) we do the lookup on type(a).f - # - # at line (2) we do a setattr on A. However, descr_setattr does also a - # lookup of type(A).f i.e. type.f, to check if by chance 'f' is a data - # descriptor. - # - # At the first iteration: - # (1) is a miss because it's the first lookup of A.f. The result is cached - # - # (2) is a miss because it is the first lookup of type.f. The - # (non-existant) result is cached. The version of A changes, and 'f' - # is changed to be a cell object, so that subsequest assignments won't - # change the version of A - # - # At the second iteration: - # (1) is a miss because the version of A changed just before - # (2) is a hit, because type.f is cached. 
The version of A no longer changes - # - # At the third and subsequent iterations: - # (1) is a hit, because the version of A did not change - # (2) is a hit, see above - assert cache_counter == (17, 3) - def test_subclasses(self): @self.retry def run(): From pypy.commits at gmail.com Fri Aug 9 12:32:38 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 09 Aug 2019 09:32:38 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: hg merge default Message-ID: <5d4da026.1c69fb81.a8301.3f14@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97128:5dc520fcc4f9 Date: 2019-08-09 17:31 +0100 http://bitbucket.org/pypy/pypy/changeset/5dc520fcc4f9/ Log: hg merge default diff --git a/pypy/module/_lsprof/test/test_cprofile.py b/pypy/module/_lsprof/test/test_cprofile.py --- a/pypy/module/_lsprof/test/test_cprofile.py +++ b/pypy/module/_lsprof/test/test_cprofile.py @@ -4,7 +4,6 @@ } def setup_class(cls): - cls.w_expected_output = cls.space.wrap(expected_output) cls.w_file = cls.space.wrap(__file__) def test_repr(self): @@ -167,152 +166,3 @@ prof.disable() stats = prof.getstats() assert len(stats) == 2 - - def test_use_cprofile(self): - import sys, os - # XXX this is evil trickery to walk around the fact that we don't - # have __file__ at app-level here - sys.path.insert(0, os.path.dirname(self.file)) - try: - import re - from cProfile import Profile - from profilee import testfunc, timer - - methodnames = ['print_stats', 'print_callers', 'print_callees'] - - def do_profiling(cls): - results = [] - prof = cls(timer, 0.001) - start_timer = timer() - prof.runctx("testfunc()", {'testfunc':testfunc}, locals()) - results.append(timer() - start_timer) - for methodname in methodnames: - import pstats - from io import StringIO - s = StringIO() - stats = pstats.Stats(prof, stream=s) - stats.strip_dirs().sort_stats("stdname") - getattr(stats, methodname)() - results.append(s.getvalue()) - return results, prof - - res, prof = do_profiling(Profile) - assert res[0] == 1000 - for i, method in enumerate(methodnames): - got = res[i + 1] - expected = self.expected_output[method] - patterns = expected.splitlines() - lines = set(got.splitlines()) - lines.remove('') # ignore blank lines - for pattern in patterns: - if not pattern: - continue # ignore blank lines - pattern = pattern.replace('(', '\\(') - pattern = pattern.replace(')', '\\)') - pattern = pattern.replace('?', '\\?') - pattern = pattern.replace(r'\\?', '?') - pattern = pattern.replace(r'\\(', '(') - pattern = pattern.replace(r'\\)', ')') - repattern = re.compile('^' + pattern + '$') - for line in lines: - if repattern.match(line): - lines.remove(line) - break - else: - print('NOT FOUND: %s' % pattern.rstrip('\n')) - print('--- GOT ---') - print(got) - print('') - print('--- EXPECTED ---') - print(expected) - assert False - assert not lines - finally: - sys.path.pop(0) - - -expected_output = {} -expected_output['print_stats'] = """\ - 119 function calls (99 primitive calls) in 1.000 seconds - - Ordered by: standard name - - ncalls tottime percall cumtime percall filename:lineno(function) - 1 0.000 0.000 1.000 1.000 :1() - 28 0.028 0.001 0.028 0.001 profilee.py:110(__getattr__) - 1 0.270 0.270 1.000 1.000 profilee.py:25(testfunc) - 23/3 0.150 0.007 0.170 0.057 profilee.py:35(factorial) - 20 0.020 0.001 0.020 0.001 profilee.py:48(mul) - 2 0.040 0.020 0.600 0.300 profilee.py:55(helper) - 4 0.116 0.029 0.120 0.030 profilee.py:73(helper1) - 2 0.000 0.000 0.140 0.070 profilee.py:84(helper2_indirect) - 8 0.312 0.039 0.400 0.050 profilee.py:88(helper2) - 8 0.064 
0.008 0.080 0.010 profilee.py:98(subhelper) - 1 0.000 0.000 1.000 1.000 {built-in function exec} - 12 0.000 0.000 0.012 0.001 {built-in function hasattr} - 4 0.000 0.000 0.000 0.000 {built-in function sys.exc_info} - 4 0.000 0.000 0.000 0.000 {method 'append' of 'list' objects} - 1 0.000 0.000 0.000 0.000 {method 'disable' of '_lsprof.Profiler' objects} - - -""" - -expected_output['print_callers'] = """\ - Ordered by: standard name - -Function was called by... - ncalls tottime cumtime -:1() <- 1 0.000 1.000 {built-in function exec} -profilee.py:110(__getattr__) <- 16 0.016 0.016 profilee.py:98(subhelper) - 12 0.012 0.012 {built-in function hasattr} -profilee.py:25(testfunc) <- 1 0.270 1.000 :1() -profilee.py:35(factorial) <- 1 0.014 0.130 profilee.py:25(testfunc) - 20/3 0.130 0.147 profilee.py:35(factorial) - 2 0.006 0.040 profilee.py:84(helper2_indirect) -profilee.py:48(mul) <- 20 0.020 0.020 profilee.py:35(factorial) -profilee.py:55(helper) <- 2 0.040 0.600 profilee.py:25(testfunc) -profilee.py:73(helper1) <- 4 0.116 0.120 profilee.py:55(helper) -profilee.py:84(helper2_indirect) <- 2 0.000 0.140 profilee.py:55(helper) -profilee.py:88(helper2) <- 6 0.234 0.300 profilee.py:55(helper) - 2 0.078 0.100 profilee.py:84(helper2_indirect) -profilee.py:98(subhelper) <- 8 0.064 0.080 profilee.py:88(helper2) -{built-in function exec} <- -{built-in function hasattr} <- 4 0.000 0.004 profilee.py:73(helper1) - 8 0.000 0.008 profilee.py:88(helper2) -{method 'append' of 'list' objects} <- 4 0.000 0.000 profilee.py:73(helper1) -{method 'disable' of '_lsprof.Profiler' objects} <- -{built-in function sys.exc_info} <- 4 0.000 0.000 profilee.py:73(helper1) - - -""" -expected_output['print_callees'] = """\ - Ordered by: standard name - -Function called... - ncalls tottime cumtime -:1() -> 1 0.270 1.000 profilee.py:25(testfunc) -profilee.py:110(__getattr__) -> -profilee.py:25(testfunc) -> 1 0.014 0.130 profilee.py:35(factorial) - 2 0.040 0.600 profilee.py:55(helper) -profilee.py:35(factorial) -> 20/3 0.130 0.147 profilee.py:35(factorial) - 20 0.020 0.020 profilee.py:48(mul) -profilee.py:48(mul) -> -profilee.py:55(helper) -> 4 0.116 0.120 profilee.py:73(helper1) - 2 0.000 0.140 profilee.py:84(helper2_indirect) - 6 0.234 0.300 profilee.py:88(helper2) -profilee.py:73(helper1) -> 4 0.000 0.004 {built-in function hasattr} - 4 0.000 0.000 {method 'append' of 'list' objects} - 4 0.000 0.000 {built-in function sys.exc_info} -profilee.py:84(helper2_indirect) -> 2 0.006 0.040 profilee.py:35(factorial) - 2 0.078 0.100 profilee.py:88(helper2) -profilee.py:88(helper2) -> 8 0.064 0.080 profilee.py:98(subhelper) - 8 0.000 0.008 {built-in function hasattr} -profilee.py:98(subhelper) -> 16 0.016 0.016 profilee.py:110(__getattr__) -{built-in function exec} -> 1 0.000 1.000 :1() -{built-in function hasattr} -> 12 0.012 0.012 profilee.py:110(__getattr__) -{method 'append' of 'list' objects} -> -{method 'disable' of '_lsprof.Profiler' objects} -> -{built-in function sys.exc_info} -> - - -""" diff --git a/pypy/objspace/std/test/test_methodcache.py b/pypy/objspace/std/test/test_methodcache.py --- a/pypy/objspace/std/test/test_methodcache.py +++ b/pypy/objspace/std/test/test_methodcache.py @@ -47,52 +47,6 @@ assert cache_counter[1] >= 3 # should be (27, 3) assert sum(cache_counter) == 30 - def test_change_methods(self): - # this test fails because of the following line in typeobject.py:427 - # if cached_name is name: - - # in py3k, identifiers are stored in W_UnicodeObject and unwrapped by - # calling space.text_w, which 
.encode('ascii') the string, thus - # creating new strings all the time. The problem should be solved when - # we implement proper unicode identifiers in py3k - @self.retry - def run(): - import __pypy__ - class A(object): - def f(self): - return 42 - l = [A()] * 10 - __pypy__.reset_method_cache_counter() - for i, a in enumerate(l): - assert a.f() == 42 + i - A.f = eval("lambda self: %s" % (42 + i + 1, )) - cache_counter = __pypy__.method_cache_counter("f") - # - # a bit of explanation about what's going on. (1) is the line "a.f()" - # and (2) is "A.f = ...". - # - # at line (1) we do the lookup on type(a).f - # - # at line (2) we do a setattr on A. However, descr_setattr does also a - # lookup of type(A).f i.e. type.f, to check if by chance 'f' is a data - # descriptor. - # - # At the first iteration: - # (1) is a miss because it's the first lookup of A.f. The result is cached - # - # (2) is a miss because it is the first lookup of type.f. The - # (non-existant) result is cached. The version of A changes, and 'f' - # is changed to be a cell object, so that subsequest assignments won't - # change the version of A - # - # At the second iteration: - # (1) is a miss because the version of A changed just before - # (2) is a hit, because type.f is cached. The version of A no longer changes - # - # At the third and subsequent iterations: - # (1) is a hit, because the version of A did not change - # (2) is a hit, see above - assert cache_counter == (17, 3) def test_subclasses(self): @self.retry From pypy.commits at gmail.com Fri Aug 9 13:31:37 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 09 Aug 2019 10:31:37 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: fix test Message-ID: <5d4dadf9.1c69fb81.536f7.ac20@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97129:060585210eea Date: 2019-08-09 18:30 +0100 http://bitbucket.org/pypy/pypy/changeset/060585210eea/ Log: fix test diff --git a/pypy/interpreter/executioncontext.py b/pypy/interpreter/executioncontext.py --- a/pypy/interpreter/executioncontext.py +++ b/pypy/interpreter/executioncontext.py @@ -656,7 +656,7 @@ def make_finalizer_queue(W_Root, space): """Make a FinalizerQueue subclass which responds to GC finalizer events by 'firing' the UserDelAction class above. 
It does not - directly fetches the objects to finalize at all; they stay in the + directly fetches the objects to finalize at all; they stay in the GC-managed queue, and will only be fetched by UserDelAction (between bytecodes).""" diff --git a/pypy/interpreter/test/apptest_pyframe.py b/pypy/interpreter/test/apptest_pyframe.py --- a/pypy/interpreter/test/apptest_pyframe.py +++ b/pypy/interpreter/test/apptest_pyframe.py @@ -277,11 +277,11 @@ def test_trace_ignore_hidden(): import sys import _testing + _testing.Hidden # avoid module lazy-loading weirdness when untranslated l = [] def trace(a,b,c): - if a.f_code.co_name != "decode": - l.append((a,b,c)) + l.append((a,b,c)) def f(): h = _testing.Hidden() From pypy.commits at gmail.com Fri Aug 9 13:58:04 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 09 Aug 2019 10:58:04 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: Be safe and always enable a check that was meant only for Windows Message-ID: <5d4db42c.1c69fb81.a6f5e.e99b@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97130:a781f28cb336 Date: 2019-08-09 19:57 +0200 http://bitbucket.org/pypy/pypy/changeset/a781f28cb336/ Log: Be safe and always enable a check that was meant only for Windows but that makes sense in case there's an attack on the platform's strftime() diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py --- a/pypy/module/time/interp_time.py +++ b/pypy/module/time/interp_time.py @@ -639,7 +639,7 @@ if rffi.getintfield(buf_value, 'c_tm_isdst') < -1 or rffi.getintfield(buf_value, 'c_tm_isdst') > 1: raise oefmt(space.w_ValueError, "daylight savings flag out of range") - if _WIN: + if _WIN or space.config.translation.sandbox: # check that the format string contains only valid directives length = len(format) i = 0 From pypy.commits at gmail.com Fri Aug 9 15:06:19 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 09 Aug 2019 12:06:19 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: Try to add carefully logic in the GC that disables the most advanced Message-ID: <5d4dc42b.1c69fb81.b3816.2fd5@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97131:23010c892cff Date: 2019-08-09 21:05 +0200 http://bitbucket.org/pypy/pypy/changeset/23010c892cff/ Log: Try to add carefully logic in the GC that disables the most advanced features, incrementality and pinning. diff --git a/pypy/module/gc/moduledef.py b/pypy/module/gc/moduledef.py --- a/pypy/module/gc/moduledef.py +++ b/pypy/module/gc/moduledef.py @@ -16,7 +16,11 @@ def __init__(self, space, w_name): if (not space.config.translating or - space.config.translation.gctransformer == "framework"): + (space.config.translation.gctransformer == "framework" + and not space.config.translation.sandbox)): + # some of these functions allow app-level code to do invalid + # things by trying hard enough. For safety, in sandbox mode + # we don't provide any of them. self.appleveldefs.update({ 'dump_rpy_heap': 'app_referents.dump_rpy_heap', 'get_stats': 'app_referents.get_stats', diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py --- a/rpython/memory/gc/incminimark.py +++ b/rpython/memory/gc/incminimark.py @@ -519,10 +519,21 @@ bigobj = self.nonlarge_max + 1 self.max_number_of_pinned_objects = self.nursery_size / (bigobj * 2) + def safer_variant(self): + # When running in sandbox mode, turn off two features: incrementality + # and object pinning. 
This should be done in a way that cannot *add* + # any security bug, but it could in theory avoid bugs in this complex + # logic. + return self.config.sandbox + def enable(self): self.enabled = True def disable(self): + if self.safer_variant(): + # gc.disable() is ignored in this mode. It should not be + # allowed to disable major collections. + return self.enabled = False def isenabled(self): @@ -763,6 +774,16 @@ def collect(self, gen=2): """Do a minor (gen=0), start a major (gen=1), or do a full major (gen>=2) collection.""" + self.check_safe_gc_state() + if self.safer_variant(): + # gen < 0 is dangerous, and gen == 1 leaves the GC in the + # middle of a major collection. We disable these two modes + # in the safer variant. + if gen <= 0: + gen = 0 + else: + gen = 2 + # if gen < 0: # Dangerous! this makes no progress on the major GC cycle. # If called too often, the memory usage will keep increasing, @@ -786,6 +807,7 @@ # This does a complete minor and major collection. self.minor_and_major_collection() self.rrc_invoke_callback() + self.check_safe_gc_state() def collect_step(self): """ @@ -795,12 +817,26 @@ This is meant to be used together with gc.disable(), to have a fine-grained control on when the GC runs. """ + # This function should never be called in safer_variant() mode, + # because it leaves the GC in the middle of an incremental step. + # In PyPy the function gc.collect_step() is removed from --sandbox. + if self.safer_variant(): + out_of_memory("sandbox: collect_step() has been disabled") + return False + # old_state = self.gc_state self._minor_collection() self.major_collection_step() self.rrc_invoke_callback() return rgc._encode_states(old_state, self.gc_state) + def check_safe_gc_state(self): + if self.safer_variant(): + # in this variant, gc_state should always be SCANNING when the + # mutator runs + if self.gc_state != STATE_SCANNING: + out_of_memory("sandbox: unexpected internal GC state") + def minor_collection_with_major_progress(self, extrasize=0, force_enabled=False): """Do a minor collection. Then, if the GC is enabled and there @@ -808,6 +844,7 @@ step. If there is no major GC but the threshold is reached, start a major GC. 
""" + self.check_safe_gc_state() self._minor_collection() if not self.enabled and not force_enabled: return @@ -826,6 +863,10 @@ if self.gc_state != STATE_SCANNING or self.threshold_reached(extrasize): self.major_collection_step(extrasize) + if self.safer_variant(): + # finish the just-started major collection immediately + self.gc_step_until(STATE_SCANNING) + # See documentation in major_collection_step() for target invariants while self.gc_state != STATE_SCANNING: # target (A1) threshold = self.threshold_objects_made_old @@ -840,6 +881,7 @@ self.major_collection_step(extrasize) self.rrc_invoke_callback() + self.check_safe_gc_state() def collect_and_reserve(self, totalsize): @@ -1098,6 +1140,8 @@ return self.is_in_nursery(obj) def pin(self, obj): + if self.safer_variant(): # no pinning in the safer variant + return False if self.pinned_objects_in_nursery >= self.max_number_of_pinned_objects: return False if not self.is_in_nursery(obj): @@ -3074,6 +3118,9 @@ def rawrefcount_init(self, dealloc_trigger_callback): # see pypy/doc/discussion/rawrefcount.rst + if self.safer_variant(): + out_of_memory("sandbox: rawrefcount_init() not supported") + return if not self.rrc_enabled: self.rrc_p_list_young = self.AddressStack() self.rrc_p_list_old = self.AddressStack() From pypy.commits at gmail.com Fri Aug 9 15:18:50 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 09 Aug 2019 12:18:50 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: sandboxing disables sys.setrecursionlimit() too Message-ID: <5d4dc71a.1c69fb81.536f7.b950@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97132:a8758309335d Date: 2019-08-09 21:18 +0200 http://bitbucket.org/pypy/pypy/changeset/a8758309335d/ Log: sandboxing disables sys.setrecursionlimit() too diff --git a/pypy/module/sys/vm.py b/pypy/module/sys/vm.py --- a/pypy/module/sys/vm.py +++ b/pypy/module/sys/vm.py @@ -59,6 +59,13 @@ from rpython.rlib.rgc import increase_root_stack_depth if new_limit <= 0: raise oefmt(space.w_ValueError, "recursion limit must be positive") + # + if space.config.translation.sandbox: + if new_limit > space.sys.recursionlimit: + raise oefmt(space.w_RuntimeError, "sandbox: cannot increase the " + "recursion limit") + return + # space.sys.recursionlimit = new_limit _stack_set_length_fraction(new_limit * 0.001) increase_root_stack_depth(int(new_limit * 0.001 * 163840)) From pypy.commits at gmail.com Fri Aug 9 15:50:15 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 09 Aug 2019 12:50:15 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: translation fix Message-ID: <5d4dce77.1c69fb81.38f19.a7ff@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97133:202992141e0c Date: 2019-08-09 21:37 +0200 http://bitbucket.org/pypy/pypy/changeset/202992141e0c/ Log: translation fix diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -366,6 +366,8 @@ from pypy.module.gc.hook import LowLevelGcHooks if self.space is None: raise Exception("get_gchooks must be called after get_entry_point") + if self.space.config.translation.sandbox: + return None return self.space.fromcache(LowLevelGcHooks) def get_entry_point(self, config): diff --git a/pypy/module/sys/vm.py b/pypy/module/sys/vm.py --- a/pypy/module/sys/vm.py +++ b/pypy/module/sys/vm.py @@ -62,8 +62,8 @@ # if space.config.translation.sandbox: if new_limit > space.sys.recursionlimit: - raise oefmt(space.w_RuntimeError, "sandbox: cannot increase the " - "recursion 
limit") + msg = "sandbox: cannot increase the recursion limit" + space.warn(space.newtext(msg), space.w_RuntimeWarning) return # space.sys.recursionlimit = new_limit diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py --- a/rpython/memory/gc/incminimark.py +++ b/rpython/memory/gc/incminimark.py @@ -3119,6 +3119,8 @@ def rawrefcount_init(self, dealloc_trigger_callback): # see pypy/doc/discussion/rawrefcount.rst if self.safer_variant(): + # note that the rawrefcount_xxx() functions should not be callable + # if sandbox is enabled; see gctransform/framework.py out_of_memory("sandbox: rawrefcount_init() not supported") return if not self.rrc_enabled: diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py --- a/rpython/memory/gctransform/framework.py +++ b/rpython/memory/gctransform/framework.py @@ -479,7 +479,8 @@ annmodel.SomeInteger(nonneg=True)], annmodel.s_None) - if hasattr(GCClass, 'rawrefcount_init'): + if (hasattr(GCClass, 'rawrefcount_init') + and not self.translator.config.translation.sandbox): self.rawrefcount_init_ptr = getfn( GCClass.rawrefcount_init, [s_gc, SomePtr(GCClass.RAWREFCOUNT_DEALLOC_TRIGGER)], From pypy.commits at gmail.com Fri Aug 9 19:06:33 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 09 Aug 2019 16:06:33 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: hg merge default Message-ID: <5d4dfc79.1c69fb81.d59b9.3299@mx.google.com> Author: Armin Rigo Branch: py3.6 Changeset: r97136:9329c29caa2c Date: 2019-08-10 01:05 +0200 http://bitbucket.org/pypy/pypy/changeset/9329c29caa2c/ Log: hg merge default diff --git a/extra_tests/test_json.py b/extra_tests/test_json.py --- a/extra_tests/test_json.py +++ b/extra_tests/test_json.py @@ -27,3 +27,22 @@ @given(jsondata) def test_roundtrip(d): assert json.loads(json.dumps(d)) == d + +def test_skipkeys(): + assert json.dumps({Ellipsis: 42}, skipkeys=True) == '{}' + assert json.dumps({Ellipsis: 42, 3: 4}, skipkeys=True) == '{"3": 4}' + assert json.dumps({3: 4, Ellipsis: 42}, skipkeys=True) == '{"3": 4}' + assert json.dumps({Ellipsis: 42, NotImplemented: 43}, skipkeys=True) \ + == '{}' + assert json.dumps({3: 4, Ellipsis: 42, NotImplemented: 43}, skipkeys=True)\ + == '{"3": 4}' + assert json.dumps({Ellipsis: 42, 3: 4, NotImplemented: 43}, skipkeys=True)\ + == '{"3": 4}' + assert json.dumps({Ellipsis: 42, NotImplemented: 43, 3: 4}, skipkeys=True)\ + == '{"3": 4}' + assert json.dumps({3: 4, 5: 6, Ellipsis: 42}, skipkeys=True) \ + == '{"3": 4, "5": 6}' + assert json.dumps({3: 4, Ellipsis: 42, 5: 6}, skipkeys=True) \ + == '{"3": 4, "5": 6}' + assert json.dumps({Ellipsis: 42, 3: 4, 5: 6}, skipkeys=True) \ + == '{"3": 4, "5": 6}' diff --git a/lib-python/3/json/encoder.py b/lib-python/3/json/encoder.py --- a/lib-python/3/json/encoder.py +++ b/lib-python/3/json/encoder.py @@ -269,10 +269,6 @@ items = d.items() for key, v in items: - if first: - first = False - else: - builder.append(separator) if isinstance(key, str): pass # JavaScript is weakly typed for these, so it makes sense to @@ -292,6 +288,10 @@ continue else: raise TypeError("key " + repr(key) + " is not a string") + if first: + first = False + else: + builder.append(separator) builder.append('"') builder.append(self.__encoder(key)) builder.append('"') diff --git a/rpython/memory/gctransform/transform.py b/rpython/memory/gctransform/transform.py --- a/rpython/memory/gctransform/transform.py +++ b/rpython/memory/gctransform/transform.py @@ -17,6 +17,7 @@ from rpython.rlib.rarithmetic import ovfcheck from 
rpython.rtyper.lltypesystem.lloperation import llop from rpython.translator.simplify import cleanup_graph +from rpython.memory.gctransform.log import log class GcHighLevelOp(object): @@ -138,9 +139,15 @@ return any_inlining def inline_helpers_and_postprocess(self, graphs): + next_dot = 0 for graph in graphs: any_inlining = self.inline and self.inline_helpers_into(graph) self.postprocess_graph(graph, any_inlining) + # + next_dot -= 1 + if next_dot <= 0: + log.dot() + next_dot = 50 def postprocess_graph(self, graph, any_inlining): pass From pypy.commits at gmail.com Fri Aug 9 19:17:23 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 09 Aug 2019 16:17:23 -0700 (PDT) Subject: [pypy-commit] pypy default: json: bug-to-bug compatibility in a corner case Message-ID: <5d4dff03.1c69fb81.32a63.1b76@mx.google.com> Author: Armin Rigo Branch: Changeset: r97137:44c2b8930353 Date: 2019-08-10 01:16 +0200 http://bitbucket.org/pypy/pypy/changeset/44c2b8930353/ Log: json: bug-to-bug compatibility in a corner case diff --git a/extra_tests/test_json.py b/extra_tests/test_json.py --- a/extra_tests/test_json.py +++ b/extra_tests/test_json.py @@ -50,3 +50,8 @@ == '{"3": 4, "5": 6}' assert json.dumps({Ellipsis: 42, 3: 4, 5: 6}, skipkeys=True) \ == '{"3": 4, "5": 6}' + +def test_boolean_as_dict_key(): + # it's this way in CPython 2.x. In 3.x it was fixed + assert json.dumps({True: 5}) == '{"True": 5}' # != '{"true": 5}' + assert json.dumps({False: 5}) == '{"False": 5}' diff --git a/lib-python/2.7/json/encoder.py b/lib-python/2.7/json/encoder.py --- a/lib-python/2.7/json/encoder.py +++ b/lib-python/2.7/json/encoder.py @@ -301,9 +301,9 @@ elif isinstance(key, float): key = self.__floatstr(key) elif key is True: - key = 'true' + key = 'True' # XXX != 'true', bug-to-bug compatibility elif key is False: - key = 'false' + key = 'False' # XXX != 'false', bug-to-bug compatibility elif key is None: key = 'null' elif isinstance(key, (int, long)): From pypy.commits at gmail.com Fri Aug 9 19:17:25 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 09 Aug 2019 16:17:25 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: hg merge default Message-ID: <5d4dff05.1c69fb81.1c180.b008@mx.google.com> Author: Armin Rigo Branch: py3.6 Changeset: r97138:bbfb481934ce Date: 2019-08-10 01:16 +0200 http://bitbucket.org/pypy/pypy/changeset/bbfb481934ce/ Log: hg merge default diff --git a/extra_tests/test_json.py b/extra_tests/test_json.py --- a/extra_tests/test_json.py +++ b/extra_tests/test_json.py @@ -46,3 +46,7 @@ == '{"3": 4, "5": 6}' assert json.dumps({Ellipsis: 42, 3: 4, 5: 6}, skipkeys=True) \ == '{"3": 4, "5": 6}' + +def test_boolean_as_dict_key(): + assert json.dumps({True: 5}) == '{"true": 5}' + assert json.dumps({False: 5}) == '{"false": 5}' From pypy.commits at gmail.com Sat Aug 10 13:12:26 2019 From: pypy.commits at gmail.com (rlamy) Date: Sat, 10 Aug 2019 10:12:26 -0700 (PDT) Subject: [pypy-commit] pypy default: Actually use the spaceconfig in AppTestSocket Message-ID: <5d4efafa.1c69fb81.35d71.4c83@mx.google.com> Author: Ronan Lamy Branch: Changeset: r97140:a7aab2f5a528 Date: 2019-08-10 18:11 +0100 http://bitbucket.org/pypy/pypy/changeset/a7aab2f5a528/ Log: Actually use the spaceconfig in AppTestSocket diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -292,10 +292,10 @@ class AppTestSocket: - spaceconfig = dict(usemodules=['_socket', '_weakref', 'struct']) + spaceconfig = 
dict(usemodules=['_socket', 'struct'])

     def setup_class(cls):
-        cls.space = space
+        space = cls.space
         cls.w_udir = space.wrap(str(udir))
         cls.w_regex_search = space.wrap(interp2app(regex_search))

From pypy.commits at gmail.com Sat Aug 10 14:51:23 2019
From: pypy.commits at gmail.com (mattip)
Date: Sat, 10 Aug 2019 11:51:23 -0700 (PDT)
Subject: [pypy-commit] pypy default: fix for latest virtualenv HEAD
Message-ID: <5d4f122b.1c69fb81.1e213.8850@mx.google.com>

Author: Matti Picus
Branch:
Changeset: r97141:adc92f0ac6c1
Date: 2019-08-10 21:41 +0300
http://bitbucket.org/pypy/pypy/changeset/adc92f0ac6c1/

Log: fix for latest virtualenv HEAD

diff --git a/testrunner/get_info.py b/testrunner/get_info.py
--- a/testrunner/get_info.py
+++ b/testrunner/get_info.py
@@ -10,9 +10,10 @@
 BASE_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
 if sys.platform.startswith('win'):
     TARGET_NAME = r'pypy-c.exe'
-    TARGET_DIR = 'Scripts'
+    # PyPy uses bin as of PR https://github.com/pypa/virtualenv/pull/1400
+    TARGET_DIR = 'bin'
 else:
-    TARGET_NAME = 'pypy3-c'
+    TARGET_NAME = 'pypy-c'
     TARGET_DIR = 'bin'
 VENV_DIR = 'pypy-venv'

From pypy.commits at gmail.com Sat Aug 10 14:51:25 2019
From: pypy.commits at gmail.com (mattip)
Date: Sat, 10 Aug 2019 11:51:25 -0700 (PDT)
Subject: [pypy-commit] pypy py3.6: merge default into branch
Message-ID: <5d4f122d.1c69fb81.8c7d7.2a9e@mx.google.com>

Author: Matti Picus
Branch: py3.6
Changeset: r97142:018380765dc8
Date: 2019-08-10 21:50 +0300
http://bitbucket.org/pypy/pypy/changeset/018380765dc8/

Log: merge default into branch

diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py
--- a/pypy/module/_socket/test/test_sock_app.py
+++ b/pypy/module/_socket/test/test_sock_app.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 import sys, os
+import socket
 import pytest
 from pypy.tool.pytest.objspace import gettestobjspace
 from pypy.interpreter.gateway import interp2app
@@ -10,8 +11,6 @@
 def setup_module(mod):
     mod.space = gettestobjspace(usemodules=['_socket', 'array', 'struct', 'unicodedata'])
-    global socket
-    import socket
     mod.w_socket = space.appexec([], "(): import _socket as m; return m")
     mod.path = udir.join('fd')
     mod.path.write('fo')
@@ -57,15 +56,11 @@
                          "(_socket, name): return _socket.getservbyname(name, 'tcp')")
     assert space.unwrap(port) == 25
     # 1 arg version
-    if sys.version_info < (2, 4):
-        pytest.skip("getservbyname second argument is not optional before python 2.4")
     port = space.appexec([w_socket, space.wrap(name)],
                          "(_socket, name): return _socket.getservbyname(name)")
     assert space.unwrap(port) == 25

 def test_getservbyport():
-    if sys.version_info < (2, 4):
-        pytest.skip("getservbyport does not exist before python 2.4")
     port = 25
     # 2 args version
     name = space.appexec([w_socket, space.wrap(port)],
@@ -90,6 +85,7 @@
                         "(_socket, name): return _socket.getprotobyname(name)")
     assert space.unwrap(w_n) == socket.IPPROTO_TCP

+ at pytest.mark.skipif("not hasattr(socket, 'fromfd')")
 def test_ntohs():
     w_n = space.appexec([w_socket, space.wrap(125)],
                         "(_socket, x): return _socket.ntohs(x)")
@@ -132,9 +128,8 @@
                          "(_socket, p): return _socket.inet_ntoa(p)")
     assert space.utf8_w(w_ip) == ip

+ at pytest.mark.skipif("not hasattr(socket, 'inet_pton')")
 def test_pton_ntop_ipv4():
-    if not hasattr(socket, 'inet_pton'):
-        pytest.skip('No socket.inet_pton on this platform')
     tests = [
         ("123.45.67.89", "\x7b\x2d\x43\x59"),
         ("0.0.0.0", "\x00" * 4),
@@ -191,11 +186,6 @@
         "(_socket, ip): return _socket.inet_pton(_socket.AF_INET6, ip)")
     assert space.unwrap(w_packed) == packed

-def test_has_ipv6():
-    pytest.skip("has_ipv6 is always True on PyPy for now")
-    res = space.appexec([w_socket], "(_socket): return _socket.has_ipv6")
-    assert space.unwrap(res) == socket.has_ipv6
-
 def test_getaddrinfo():
     host = b"localhost"
     port = 25
@@ -311,7 +301,7 @@
                                        'unicodedata'])

     def setup_class(cls):
-        cls.space = space
+        space = cls.space
         cls.w_udir = space.wrap(str(udir))

     def teardown_class(cls):
@@ -762,10 +752,11 @@
         raises(TypeError, s.connect, (domain + '\x00', 80))

+ at pytest.mark.skipif(not hasattr(os, 'getpid'),
+                    reason="AF_NETLINK needs os.getpid()")
 class AppTestNetlink:
     def setup_class(cls):
-        if not hasattr(os, 'getpid'):
-            pytest.skip("AF_NETLINK needs os.getpid()")
+        cls.space = space
         if cls.runappdirect:
             import _socket
@@ -775,7 +766,6 @@
                               "return hasattr(_socket, 'AF_NETLINK')")
             if not space.is_true(w_ok):
                 pytest.skip("no AF_NETLINK on this platform")
-        cls.space = space

     def test_connect_to_kernel_netlink_routing_socket(self):
         import _socket, os
@@ -788,10 +778,11 @@
         assert b == 0

+ at pytest.mark.skipif(not hasattr(os, 'getuid') or os.getuid() != 0,
+                    reason="AF_PACKET needs to be root for testing")
 class AppTestPacket:
     def setup_class(cls):
-        if not hasattr(os, 'getuid') or os.getuid() != 0:
-            pytest.skip("AF_PACKET needs to be root for testing")
+        cls.space = space
         if cls.runappdirect:
             import _socket
             w_ok = hasattr(_socket, 'AF_PACKET')
@@ -800,7 +791,6 @@
                                   "return hasattr(_socket, 'AF_PACKET')")
             if not space.is_true(w_ok):
                 pytest.skip("no AF_PACKET on this platform")
-        cls.space = space

     def test_convert_between_tuple_and_sockaddr_ll(self):
         import _socket
diff --git a/testrunner/get_info.py b/testrunner/get_info.py
--- a/testrunner/get_info.py
+++ b/testrunner/get_info.py
@@ -10,7 +10,8 @@
 BASE_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
 if sys.platform.startswith('win'):
     TARGET_NAME = r'pypy3-c.exe'
-    TARGET_DIR = 'Scripts'
+    # PyPy uses bin as of PR https://github.com/pypa/virtualenv/pull/1400
+    TARGET_DIR = 'bin'
 else:
     TARGET_NAME = 'pypy3-c'
     TARGET_DIR = 'bin'
 VENV_DIR = 'pypy-venv'

From pypy.commits at gmail.com Sat Aug 10 14:58:57 2019
From: pypy.commits at gmail.com (mattip)
Date: Sat, 10 Aug 2019 11:58:57 -0700 (PDT)
Subject: [pypy-commit] buildbot default: somehow win32 builds are overlapping, try to restrict that
Message-ID: <5d4f13f1.1c69fb81.73527.45b4@mx.google.com>

Author: Matti Picus
Branch:
Changeset: r1087:878ba7516062
Date: 2019-08-10 21:58 +0300
http://bitbucket.org/pypy/buildbot/changeset/878ba7516062/

Log: somehow win32 builds are overlapping, try to restrict that

diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py
--- a/bot2/pypybuildbot/master.py
+++ b/bot2/pypybuildbot/master.py
@@ -274,7 +274,7 @@
                    ]
 extra_opts = {'xerxes': {'keepalive_interval': 15},
               'aurora': {'max_builds': 1},
-              'salsa': {'max_builds': 1},
+              'SalsaSalsa': {'max_builds': 1},
               'hhu-raspberry-pi': {'max_builds': 1},
               'hhu-pypy-pi': {'max_builds': 1},
               'hhu-pypy-pi2': {'max_builds': 1},
@@ -584,7 +584,7 @@
                    "slavenames": ["SalsaSalsa", "anubis64"],
                    'builddir' : JITWIN32,
                    'factory' : pypyJITTranslatedTestFactoryWin,
-                   "locks": [WinSlaveLock.access('counting')],
+                   "locks": [WinSlaveLock.access('exclusive')],
                    'category' : 'win32',
                   },
 # PPC

From pypy.commits at gmail.com Sat Aug 10 15:01:37 2019
From: pypy.commits at gmail.com (mattip)
Date: Sat, 10 Aug 2019 12:01:37 -0700 (PDT)
Subject: [pypy-commit] pypy default: typo (thanks Ronan)
Message-ID: <5d4f1491.1c69fb81.1518f.73e0@mx.google.com>

Author: Matti Picus
Branch:
Changeset: r97143:800256b0546d
Date: 2019-08-10 22:01 +0300
http://bitbucket.org/pypy/pypy/changeset/800256b0546d/

Log: typo (thanks Ronan)

diff --git a/testrunner/get_info.py b/testrunner/get_info.py
--- a/testrunner/get_info.py
+++ b/testrunner/get_info.py
@@ -9,11 +9,11 @@
 BASE_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
 if sys.platform.startswith('win'):
-    TARGET_NAME = r'pypy-c.exe'
+    TARGET_NAME = 'pypy-c.exe'
     # PyPy uses bin as of PR https://github.com/pypa/virtualenv/pull/1400
     TARGET_DIR = 'bin'
 else:
-    TARGET_NAME = 'pypy3-c'
+    TARGET_NAME = 'pypy-c'
     TARGET_DIR = 'bin'
 VENV_DIR = 'pypy-venv'

From pypy.commits at gmail.com Sat Aug 10 16:14:05 2019
From: pypy.commits at gmail.com (mattip)
Date: Sat, 10 Aug 2019 13:14:05 -0700 (PDT)
Subject: [pypy-commit] buildbot default: Backed out changeset: 878ba7516062
Message-ID: <5d4f258d.1c69fb81.f2d8c.4292@mx.google.com>

Author: Matti Picus
Branch:
Changeset: r1088:2d6fb9e9de0b
Date: 2019-08-10 23:13 +0300
http://bitbucket.org/pypy/buildbot/changeset/2d6fb9e9de0b/

Log: Backed out changeset: 878ba7516062

diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py
--- a/bot2/pypybuildbot/master.py
+++ b/bot2/pypybuildbot/master.py
@@ -274,7 +274,7 @@
                    ]
 extra_opts = {'xerxes': {'keepalive_interval': 15},
               'aurora': {'max_builds': 1},
-              'SalsaSalsa': {'max_builds': 1},
+              'salsa': {'max_builds': 1},
               'hhu-raspberry-pi': {'max_builds': 1},
               'hhu-pypy-pi': {'max_builds': 1},
               'hhu-pypy-pi2': {'max_builds': 1},
@@ -584,7 +584,7 @@
                    "slavenames": ["SalsaSalsa", "anubis64"],
                    'builddir' : JITWIN32,
                    'factory' : pypyJITTranslatedTestFactoryWin,
-                   "locks": [WinSlaveLock.access('exclusive')],
+                   "locks": [WinSlaveLock.access('counting')],
                    'category' : 'win32',
                   },
 # PPC

From pypy.commits at gmail.com Sat Aug 10 21:58:35 2019
From: pypy.commits at gmail.com (mattip)
Date: Sat, 10 Aug 2019 18:58:35 -0700 (PDT)
Subject: [pypy-commit] buildbot default: add platform-specific factory for aarch64 so that package.py gets the right archive-name
Message-ID: <5d4f764b.1c69fb81.1ab3c.5449@mx.google.com>

Author: Matti Picus
Branch:
Changeset: r1089:89139fc90e06
Date: 2019-08-11 04:58 +0300
http://bitbucket.org/pypy/buildbot/changeset/89139fc90e06/

Log: add platform-specific factory for aarch64 so that package.py gets the right archive-name

diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py
--- a/bot2/pypybuildbot/master.py
+++ b/bot2/pypybuildbot/master.py
@@ -175,6 +175,14 @@
     pypyjit=True,
     app_tests=True)

+pypyJITTranslatedTestFactoryAARCH64 = pypybuilds.Translated(
+    platform='aarch64',
+    translationArgs=jit_translation_args,
+    targetArgs=[],
+    lib_python=True,
+    pypyjit=True,
+    app_tests=True)
+
 pypyJITBenchmarkFactory = pypybuilds.JITBenchmark(host='benchmarker')
 pypyJITBenchmarkFactory64 = pypybuilds.JITBenchmark(platform='linux64',
                                                     host='benchmarker',
@@ -517,7 +525,7 @@
                    #'slavenames': ["bencher4", "speed-old"],
                    'slavenames': ["aarch64_aws"],
                    'builddir': JITAARCH64,
-                   'factory': pypyJITTranslatedTestFactory64,
+                   'factory': pypyJITTranslatedTestFactoryAARCH64,
                    'category': 'aarch64',
                    "locks": [AARCH64Lock.access('counting')],
                   },

From pypy.commits at gmail.com Sun Aug 11 10:07:54 2019
From: pypy.commits at gmail.com (arigo)
Date: Sun, 11 Aug 2019 07:07:54 -0700 (PDT)
Subject: [pypy-commit] pypy default: aarch64: fix test_gc_integration.py
Message-ID: <5d50213a.1c69fb81.ace15.c2ba@mx.google.com>

Author: Armin Rigo
Branch:
Changeset: r97144:381752e5cd15
Date: 2019-08-11 14:06 +0000
http://bitbucket.org/pypy/pypy/changeset/381752e5cd15/

Log: aarch64: fix test_gc_integration.py

diff --git a/rpython/jit/backend/aarch64/assembler.py b/rpython/jit/backend/aarch64/assembler.py
--- a/rpython/jit/backend/aarch64/assembler.py
+++ b/rpython/jit/backend/aarch64/assembler.py
@@ -31,6 +31,7 @@
         ResOpAssembler.__init__(self, cpu, translate_support_code)
         self.failure_recovery_code = [0, 0, 0, 0]
         self.wb_slowpath = [0, 0, 0, 0, 0]
+        self.stack_check_slowpath = 0

     def assemble_loop(self, jd_id, unique_id, logger, loopname, inputargs,
                       operations, looptoken, log):
@@ -675,7 +676,7 @@
         # new value of nursery_free_adr in r1 and the adr of the new object in
         # r0.

-        self.mc.B_ofs_cond(10 * 4, c.LO) # 4 for gcmap load, 5 for BL, 1 for B_ofs_cond
+        self.mc.B_ofs_cond(10 * 4, c.LS) # 4 for gcmap load, 5 for BL, 1 for B_ofs_cond
         self.mc.gen_load_int_full(r.ip1.value, rffi.cast(lltype.Signed, gcmap))
         self.mc.BL(self.malloc_slowpath)
@@ -698,7 +699,7 @@
         self.mc.CMP_rr(r.x1.value, r.ip0.value)
         #
-        self.mc.B_ofs_cond(40, c.LO) # see calculations in malloc_cond
+        self.mc.B_ofs_cond(40, c.LS) # see calculations in malloc_cond
         self.mc.gen_load_int_full(r.ip1.value, rffi.cast(lltype.Signed, gcmap))
         self.mc.BL(self.malloc_slowpath)
diff --git a/rpython/jit/backend/llsupport/test/test_gc_integration.py b/rpython/jit/backend/llsupport/test/test_gc_integration.py
--- a/rpython/jit/backend/llsupport/test/test_gc_integration.py
+++ b/rpython/jit/backend/llsupport/test/test_gc_integration.py
@@ -93,6 +93,8 @@
             assert nos == [0, 1, 33]
         elif self.cpu.backend_name.startswith('zarch'):
             assert nos == [0, 1, 29]
+        elif self.cpu.backend_name.startswith('aarch64'):
+            assert nos == [0, 1, 27]
         else:
             raise Exception("write the data here")
         assert frame.jf_frame[nos[0]]
@@ -672,6 +674,8 @@
         elif self.cpu.backend_name.startswith('zarch'):
             # 10 gpr, 14 fpr -> 25 is the first slot
             assert gcmap == [26, 27, 28]
+        elif self.cpu.backend_name.startswith('aarch64'):
+            assert gcmap == [24, 25, 26]
         elif self.cpu.IS_64_BIT:
             assert gcmap == [28, 29, 30]
         elif self.cpu.backend_name.startswith('arm'):

From pypy.commits at gmail.com Sun Aug 11 10:10:30 2019
From: pypy.commits at gmail.com (arigo)
Date: Sun, 11 Aug 2019 07:10:30 -0700 (PDT)
Subject: [pypy-commit] pypy default: aarch64: fix test_regalloc_integration.py
Message-ID: <5d5021d6.1c69fb81.b5c1f.ddfc@mx.google.com>

Author: Armin Rigo
Branch:
Changeset: r97145:a6010dc54fc4
Date: 2019-08-11 14:09 +0000
http://bitbucket.org/pypy/pypy/changeset/a6010dc54fc4/

Log: aarch64: fix test_regalloc_integration.py

diff --git a/rpython/jit/backend/aarch64/runner.py b/rpython/jit/backend/aarch64/runner.py
--- a/rpython/jit/backend/aarch64/runner.py
+++ b/rpython/jit/backend/aarch64/runner.py
@@ -62,6 +62,12 @@
     cast_ptr_to_int._annspecialcase_ = 'specialize:arglltype(0)'
     cast_ptr_to_int = staticmethod(cast_ptr_to_int)

+    def build_regalloc(self):
+        ''' for tests'''
+        from rpython.jit.backend.aarch64.regalloc import Regalloc
+        assert self.assembler is not None
+        return Regalloc(self.assembler)
+
 for _i, _r in enumerate(r.all_regs):
     assert CPU_ARM64.all_reg_indexes[_r.value] == _i

From pypy.commits at gmail.com Sun Aug 11 12:52:14 2019
From: pypy.commits at gmail.com (rlamy)
Date: Sun, 11 Aug 2019 09:52:14 -0700 (PDT)
Subject: [pypy-commit] pypy default: Fix test collection on windows and s390x
Message-ID: <5d5047be.1c69fb81.7b0a2.5726@mx.google.com>

Author: Ronan Lamy
Branch:
Changeset: r97146:597f4be1ae97
Date: 2019-08-11 17:51 +0100
http://bitbucket.org/pypy/pypy/changeset/597f4be1ae97/

Log: Fix test
collection on windows and s390x diff --git a/pypy/module/_cppyy/test/conftest.py b/pypy/module/_cppyy/test/conftest.py --- a/pypy/module/_cppyy/test/conftest.py +++ b/pypy/module/_cppyy/test/conftest.py @@ -5,7 +5,7 @@ @py.test.mark.tryfirst def pytest_runtest_setup(item): - if py.path.local.sysfind('genreflex') is None: + if not disabled and py.path.local.sysfind('genreflex') is None: import pypy.module._cppyy.capi.loadable_capi as lcapi if 'dummy' in lcapi.backend_library: # run only tests that are covered by the dummy backend and tests @@ -33,16 +33,18 @@ def pytest_ignore_collect(path, config): path = str(path) - if py.path.local.sysfind('genreflex') is None and config.option.runappdirect: - return commonprefix([path, THIS_DIR]) == THIS_DIR if disabled: - return commonprefix([path, THIS_DIR]) == THIS_DIR + if commonprefix([path, THIS_DIR]) == THIS_DIR: # workaround for bug in pytest<3.0.5 + return True disabled = None def pytest_configure(config): + global disabled if config.getoption('runappdirect') or config.getoption('direct_apptest'): - return # "can't run dummy tests in -A" + if py.path.local.sysfind('genreflex') is None: + disabled = True # can't run dummy tests in -A + return if py.path.local.sysfind('genreflex') is None: import pypy.module._cppyy.capi.loadable_capi as lcapi try: @@ -77,7 +79,6 @@ standalone=False) except CompilationError as e: if '-std=c++14' in str(e): - global disabled disabled = str(e) return raise diff --git a/pypy/module/_vmprof/conftest.py b/pypy/module/_vmprof/conftest.py --- a/pypy/module/_vmprof/conftest.py +++ b/pypy/module/_vmprof/conftest.py @@ -1,8 +1,13 @@ -import py, platform, sys +import pytest +import platform +import sys +from os.path import commonprefix, dirname -def pytest_collect_directory(path, parent): - if platform.machine() == 's390x': - py.test.skip("_vmprof tests skipped") - if sys.platform == 'win32': - py.test.skip("_vmprof tests skipped") -pytest_collect_file = pytest_collect_directory +THIS_DIR = dirname(__file__) + + at pytest.hookimpl(tryfirst=True) +def pytest_ignore_collect(path, config): + path = str(path) + if sys.platform == 'win32' or platform.machine() == 's390x': + if commonprefix([path, THIS_DIR]) == THIS_DIR: # workaround for bug in pytest<3.0.5 + return True From pypy.commits at gmail.com Sun Aug 11 13:41:12 2019 From: pypy.commits at gmail.com (mattip) Date: Sun, 11 Aug 2019 10:41:12 -0700 (PDT) Subject: [pypy-commit] buildbot default: add aarch64 to rpython page Message-ID: <5d505338.1c69fb81.c9c31.958a@mx.google.com> Author: Matti Picus Branch: Changeset: r1090:7ea68e40c299 Date: 2019-08-11 20:40 +0300 http://bitbucket.org/pypy/buildbot/changeset/7ea68e40c299/ Log: add aarch64 to rpython page diff --git a/master/templates/layout.html b/master/templates/layout.html --- a/master/templates/layout.html +++ b/master/templates/layout.html @@ -29,7 +29,7 @@ - Summary (trunk) - Summary (py3.6) - Summary - - RPython + - RPython - Nightly builds From pypy.commits at gmail.com Sun Aug 11 14:32:00 2019 From: pypy.commits at gmail.com (arigo) Date: Sun, 11 Aug 2019 11:32:00 -0700 (PDT) Subject: [pypy-commit] pypy default: Try to split the run of individual test files in this directory too Message-ID: <5d505f20.1c69fb81.bb2dd.5d3a@mx.google.com> Author: Armin Rigo Branch: Changeset: r97147:872b51a36497 Date: 2019-08-11 20:31 +0200 http://bitbucket.org/pypy/pypy/changeset/872b51a36497/ Log: Try to split the run of individual test files in this directory too diff --git a/pypy/testrunner_cfg.py b/pypy/testrunner_cfg.py --- 
a/pypy/testrunner_cfg.py +++ b/pypy/testrunner_cfg.py @@ -6,6 +6,7 @@ 'memory/test', 'jit/metainterp', 'jit/backend/arm', 'jit/backend/x86', 'jit/backend/zarch', 'module/cpyext/test', + 'jit/backend/aarch64', ] def collect_one_testdir(testdirs, reldir, tests): From pypy.commits at gmail.com Sun Aug 11 14:56:04 2019 From: pypy.commits at gmail.com (arigo) Date: Sun, 11 Aug 2019 11:56:04 -0700 (PDT) Subject: [pypy-commit] pypy py3.6-sandbox-2: hg merge sandbox-2 Message-ID: <5d5064c4.1c69fb81.bb2dd.6725@mx.google.com> Author: Armin Rigo Branch: py3.6-sandbox-2 Changeset: r97149:67130f4a3a0c Date: 2019-08-11 20:54 +0200 http://bitbucket.org/pypy/pypy/changeset/67130f4a3a0c/ Log: hg merge sandbox-2 diff too long, truncating to 2000 out of 2641 lines diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -44,6 +44,12 @@ #" _ssl", "_hashlib", "crypt" ]) +# --sandbox +sandbox_modules = default_modules.copy() +sandbox_modules.update([ + "array", "binascii", +]) + import rpython.rlib.rvmprof.cintf if rpython.rlib.rvmprof.cintf.IS_SUPPORTED: working_modules.add('_vmprof') @@ -271,7 +277,7 @@ def enable_allworkingmodules(config): modules = working_modules.copy() if config.translation.sandbox: - modules = default_modules + modules = sandbox_modules.copy() if config.translation.reverse_debugger: for mod in reverse_debugger_disable_modules: setattr(config.objspace.usemodules, mod, False) diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -326,9 +326,7 @@ config.translation.jit = True if config.translation.sandbox: - assert 0, ("--sandbox is not tested nor maintained. If you " - "really want to try it anyway, remove this line in " - "pypy/goal/targetpypystandalone.py.") + config.objspace.lonepycfiles = False if config.objspace.usemodules.cpyext: if config.translation.gc not in ('incminimark', 'boehm'): @@ -387,6 +385,8 @@ from pypy.module.gc.hook import LowLevelGcHooks if self.space is None: raise Exception("get_gchooks must be called after get_entry_point") + if self.space.config.translation.sandbox: + return None return self.space.fromcache(LowLevelGcHooks) def get_entry_point(self, config): diff --git a/pypy/module/gc/moduledef.py b/pypy/module/gc/moduledef.py --- a/pypy/module/gc/moduledef.py +++ b/pypy/module/gc/moduledef.py @@ -16,7 +16,11 @@ def __init__(self, space, w_name): if (not space.config.translating or - space.config.translation.gctransformer == "framework"): + (space.config.translation.gctransformer == "framework" + and not space.config.translation.sandbox)): + # some of these functions allow app-level code to do invalid + # things by trying hard enough. For safety, in sandbox mode + # we don't provide any of them. 
self.appleveldefs.update({ 'dump_rpy_heap': 'app_referents.dump_rpy_heap', 'get_stats': 'app_referents.get_stats', diff --git a/pypy/module/sys/vm.py b/pypy/module/sys/vm.py --- a/pypy/module/sys/vm.py +++ b/pypy/module/sys/vm.py @@ -66,6 +66,13 @@ from rpython.rlib.rgc import increase_root_stack_depth if new_limit <= 0: raise oefmt(space.w_ValueError, "recursion limit must be positive") + # + if space.config.translation.sandbox: + if new_limit > space.sys.recursionlimit: + msg = "sandbox: cannot increase the recursion limit" + space.warn(space.newtext(msg), space.w_RuntimeWarning) + return + # try: _stack_set_length_fraction(new_limit * 0.001) _stack_check_noinline() diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py --- a/pypy/module/time/interp_time.py +++ b/pypy/module/time/interp_time.py @@ -316,15 +316,15 @@ TM_P = lltype.Ptr(tm) c_time = external('time', [rffi.TIME_TP], rffi.TIME_T) c_gmtime = external('gmtime', [rffi.TIME_TP], TM_P, - save_err=rffi.RFFI_SAVE_ERRNO) -c_mktime = external('mktime', [TM_P], rffi.TIME_T) + save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe=True) +c_mktime = external('mktime', [TM_P], rffi.TIME_T, sandboxsafe=True) c_localtime = external('localtime', [rffi.TIME_TP], TM_P, - save_err=rffi.RFFI_SAVE_ERRNO) + save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe=True) if HAS_CLOCK_GETTIME: from rpython.rlib.rtime import TIMESPEC, c_clock_gettime from rpython.rlib.rtime import c_clock_settime, c_clock_getres if _POSIX: - c_tzset = external('tzset', [], lltype.Void) + c_tzset = external('tzset', [], lltype.Void, sandboxsafe=True) if _WIN: win_eci = ExternalCompilationInfo( includes = ["time.h"], @@ -363,7 +363,7 @@ rffi.INT, win_eci, calling_conv='c') c_strftime = external('strftime', [rffi.CCHARP, rffi.SIZE_T, rffi.CCHARP, TM_P], - rffi.SIZE_T) + rffi.SIZE_T, sandboxsafe=True) def _init_timezone(space): timezone = daylight = altzone = 0 @@ -853,7 +853,7 @@ rffi.setintfield(buf_value, "c_tm_year", rffi.getintfield(buf_value, "c_tm_year") - 1900) - if _WIN: + if _WIN or space.config.translation.sandbox: # check that the format string contains only valid directives length = len(format) i = 0 diff --git a/rpython/annotator/policy.py b/rpython/annotator/policy.py --- a/rpython/annotator/policy.py +++ b/rpython/annotator/policy.py @@ -72,29 +72,3 @@ for callback in bk.pending_specializations: callback() del bk.pending_specializations[:] - if annotator.added_blocks is not None: - all_blocks = annotator.added_blocks - else: - all_blocks = annotator.annotated - for block in list(all_blocks): - for i, instr in enumerate(block.operations): - if not isinstance(instr, (op.simple_call, op.call_args)): - continue - v_func = instr.args[0] - s_func = annotator.annotation(v_func) - if not hasattr(s_func, 'needs_sandboxing'): - continue - key = ('sandboxing', s_func.const) - if key not in bk.emulated_pbc_calls: - params_s = s_func.args_s - s_result = s_func.s_result - from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline - sandbox_trampoline = make_sandbox_trampoline( - s_func.name, params_s, s_result) - sandbox_trampoline._signature_ = [SomeTuple(items=params_s)], s_result - bk.emulate_pbc_call(key, bk.immutablevalue(sandbox_trampoline), params_s) - else: - s_trampoline = bk.emulated_pbc_calls[key][0] - sandbox_trampoline = s_trampoline.const - new = instr.replace({instr.args[0]: Constant(sandbox_trampoline)}) - block.operations[i] = new diff --git a/rpython/config/translationoption.py b/rpython/config/translationoption.py --- 
a/rpython/config/translationoption.py +++ b/rpython/config/translationoption.py @@ -115,8 +115,7 @@ BoolOption("sandbox", "Produce a fully-sandboxed executable", default=False, cmdline="--sandbox", requires=[("translation.thread", False)], - suggests=[("translation.gc", "generation"), - ("translation.gcrootfinder", "shadowstack")]), + suggests=[]), BoolOption("rweakref", "The backend supports RPython-level weakrefs", default=True), diff --git a/rpython/memory/gc/env.py b/rpython/memory/gc/env.py --- a/rpython/memory/gc/env.py +++ b/rpython/memory/gc/env.py @@ -132,7 +132,10 @@ # ---------- Linux2 ---------- def get_L2cache_linux2(): - arch = os.uname()[4] # machine + try: + arch = os.uname()[4] # machine + except OSError: # we may simulate a failure from sandboxing, for example + return -1 if arch.endswith('86') or arch == 'x86_64': return get_L2cache_linux2_cpuinfo() if arch in ('alpha', 'ppc'): diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py --- a/rpython/memory/gc/incminimark.py +++ b/rpython/memory/gc/incminimark.py @@ -519,10 +519,21 @@ bigobj = self.nonlarge_max + 1 self.max_number_of_pinned_objects = self.nursery_size / (bigobj * 2) + def safer_variant(self): + # When running in sandbox mode, turn off two features: incrementality + # and object pinning. This should be done in a way that cannot *add* + # any security bug, but it could in theory avoid bugs in this complex + # logic. + return self.config.sandbox + def enable(self): self.enabled = True def disable(self): + if self.safer_variant(): + # gc.disable() is ignored in this mode. It should not be + # allowed to disable major collections. + return self.enabled = False def isenabled(self): @@ -763,6 +774,16 @@ def collect(self, gen=2): """Do a minor (gen=0), start a major (gen=1), or do a full major (gen>=2) collection.""" + self.check_safe_gc_state() + if self.safer_variant(): + # gen < 0 is dangerous, and gen == 1 leaves the GC in the + # middle of a major collection. We disable these two modes + # in the safer variant. + if gen <= 0: + gen = 0 + else: + gen = 2 + # if gen < 0: # Dangerous! this makes no progress on the major GC cycle. # If called too often, the memory usage will keep increasing, @@ -786,6 +807,7 @@ # This does a complete minor and major collection. self.minor_and_major_collection() self.rrc_invoke_callback() + self.check_safe_gc_state() def collect_step(self): """ @@ -795,12 +817,26 @@ This is meant to be used together with gc.disable(), to have a fine-grained control on when the GC runs. """ + # This function should never be called in safer_variant() mode, + # because it leaves the GC in the middle of an incremental step. + # In PyPy the function gc.collect_step() is removed from --sandbox. + if self.safer_variant(): + out_of_memory("sandbox: collect_step() has been disabled") + return False + # old_state = self.gc_state self._minor_collection() self.major_collection_step() self.rrc_invoke_callback() return rgc._encode_states(old_state, self.gc_state) + def check_safe_gc_state(self): + if self.safer_variant(): + # in this variant, gc_state should always be SCANNING when the + # mutator runs + if self.gc_state != STATE_SCANNING: + out_of_memory("sandbox: unexpected internal GC state") + def minor_collection_with_major_progress(self, extrasize=0, force_enabled=False): """Do a minor collection. Then, if the GC is enabled and there @@ -808,6 +844,7 @@ step. If there is no major GC but the threshold is reached, start a major GC. 
""" + self.check_safe_gc_state() self._minor_collection() if not self.enabled and not force_enabled: return @@ -826,6 +863,10 @@ if self.gc_state != STATE_SCANNING or self.threshold_reached(extrasize): self.major_collection_step(extrasize) + if self.safer_variant(): + # finish the just-started major collection immediately + self.gc_step_until(STATE_SCANNING) + # See documentation in major_collection_step() for target invariants while self.gc_state != STATE_SCANNING: # target (A1) threshold = self.threshold_objects_made_old @@ -840,6 +881,7 @@ self.major_collection_step(extrasize) self.rrc_invoke_callback() + self.check_safe_gc_state() def collect_and_reserve(self, totalsize): @@ -1098,6 +1140,8 @@ return self.is_in_nursery(obj) def pin(self, obj): + if self.safer_variant(): # no pinning in the safer variant + return False if self.pinned_objects_in_nursery >= self.max_number_of_pinned_objects: return False if not self.is_in_nursery(obj): @@ -3074,6 +3118,11 @@ def rawrefcount_init(self, dealloc_trigger_callback): # see pypy/doc/discussion/rawrefcount.rst + if self.safer_variant(): + # note that the rawrefcount_xxx() functions should not be callable + # if sandbox is enabled; see gctransform/framework.py + out_of_memory("sandbox: rawrefcount_init() not supported") + return if not self.rrc_enabled: self.rrc_p_list_young = self.AddressStack() self.rrc_p_list_old = self.AddressStack() diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py --- a/rpython/memory/gctransform/framework.py +++ b/rpython/memory/gctransform/framework.py @@ -479,7 +479,8 @@ annmodel.SomeInteger(nonneg=True)], annmodel.s_None) - if hasattr(GCClass, 'rawrefcount_init'): + if (hasattr(GCClass, 'rawrefcount_init') + and not self.translator.config.translation.sandbox): self.rawrefcount_init_ptr = getfn( GCClass.rawrefcount_init, [s_gc, SomePtr(GCClass.RAWREFCOUNT_DEALLOC_TRIGGER)], diff --git a/rpython/rlib/debug.py b/rpython/rlib/debug.py --- a/rpython/rlib/debug.py +++ b/rpython/rlib/debug.py @@ -6,7 +6,6 @@ from rpython.rtyper.extregistry import ExtRegistryEntry from rpython.rlib.objectmodel import we_are_translated, always_inline from rpython.rlib.rarithmetic import is_valid_int, r_longlong -from rpython.rtyper.extfunc import register_external from rpython.rtyper.lltypesystem import lltype from rpython.rtyper.lltypesystem import rffi from rpython.translator.tool.cbuild import ExternalCompilationInfo @@ -460,7 +459,10 @@ def attach_gdb(): - import pdb; pdb.set_trace() + if not we_are_translated(): + import pdb; pdb.set_trace() + else: + impl_attach_gdb() if not sys.platform.startswith('win'): if sys.platform.startswith('linux'): @@ -586,11 +588,8 @@ d['separate_module_files'] = [cppfile] return ExternalCompilationInfo(**d) - ll_attach = rffi.llexternal("AttachToVS", [], lltype.Void, - compilation_info=make_vs_attach_eci()) + #ll_attach = rffi.llexternal("AttachToVS", [], lltype.Void, + # compilation_info=make_vs_attach_eci()) def impl_attach_gdb(): #ll_attach() print "AttachToVS is disabled at the moment (compilation failure)" - -register_external(attach_gdb, [], result=None, - export_name="impl_attach_gdb", llimpl=impl_attach_gdb) diff --git a/rpython/rlib/entrypoint.py b/rpython/rlib/entrypoint.py --- a/rpython/rlib/entrypoint.py +++ b/rpython/rlib/entrypoint.py @@ -41,7 +41,9 @@ return deco -pypy_debug_catch_fatal_exception = rffi.llexternal('pypy_debug_catch_fatal_exception', [], lltype.Void) +pypy_debug_catch_fatal_exception = rffi.llexternal( + 
'pypy_debug_catch_fatal_exception', [], lltype.Void, + sandboxsafe=True) def entrypoint_highlevel(key, argtypes, c_name=None): """ diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py --- a/rpython/rlib/objectmodel.py +++ b/rpython/rlib/objectmodel.py @@ -311,19 +311,12 @@ def sc_we_are_translated(ctx): return Constant(True) -def register_replacement_for(replaced_function, sandboxed_name=None): +def register_replacement_for(replaced_function): def wrap(func): from rpython.rtyper.extregistry import ExtRegistryEntry - # to support calling func directly - func._sandbox_external_name = sandboxed_name class ExtRegistry(ExtRegistryEntry): _about_ = replaced_function def compute_annotation(self): - if sandboxed_name: - config = self.bookkeeper.annotator.translator.config - if config.translation.sandbox: - func._sandbox_external_name = sandboxed_name - func._dont_inline_ = True return self.bookkeeper.immutablevalue(func) return func return wrap diff --git a/rpython/rlib/rfloat.py b/rpython/rlib/rfloat.py --- a/rpython/rlib/rfloat.py +++ b/rpython/rlib/rfloat.py @@ -5,7 +5,6 @@ from rpython.annotator.model import SomeString, SomeChar from rpython.rlib import objectmodel, unroll -from rpython.rtyper.extfunc import register_external from rpython.rtyper.tool import rffi_platform from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.rlib.objectmodel import not_rpython diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py --- a/rpython/rlib/rposix.py +++ b/rpython/rlib/rposix.py @@ -461,9 +461,7 @@ func = getattr(os, name, None) if func is None: return lambda f: f - return register_replacement_for( - func, - sandboxed_name='ll_os.ll_os_%s' % name) + return register_replacement_for(func) @specialize.arg(0) def handle_posix_error(name, result): @@ -1081,7 +1079,7 @@ # for more details. If this get's fixed we can use lltype.Signed # again. (The exact same issue occurs on ppc64 big-endian.) 
c_func = external(name, [rffi.INT], lltype.Signed, - macro=_MACRO_ON_POSIX) + macro=_MACRO_ON_POSIX, sandboxsafe=True) returning_int = name in ('WEXITSTATUS', 'WSTOPSIG', 'WTERMSIG') @replace_os_function(name) @@ -1992,9 +1990,12 @@ if sys.platform != 'win32': # These are actually macros on some/most systems - c_makedev = external('makedev', [rffi.INT, rffi.INT], rffi.INT, macro=True) - c_major = external('major', [rffi.INT], rffi.INT, macro=True) - c_minor = external('minor', [rffi.INT], rffi.INT, macro=True) + c_makedev = external('makedev', [rffi.INT, rffi.INT], rffi.INT, macro=True, + sandboxsafe=True) + c_major = external('major', [rffi.INT], rffi.INT, macro=True, + sandboxsafe=True) + c_minor = external('minor', [rffi.INT], rffi.INT, macro=True, + sandboxsafe=True) @replace_os_function('makedev') def makedev(maj, min): diff --git a/rpython/rlib/rposix_environ.py b/rpython/rlib/rposix_environ.py --- a/rpython/rlib/rposix_environ.py +++ b/rpython/rlib/rposix_environ.py @@ -5,7 +5,6 @@ from rpython.rlib.objectmodel import enforceargs # importing rposix here creates a cycle on Windows from rpython.rtyper.controllerentry import Controller -from rpython.rtyper.extfunc import register_external from rpython.rtyper.lltypesystem import rffi, lltype from rpython.translator.tool.cbuild import ExternalCompilationInfo @@ -97,9 +96,6 @@ # Lower-level interface: dummy placeholders and external registations def r_envkeys(): - just_a_placeholder - -def envkeys_llimpl(): environ = os_get_environ() result = [] i = 0 @@ -111,10 +107,6 @@ i += 1 return result -register_external(r_envkeys, [], [str0], # returns a list of strings - export_name='ll_os.ll_os_envkeys', - llimpl=envkeys_llimpl) - # ____________________________________________________________ def r_envitems(): @@ -190,18 +182,7 @@ return envitems_llimpl, getenv_llimpl, putenv_llimpl -envitems_llimpl, getenv_llimpl, putenv_llimpl = make_env_impls() - -register_external(r_envitems, [], [(str0, str0)], - export_name='ll_os.ll_os_envitems', - llimpl=envitems_llimpl) -register_external(r_getenv, [str0], - annmodel.SomeString(can_be_None=True, no_nul=True), - export_name='ll_os.ll_os_getenv', - llimpl=getenv_llimpl) -register_external(r_putenv, [str0, str0], annmodel.s_None, - export_name='ll_os.ll_os_putenv', - llimpl=putenv_llimpl) +r_envitems, r_getenv, r_putenv = make_env_impls() # ____________________________________________________________ @@ -215,7 +196,7 @@ os_unsetenv = llexternal('unsetenv', [rffi.CCHARP], rffi.INT, save_err=rffi.RFFI_SAVE_ERRNO) - def unsetenv_llimpl(name): + def r_unsetenv(name): with rffi.scoped_str2charp(name) as l_name: error = rffi.cast(lltype.Signed, os_unsetenv(l_name)) if error: @@ -229,7 +210,4 @@ del envkeepalive.byname[name] rffi.free_charp(l_oldstring) - register_external(r_unsetenv, [str0], annmodel.s_None, - export_name='ll_os.ll_os_unsetenv', - llimpl=unsetenv_llimpl) REAL_UNSETENV = True diff --git a/rpython/rlib/rtime.py b/rpython/rlib/rtime.py --- a/rpython/rlib/rtime.py +++ b/rpython/rlib/rtime.py @@ -103,9 +103,7 @@ func = getattr(pytime, name, None) if func is None: return lambda f: f - return register_replacement_for( - func, - sandboxed_name='ll_time.ll_time_%s' % name) + return register_replacement_for(func) config = rffi_platform.configure(CConfig) globals().update(config) diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -95,9 +95,7 @@ def compute_annotation(self): s_result = SomeExternalFunction( self.name, 
self.signature_args, self.signature_result) - if (self.bookkeeper.annotator.translator.config.translation.sandbox - and not self.safe_not_sandboxed): - s_result.needs_sandboxing = True + assert self.safe_not_sandboxed return s_result @@ -113,6 +111,12 @@ sandboxsafe: use True if the function performs no I/O (safe for --sandbox) """ + if not sandboxsafe: + raise Exception("Don't use the outdated register_external() protocol " + "to invoke external function; use instead " + "rffi.llexternal(). The old register_external() is " + "now only supported with safeboxsafe=True.") + if export_name is None: export_name = function.__name__ params_s = [annotation(arg) for arg in args] diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py --- a/rpython/rtyper/lltypesystem/rffi.py +++ b/rpython/rtyper/lltypesystem/rffi.py @@ -1405,12 +1405,14 @@ lltype.Void, releasegil=False, calling_conv='c', + sandboxsafe=True, ) c_memset = llexternal("memset", [VOIDP, lltype.Signed, SIZE_T], lltype.Void, releasegil=False, calling_conv='c', + sandboxsafe=True, ) diff --git a/rpython/rtyper/rtyper.py b/rpython/rtyper/rtyper.py --- a/rpython/rtyper/rtyper.py +++ b/rpython/rtyper/rtyper.py @@ -29,7 +29,6 @@ from rpython.rtyper.rclass import RootClassRepr from rpython.tool.pairtype import pair from rpython.translator.unsimplify import insert_empty_block -from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline class RTyperBackend(object): @@ -569,17 +568,6 @@ def getcallable(self, graph): def getconcretetype(v): return self.bindingrepr(v).lowleveltype - if self.annotator.translator.config.translation.sandbox: - try: - name = graph.func._sandbox_external_name - except AttributeError: - pass - else: - args_s = [v.annotation for v in graph.getargs()] - s_result = graph.getreturnvar().annotation - sandboxed = make_sandbox_trampoline(name, args_s, s_result) - return self.getannmixlevel().delayedfunction( - sandboxed, args_s, s_result) return getfunctionptr(graph, getconcretetype) diff --git a/rpython/rtyper/test/test_extfunc.py b/rpython/rtyper/test/test_extfunc.py --- a/rpython/rtyper/test/test_extfunc.py +++ b/rpython/rtyper/test/test_extfunc.py @@ -18,7 +18,7 @@ "NOT_RPYTHON" return eval("x+40") - register_external(b, [int], result=int) + register_external(b, [int], result=int, sandboxsafe=True) def f(): return b(2) @@ -42,7 +42,7 @@ return y + x register_external(c, [int, int], result=int, llimpl=llimpl, - export_name='ccc') + export_name='ccc', sandboxsafe=True) def f(): return c(3, 4) @@ -62,7 +62,8 @@ tuple as an argument so that register_external's behavior for tuple-taking functions can be verified. 
""" - register_external(function_with_tuple_arg, [(int,)], int) + register_external(function_with_tuple_arg, [(int,)], int, + sandboxsafe=True) def f(): return function_with_tuple_arg((1,)) @@ -82,11 +83,11 @@ """ def function_with_list(): pass - register_external(function_with_list, [[int]], int) + register_external(function_with_list, [[int]], int, sandboxsafe=True) def function_returning_list(): pass - register_external(function_returning_list, [], [int]) + register_external(function_returning_list, [], [int], sandboxsafe=True) def f(): return function_with_list(function_returning_list()) @@ -100,7 +101,7 @@ str0 = SomeString(no_nul=True) def os_open(s): pass - register_external(os_open, [str0], None) + register_external(os_open, [str0], None, sandboxsafe=True) def f(s): return os_open(s) policy = AnnotatorPolicy() @@ -121,7 +122,7 @@ def os_execve(l): pass - register_external(os_execve, [[str0]], None) + register_external(os_execve, [[str0]], None, sandboxsafe=True) def f(l): return os_execve(l) @@ -149,7 +150,7 @@ def a_llfakeimpl(i): return i * 3 register_external(a, [int], int, llimpl=a_llimpl, - llfakeimpl=a_llfakeimpl) + llfakeimpl=a_llfakeimpl, sandboxsafe=True) def f(i): return a(i) diff --git a/rpython/rtyper/test/test_llinterp.py b/rpython/rtyper/test/test_llinterp.py --- a/rpython/rtyper/test/test_llinterp.py +++ b/rpython/rtyper/test/test_llinterp.py @@ -584,7 +584,8 @@ def raising(): raise OSError(15, "abcd") - ext = register_external(external, [], llimpl=raising, llfakeimpl=raising) + ext = register_external(external, [], llimpl=raising, llfakeimpl=raising, + sandboxsafe=True) def f(): # this is a useful llfakeimpl that raises an exception diff --git a/rpython/translator/c/genc.py b/rpython/translator/c/genc.py --- a/rpython/translator/c/genc.py +++ b/rpython/translator/c/genc.py @@ -928,6 +928,10 @@ fi = incfilename.open('w') fi.write('#ifndef _PY_COMMON_HEADER_H\n#define _PY_COMMON_HEADER_H\n') + if database.sandbox: + from rpython.translator.sandbox import rsandbox + eci = eci.merge(rsandbox.extra_eci(database.translator.rtyper)) + # # Header # diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py --- a/rpython/translator/c/node.py +++ b/rpython/translator/c/node.py @@ -885,11 +885,18 @@ if db.sandbox: if (getattr(obj, 'external', None) is not None and not obj._safe_not_sandboxed): - from rpython.translator.sandbox import rsandbox - obj.__dict__['graph'] = rsandbox.get_sandbox_stub( - obj, db.translator.rtyper) - obj.__dict__.pop('_safe_not_sandboxed', None) - obj.__dict__.pop('external', None) + try: + sandbox_mapping = db.sandbox_mapping + except AttributeError: + sandbox_mapping = db.sandbox_mapping = {} + try: + obj = sandbox_mapping[obj] + except KeyError: + from rpython.translator.sandbox import rsandbox + llfunc = rsandbox.get_sandbox_stub( + obj, db.translator.rtyper) + sandbox_mapping[obj] = llfunc._obj + obj = llfunc._obj if forcename: name = forcename else: diff --git a/rpython/translator/sandbox/_marshal.py b/rpython/translator/sandbox/_marshal.py deleted file mode 100644 --- a/rpython/translator/sandbox/_marshal.py +++ /dev/null @@ -1,695 +0,0 @@ -# Copy of lib_pypy/_marshal.py needed by sandlib -"""Internal Python object serialization - -This module contains functions that can read and write Python values in a binary format. The format is specific to Python, but independent of machine architecture issues (e.g., you can write a Python value to a file on a PC, transport the file to a Sun, and read it back there). 
Details of the format may change between Python versions. -""" - -# NOTE: This module is used in the Python3 interpreter, but also by -# the "sandboxed" process. It must work for Python2 as well. - -import types -from _codecs import utf_8_decode, utf_8_encode - -try: - intern -except NameError: - from sys import intern - -try: from __pypy__ import builtinify -except ImportError: builtinify = lambda f: f - - -TYPE_NULL = '0' -TYPE_NONE = 'N' -TYPE_FALSE = 'F' -TYPE_TRUE = 'T' -TYPE_STOPITER = 'S' -TYPE_ELLIPSIS = '.' -TYPE_INT = 'i' -TYPE_INT64 = 'I' -TYPE_FLOAT = 'f' -TYPE_COMPLEX = 'x' -TYPE_LONG = 'l' -TYPE_STRING = 's' -TYPE_INTERNED = 't' -TYPE_STRINGREF= 'R' -TYPE_TUPLE = '(' -TYPE_LIST = '[' -TYPE_DICT = '{' -TYPE_CODE = 'c' -TYPE_UNICODE = 'u' -TYPE_UNKNOWN = '?' -TYPE_SET = '<' -TYPE_FROZENSET= '>' - -class _Marshaller: - - dispatch = {} - - def __init__(self, writefunc): - self._write = writefunc - - def dump(self, x): - try: - self.dispatch[type(x)](self, x) - except KeyError: - for tp in type(x).mro(): - func = self.dispatch.get(tp) - if func: - break - else: - raise ValueError("unmarshallable object") - func(self, x) - - def w_long64(self, x): - self.w_long(x) - self.w_long(x>>32) - - def w_long(self, x): - a = chr(x & 0xff) - x >>= 8 - b = chr(x & 0xff) - x >>= 8 - c = chr(x & 0xff) - x >>= 8 - d = chr(x & 0xff) - self._write(a + b + c + d) - - def w_short(self, x): - self._write(chr((x) & 0xff)) - self._write(chr((x>> 8) & 0xff)) - - def dump_none(self, x): - self._write(TYPE_NONE) - dispatch[type(None)] = dump_none - - def dump_bool(self, x): - if x: - self._write(TYPE_TRUE) - else: - self._write(TYPE_FALSE) - dispatch[bool] = dump_bool - - def dump_stopiter(self, x): - if x is not StopIteration: - raise ValueError("unmarshallable object") - self._write(TYPE_STOPITER) - dispatch[type(StopIteration)] = dump_stopiter - - def dump_ellipsis(self, x): - self._write(TYPE_ELLIPSIS) - - try: - dispatch[type(Ellipsis)] = dump_ellipsis - except NameError: - pass - - # In Python3, this function is not used; see dump_long() below. 
- def dump_int(self, x): - y = x>>31 - if y and y != -1: - self._write(TYPE_INT64) - self.w_long64(x) - else: - self._write(TYPE_INT) - self.w_long(x) - dispatch[int] = dump_int - - def dump_long(self, x): - self._write(TYPE_LONG) - sign = 1 - if x < 0: - sign = -1 - x = -x - digits = [] - while x: - digits.append(x & 0x7FFF) - x = x>>15 - self.w_long(len(digits) * sign) - for d in digits: - self.w_short(d) - try: - long - except NameError: - dispatch[int] = dump_long - else: - dispatch[long] = dump_long - - def dump_float(self, x): - write = self._write - write(TYPE_FLOAT) - s = repr(x) - write(chr(len(s))) - write(s) - dispatch[float] = dump_float - - def dump_complex(self, x): - write = self._write - write(TYPE_COMPLEX) - s = repr(x.real) - write(chr(len(s))) - write(s) - s = repr(x.imag) - write(chr(len(s))) - write(s) - try: - dispatch[complex] = dump_complex - except NameError: - pass - - def dump_string(self, x): - # XXX we can't check for interned strings, yet, - # so we (for now) never create TYPE_INTERNED or TYPE_STRINGREF - self._write(TYPE_STRING) - self.w_long(len(x)) - self._write(x) - dispatch[bytes] = dump_string - - def dump_unicode(self, x): - self._write(TYPE_UNICODE) - #s = x.encode('utf8') - s, len_s = utf_8_encode(x) - self.w_long(len_s) - self._write(s) - try: - unicode - except NameError: - dispatch[str] = dump_unicode - else: - dispatch[unicode] = dump_unicode - - def dump_tuple(self, x): - self._write(TYPE_TUPLE) - self.w_long(len(x)) - for item in x: - self.dump(item) - dispatch[tuple] = dump_tuple - - def dump_list(self, x): - self._write(TYPE_LIST) - self.w_long(len(x)) - for item in x: - self.dump(item) - dispatch[list] = dump_list - - def dump_dict(self, x): - self._write(TYPE_DICT) - for key, value in x.items(): - self.dump(key) - self.dump(value) - self._write(TYPE_NULL) - dispatch[dict] = dump_dict - - def dump_code(self, x): - self._write(TYPE_CODE) - self.w_long(x.co_argcount) - self.w_long(x.co_nlocals) - self.w_long(x.co_stacksize) - self.w_long(x.co_flags) - self.dump(x.co_code) - self.dump(x.co_consts) - self.dump(x.co_names) - self.dump(x.co_varnames) - self.dump(x.co_freevars) - self.dump(x.co_cellvars) - self.dump(x.co_filename) - self.dump(x.co_name) - self.w_long(x.co_firstlineno) - self.dump(x.co_lnotab) - try: - dispatch[types.CodeType] = dump_code - except NameError: - pass - - def dump_set(self, x): - self._write(TYPE_SET) - self.w_long(len(x)) - for each in x: - self.dump(each) - try: - dispatch[set] = dump_set - except NameError: - pass - - def dump_frozenset(self, x): - self._write(TYPE_FROZENSET) - self.w_long(len(x)) - for each in x: - self.dump(each) - try: - dispatch[frozenset] = dump_frozenset - except NameError: - pass - -class _NULL: - pass - -class _StringBuffer: - def __init__(self, value): - self.bufstr = value - self.bufpos = 0 - - def read(self, n): - pos = self.bufpos - newpos = pos + n - ret = self.bufstr[pos : newpos] - self.bufpos = newpos - return ret - - -class _Unmarshaller: - - dispatch = {} - - def __init__(self, readfunc): - self._read = readfunc - self._stringtable = [] - - def load(self): - c = self._read(1) - if not c: - raise EOFError - try: - return self.dispatch[c](self) - except KeyError: - raise ValueError("bad marshal code: %c (%d)" % (c, ord(c))) - - def r_short(self): - lo = ord(self._read(1)) - hi = ord(self._read(1)) - x = lo | (hi<<8) - if x & 0x8000: - x = x - 0x10000 - return x - - def r_long(self): - s = self._read(4) - a = ord(s[0]) - b = ord(s[1]) - c = ord(s[2]) - d = ord(s[3]) - x = a | (b<<8) | 
(c<<16) | (d<<24) - if d & 0x80 and x > 0: - x = -((1<<32) - x) - return int(x) - else: - return x - - def r_long64(self): - a = ord(self._read(1)) - b = ord(self._read(1)) - c = ord(self._read(1)) - d = ord(self._read(1)) - e = ord(self._read(1)) - f = ord(self._read(1)) - g = ord(self._read(1)) - h = ord(self._read(1)) - x = a | (b<<8) | (c<<16) | (d<<24) - x = x | (e<<32) | (f<<40) | (g<<48) | (h<<56) - if h & 0x80 and x > 0: - x = -((1<<64) - x) - return x - - def load_null(self): - return _NULL - dispatch[TYPE_NULL] = load_null - - def load_none(self): - return None - dispatch[TYPE_NONE] = load_none - - def load_true(self): - return True - dispatch[TYPE_TRUE] = load_true - - def load_false(self): - return False - dispatch[TYPE_FALSE] = load_false - - def load_stopiter(self): - return StopIteration - dispatch[TYPE_STOPITER] = load_stopiter - - def load_ellipsis(self): - return Ellipsis - dispatch[TYPE_ELLIPSIS] = load_ellipsis - - dispatch[TYPE_INT] = r_long - - dispatch[TYPE_INT64] = r_long64 - - def load_long(self): - size = self.r_long() - sign = 1 - if size < 0: - sign = -1 - size = -size - x = 0 - for i in range(size): - d = self.r_short() - x = x | (d<<(i*15)) - return x * sign - dispatch[TYPE_LONG] = load_long - - def load_float(self): - n = ord(self._read(1)) - s = self._read(n) - return float(s) - dispatch[TYPE_FLOAT] = load_float - - def load_complex(self): - n = ord(self._read(1)) - s = self._read(n) - real = float(s) - n = ord(self._read(1)) - s = self._read(n) - imag = float(s) - return complex(real, imag) - dispatch[TYPE_COMPLEX] = load_complex - - def load_string(self): - n = self.r_long() - return self._read(n) - dispatch[TYPE_STRING] = load_string - - def load_interned(self): - n = self.r_long() - ret = intern(self._read(n)) - self._stringtable.append(ret) - return ret - dispatch[TYPE_INTERNED] = load_interned - - def load_stringref(self): - n = self.r_long() - return self._stringtable[n] - dispatch[TYPE_STRINGREF] = load_stringref - - def load_unicode(self): - n = self.r_long() - s = self._read(n) - #ret = s.decode('utf8') - ret, len_ret = utf_8_decode(s) - return ret - dispatch[TYPE_UNICODE] = load_unicode - - def load_tuple(self): - return tuple(self.load_list()) - dispatch[TYPE_TUPLE] = load_tuple - - def load_list(self): - n = self.r_long() - list = [self.load() for i in range(n)] - return list - dispatch[TYPE_LIST] = load_list - - def load_dict(self): - d = {} - while 1: - key = self.load() - if key is _NULL: - break - value = self.load() - d[key] = value - return d - dispatch[TYPE_DICT] = load_dict - - def load_code(self): - argcount = self.r_long() - nlocals = self.r_long() - stacksize = self.r_long() - flags = self.r_long() - code = self.load() - consts = self.load() - names = self.load() - varnames = self.load() - freevars = self.load() - cellvars = self.load() - filename = self.load() - name = self.load() - firstlineno = self.r_long() - lnotab = self.load() - return types.CodeType(argcount, nlocals, stacksize, flags, code, consts, - names, varnames, filename, name, firstlineno, - lnotab, freevars, cellvars) - dispatch[TYPE_CODE] = load_code - - def load_set(self): - n = self.r_long() - args = [self.load() for i in range(n)] - return set(args) - dispatch[TYPE_SET] = load_set - - def load_frozenset(self): - n = self.r_long() - args = [self.load() for i in range(n)] - return frozenset(args) - dispatch[TYPE_FROZENSET] = load_frozenset - -# ________________________________________________________________ - -def _read(self, n): - pos = self.bufpos - newpos = pos + 
n - if newpos > len(self.bufstr): raise EOFError - ret = self.bufstr[pos : newpos] - self.bufpos = newpos - return ret - -def _read1(self): - ret = self.bufstr[self.bufpos] - self.bufpos += 1 - return ret - -def _r_short(self): - lo = ord(_read1(self)) - hi = ord(_read1(self)) - x = lo | (hi<<8) - if x & 0x8000: - x = x - 0x10000 - return x - -def _r_long(self): - # inlined this most common case - p = self.bufpos - s = self.bufstr - a = ord(s[p]) - b = ord(s[p+1]) - c = ord(s[p+2]) - d = ord(s[p+3]) - self.bufpos += 4 - x = a | (b<<8) | (c<<16) | (d<<24) - if d & 0x80 and x > 0: - x = -((1<<32) - x) - return int(x) - else: - return x - -def _r_long64(self): - a = ord(_read1(self)) - b = ord(_read1(self)) - c = ord(_read1(self)) - d = ord(_read1(self)) - e = ord(_read1(self)) - f = ord(_read1(self)) - g = ord(_read1(self)) - h = ord(_read1(self)) - x = a | (b<<8) | (c<<16) | (d<<24) - x = x | (e<<32) | (f<<40) | (g<<48) | (h<<56) - if h & 0x80 and x > 0: - x = -((1<<64) - x) - return x - -_load_dispatch = {} - -class _FastUnmarshaller: - - dispatch = {} - - def __init__(self, buffer): - self.bufstr = buffer - self.bufpos = 0 - self._stringtable = [] - - def load(self): - # make flow space happy - c = '?' - try: - c = self.bufstr[self.bufpos] - self.bufpos += 1 - return _load_dispatch[c](self) - except KeyError: - raise ValueError("bad marshal code: %c (%d)" % (c, ord(c))) - except IndexError: - raise EOFError - - def load_null(self): - return _NULL - dispatch[TYPE_NULL] = load_null - - def load_none(self): - return None - dispatch[TYPE_NONE] = load_none - - def load_true(self): - return True - dispatch[TYPE_TRUE] = load_true - - def load_false(self): - return False - dispatch[TYPE_FALSE] = load_false - - def load_stopiter(self): - return StopIteration - dispatch[TYPE_STOPITER] = load_stopiter - - def load_ellipsis(self): - return Ellipsis - dispatch[TYPE_ELLIPSIS] = load_ellipsis - - def load_int(self): - return _r_long(self) - dispatch[TYPE_INT] = load_int - - def load_int64(self): - return _r_long64(self) - dispatch[TYPE_INT64] = load_int64 - - def load_long(self): - size = _r_long(self) - sign = 1 - if size < 0: - sign = -1 - size = -size - x = 0 - for i in range(size): - d = _r_short(self) - x = x | (d<<(i*15)) - return x * sign - dispatch[TYPE_LONG] = load_long - - def load_float(self): - n = ord(_read1(self)) - s = _read(self, n) - return float(s) - dispatch[TYPE_FLOAT] = load_float - - def load_complex(self): - n = ord(_read1(self)) - s = _read(self, n) - real = float(s) - n = ord(_read1(self)) - s = _read(self, n) - imag = float(s) - return complex(real, imag) - dispatch[TYPE_COMPLEX] = load_complex - - def load_string(self): - n = _r_long(self) - return _read(self, n) - dispatch[TYPE_STRING] = load_string - - def load_interned(self): - n = _r_long(self) - ret = intern(_read(self, n)) - self._stringtable.append(ret) - return ret - dispatch[TYPE_INTERNED] = load_interned - - def load_stringref(self): - n = _r_long(self) - return self._stringtable[n] - dispatch[TYPE_STRINGREF] = load_stringref - - def load_unicode(self): - n = _r_long(self) - s = _read(self, n) - ret = s.decode('utf8') - return ret - dispatch[TYPE_UNICODE] = load_unicode - - def load_tuple(self): - return tuple(self.load_list()) - dispatch[TYPE_TUPLE] = load_tuple - - def load_list(self): - n = _r_long(self) - list = [] - for i in range(n): - list.append(self.load()) - return list - dispatch[TYPE_LIST] = load_list - - def load_dict(self): - d = {} - while 1: - key = self.load() - if key is _NULL: - break - value = 
self.load() - d[key] = value - return d - dispatch[TYPE_DICT] = load_dict - - def load_code(self): - argcount = _r_long(self) - nlocals = _r_long(self) - stacksize = _r_long(self) - flags = _r_long(self) - code = self.load() - consts = self.load() - names = self.load() - varnames = self.load() - freevars = self.load() - cellvars = self.load() - filename = self.load() - name = self.load() - firstlineno = _r_long(self) - lnotab = self.load() - return types.CodeType(argcount, nlocals, stacksize, flags, code, consts, - names, varnames, filename, name, firstlineno, - lnotab, freevars, cellvars) - dispatch[TYPE_CODE] = load_code - - def load_set(self): - n = _r_long(self) - args = [self.load() for i in range(n)] - return set(args) - dispatch[TYPE_SET] = load_set - - def load_frozenset(self): - n = _r_long(self) - args = [self.load() for i in range(n)] - return frozenset(args) - dispatch[TYPE_FROZENSET] = load_frozenset - -_load_dispatch = _FastUnmarshaller.dispatch - -# _________________________________________________________________ -# -# user interface - -version = 1 - - at builtinify -def dump(x, f, version=version): - # XXX 'version' is ignored, we always dump in a version-0-compatible format - m = _Marshaller(f.write) - m.dump(x) - - at builtinify -def load(f): - um = _Unmarshaller(f.read) - return um.load() - - at builtinify -def dumps(x, version=version): - # XXX 'version' is ignored, we always dump in a version-0-compatible format - buffer = [] - m = _Marshaller(buffer.append) - m.dump(x) - return ''.join(buffer) - - at builtinify -def loads(s): - um = _FastUnmarshaller(s) - return um.load() diff --git a/rpython/translator/sandbox/rsandbox.py b/rpython/translator/sandbox/rsandbox.py --- a/rpython/translator/sandbox/rsandbox.py +++ b/rpython/translator/sandbox/rsandbox.py @@ -4,17 +4,19 @@ and wait for an answer on STDIN. Enable with 'translate.py --sandbox'. 
""" import py +import sys -from rpython.rlib import rmarshal, types +from rpython.rlib import types +from rpython.rlib.objectmodel import specialize from rpython.rlib.signature import signature +from rpython.rlib.unroll import unrolling_iterable # ____________________________________________________________ # # Sandboxing code generator for external functions # -from rpython.rlib import rposix -from rpython.rtyper.lltypesystem import lltype, rffi +from rpython.rtyper.lltypesystem import lltype, llmemory, rffi from rpython.rtyper.llannotation import lltype_to_annotation from rpython.rtyper.annlowlevel import MixLevelHelperAnnotator from rpython.tool.ansi_print import AnsiLogger @@ -22,107 +24,63 @@ log = AnsiLogger("sandbox") -# a version of os.read() and os.write() that are not mangled -# by the sandboxing mechanism -ll_read_not_sandboxed = rposix.external('read', - [rffi.INT, rffi.CCHARP, rffi.SIZE_T], - rffi.SIZE_T, - sandboxsafe=True, - _nowrapper=True) +def getkind(TYPE, parent_function): + if TYPE is lltype.Void: + return 'v' + elif isinstance(TYPE, lltype.Primitive): + if TYPE is lltype.Float or TYPE is lltype.SingleFloat: + return 'f' + if TYPE is lltype.LongFloat: + log.WARNING("%r uses a 'long double' argument or return value; " + "sandboxing will export it only as 'double'" % + (parent_function,)) + return 'f' + if TYPE == llmemory.Address: + return 'p' + return 'i' + elif isinstance(TYPE, lltype.Ptr): + return 'p' + else: + log.WARNING("%r: sandboxing does not support argument " + "or return type %r" % (parent_function, TYPE)) + return 'v' -ll_write_not_sandboxed = rposix.external('write', - [rffi.INT, rffi.CCHARP, rffi.SIZE_T], - rffi.SIZE_T, - sandboxsafe=True, - _nowrapper=True) +def extra_eci(rtyper): + from rpython.translator.c.support import c_string_constant - at signature(types.int(), types.ptr(rffi.CCHARP.TO), types.int(), - returns=types.none()) -def writeall_not_sandboxed(fd, buf, length): - fd = rffi.cast(rffi.INT, fd) - while length > 0: - size = rffi.cast(rffi.SIZE_T, length) - count = rffi.cast(lltype.Signed, ll_write_not_sandboxed(fd, buf, size)) - if count <= 0: - raise IOError - length -= count - buf = lltype.direct_ptradd(lltype.direct_arrayitems(buf), count) - buf = rffi.cast(rffi.CCHARP, buf) + sandboxed_functions = getattr(rtyper, '_sandboxed_functions', []) + dump = ( + "Version: 20001\n" + + "Platform: %s\n" % sys.platform + + "Funcs: %s" % ' '.join(sorted(sandboxed_functions)) + ) + dump = c_string_constant(dump).replace('\n', '\\\n') + return rffi.ExternalCompilationInfo(separate_module_sources=[ + '#define RPY_SANDBOX_DUMP %s\n' % (dump,) + + py.path.local(__file__).join('..', 'src', 'rsandbox.c').read(), + ], + post_include_bits=[ + py.path.local(__file__).join('..', 'src', 'rsandbox.h').read(), + ]) -class FdLoader(rmarshal.Loader): - def __init__(self, fd): - rmarshal.Loader.__init__(self, "") - self.fd = fd - self.buflen = 4096 +def external(funcname, ARGS, RESULT): + return rffi.llexternal(funcname, ARGS, RESULT, + sandboxsafe=True, _nowrapper=True) - def need_more_data(self): - buflen = self.buflen - with lltype.scoped_alloc(rffi.CCHARP.TO, buflen) as buf: - buflen = rffi.cast(rffi.SIZE_T, buflen) - fd = rffi.cast(rffi.INT, self.fd) - count = ll_read_not_sandboxed(fd, buf, buflen) - count = rffi.cast(lltype.Signed, count) - if count <= 0: - raise IOError - self.buf += ''.join([buf[i] for i in range(count)]) - self.buflen *= 2 +rpy_sandbox_arg = { + 'i': external('rpy_sandbox_arg_i', [lltype.UnsignedLongLong], lltype.Void), + 'f': 
external('rpy_sandbox_arg_f', [lltype.Float], lltype.Void), + 'p': external('rpy_sandbox_arg_p', [llmemory.Address], lltype.Void), +} +rpy_sandbox_res = { + 'v': external('rpy_sandbox_res_v', [rffi.CCHARP], lltype.Void), + 'i': external('rpy_sandbox_res_i', [rffi.CCHARP], lltype.UnsignedLongLong), + 'f': external('rpy_sandbox_res_f', [rffi.CCHARP], lltype.Float), + 'p': external('rpy_sandbox_res_p', [rffi.CCHARP], llmemory.Address), +} -def sandboxed_io(buf): - STDIN = 0 - STDOUT = 1 - # send the buffer with the marshalled fnname and input arguments to STDOUT - with lltype.scoped_alloc(rffi.CCHARP.TO, len(buf)) as p: - for i in range(len(buf)): - p[i] = buf[i] - writeall_not_sandboxed(STDOUT, p, len(buf)) - # build a Loader that will get the answer from STDIN - loader = FdLoader(STDIN) - # check for errors - error = load_int(loader) - if error != 0: - reraise_error(error, loader) - else: - # no exception; the caller will decode the actual result - return loader - -def reraise_error(error, loader): - if error == 1: - raise OSError(load_int(loader), "external error") - elif error == 2: - raise IOError - elif error == 3: - raise OverflowError - elif error == 4: - raise ValueError - elif error == 5: - raise ZeroDivisionError - elif error == 6: - raise MemoryError - elif error == 7: - raise KeyError - elif error == 8: - raise IndexError - else: - raise RuntimeError - - - at signature(types.str(), returns=types.impossible()) -def not_implemented_stub(msg): - STDERR = 2 - with rffi.scoped_str2charp(msg + '\n') as buf: - writeall_not_sandboxed(STDERR, buf, len(msg) + 1) - raise RuntimeError(msg) # XXX in RPython, the msg is ignored - -def make_stub(fnname, msg): - """Build always-raising stub function to replace unsupported external.""" - log.WARNING(msg) - - def execute(*args): - not_implemented_stub(msg) - execute.__name__ = 'sandboxed_%s' % (fnname,) - return execute def sig_ll(fnobj): FUNCTYPE = lltype.typeOf(fnobj) @@ -130,47 +88,53 @@ s_result = lltype_to_annotation(FUNCTYPE.RESULT) return args_s, s_result -dump_string = rmarshal.get_marshaller(str) -load_int = rmarshal.get_loader(int) - def get_sandbox_stub(fnobj, rtyper): fnname = fnobj._name + FUNCTYPE = lltype.typeOf(fnobj) + arg_kinds = [getkind(ARG, fnname) for ARG in FUNCTYPE.ARGS] + result_kind = getkind(FUNCTYPE.RESULT, fnname) + + unroll_args = unrolling_iterable([ + (arg_kind, rpy_sandbox_arg[arg_kind], + lltype.typeOf(rpy_sandbox_arg[arg_kind]).TO.ARGS[0]) + for arg_kind in arg_kinds]) + + result_func = rpy_sandbox_res[result_kind] + RESTYPE = FUNCTYPE.RESULT + + try: + lst = rtyper._sandboxed_functions + except AttributeError: + lst = rtyper._sandboxed_functions = [] + name_and_sig = '%s(%s)%s' % (fnname, ''.join(arg_kinds), result_kind) + lst.append(name_and_sig) + log(name_and_sig) + name_and_sig = rffi.str2charp(name_and_sig, track_allocation=False) + + def execute(*args): + # + # serialize the arguments + i = 0 + for arg_kind, func, ARGTYPE in unroll_args: + if arg_kind == 'v': + continue + func(rffi.cast(ARGTYPE, args[i])) + i = i + 1 + # + # send the function name and the arguments and wait for an answer + result = result_func(name_and_sig) + # + # result the answer, if any + if RESTYPE is not lltype.Void: + return rffi.cast(RESTYPE, result) + execute.__name__ = 'sandboxed_%s' % (fnname,) + # args_s, s_result = sig_ll(fnobj) - msg = "Not implemented: sandboxing for external function '%s'" % (fnname,) - execute = make_stub(fnname, msg) return _annotate(rtyper, execute, args_s, s_result) -def make_sandbox_trampoline(fnname, 
args_s, s_result): - """Create a trampoline function with the specified signature. - - The trampoline is meant to be used in place of real calls to the external - function named 'fnname'. It marshals its input arguments, dumps them to - STDOUT, and waits for an answer on STDIN. - """ - try: - dump_arguments = rmarshal.get_marshaller(tuple(args_s)) - load_result = rmarshal.get_loader(s_result) - except (rmarshal.CannotMarshal, rmarshal.CannotUnmarshall) as e: - msg = "Cannot sandbox function '%s': %s" % (fnname, e) - execute = make_stub(fnname, msg) - else: - def execute(*args): - # marshal the function name and input arguments - buf = [] - dump_string(buf, fnname) - dump_arguments(buf, args) - # send the buffer and wait for the answer - loader = sandboxed_io(buf) - # decode the answer - result = load_result(loader) - loader.check_finished() - return result - execute.__name__ = 'sandboxed_%s' % (fnname,) - return execute - - def _annotate(rtyper, f, args_s, s_result): ann = MixLevelHelperAnnotator(rtyper) - graph = ann.getgraph(f, args_s, s_result) + llfunc = ann.delayedfunction(f, args_s, s_result, needtype=True) ann.finish() - return graph + ann.backend_optimize() + return llfunc diff --git a/rpython/translator/sandbox/sandboxio.py b/rpython/translator/sandbox/sandboxio.py new file mode 100644 --- /dev/null +++ b/rpython/translator/sandbox/sandboxio.py @@ -0,0 +1,150 @@ +import struct + + +class SandboxError(Exception): + """The sandboxed process misbehaved""" + + +class Ptr(object): + def __init__(self, addr): + self.addr = addr + + def __repr__(self): + return 'Ptr(%s)' % (hex(self.addr),) + + +_ptr_size = struct.calcsize("P") +_ptr_code = 'q' if _ptr_size == 8 else 'i' +_pack_one_ptr = struct.Struct("=" + _ptr_code).pack +_pack_one_longlong = struct.Struct("=q").pack +_pack_one_double = struct.Struct("=d").pack +_pack_one_int = struct.Struct("=i").pack +_pack_two_ptrs = struct.Struct("=" + _ptr_code + _ptr_code).pack +_unpack_one_ptr = struct.Struct("=" + _ptr_code).unpack + + +class SandboxedIO(object): + _message_decoders = {} + + + def __init__(self, popen): + self.popen = popen + self.child_stdin = popen.stdin + self.child_stdout = popen.stdout + + def close(self): + """Kill the subprocess and close the file descriptors to the pipe. + """ + if self.popen.returncode is None: + self.popen.terminate() + self.child_stdin.close() + self.child_stdout.close() + self.popen.stderr.close() + + def _read(self, count): + result = self.child_stdout.read(count) + if len(result) != count: + raise SandboxError( + "connection interrupted with the sandboxed process") + return result + + @staticmethod + def _make_message_decoder(data): + i1 = data.find('(') + i2 = data.find(')') + if not (i1 > 0 and i1 < i2 and i2 == len(data) - 2): + raise SandboxError( + "badly formatted data received from the sandboxed process") + pack_args = ['='] + for c in data[i1+1:i2]: + if c == 'p': + pack_args.append(_ptr_code) + elif c == 'i': + pack_args.append('q') + elif c == 'f': + pack_args.append('d') + elif c == 'v': + pass + else: + raise SandboxError( + "unsupported format string in parentheses: %r" % (data,)) + unpacker = struct.Struct(''.join(pack_args)) + decoder = unpacker, data[i1+1:i2] + + SandboxedIO._message_decoders[data] = decoder + return decoder + + def read_message(self): + """Wait for the next message and returns it. Raises EOFError if the + subprocess finished. Raises SandboxError if there is another kind + of detected misbehaviour. 
+ """ + ch = self.child_stdout.read(1) + if len(ch) == 0: + raise EOFError + n = ord(ch) + msg = self._read(n) + decoder = self._message_decoders.get(msg) + if decoder is None: + decoder = self._make_message_decoder(msg) + + unpacker, codes = decoder + raw_args = iter(unpacker.unpack(self._read(unpacker.size))) + args = [] + for c in codes: + if c == 'p': + args.append(Ptr(next(raw_args))) + elif c == 'v': + args.append(None) + else: + args.append(next(raw_args)) + return msg, args + + def read_buffer(self, ptr, length): + g = self.child_stdin + g.write("R" + _pack_two_ptrs(ptr.addr, length)) + g.flush() + return self._read(length) + + def read_charp(self, ptr, maxlen=-1): + g = self.child_stdin + g.write("Z" + _pack_two_ptrs(ptr.addr, maxlen)) + g.flush() + length = _unpack_one_ptr(self._read(_ptr_size))[0] + return self._read(length) + + def write_buffer(self, ptr, bytes_data): + g = self.child_stdin + g.write("W" + _pack_two_ptrs(ptr.addr, len(bytes_data))) + g.write(bytes_data) + # g.flush() not necessary here + + def write_result(self, result): + g = self.child_stdin + if result is None: + g.write('v') + elif isinstance(result, Ptr): + g.write('p' + _pack_one_ptr(result.addr)) + elif isinstance(result, float): + g.write('f' + _pack_one_double(result)) + else: + g.write('i' + _pack_one_longlong(result)) + g.flush() + + def set_errno(self, err): + g = self.child_stdin + g.write("E" + _pack_one_int(err)) + # g.flush() not necessary here + + def malloc(self, bytes_data): + g = self.child_stdin + g.write("M" + _pack_one_ptr(len(bytes_data))) + g.write(bytes_data) + g.flush() + addr = _unpack_one_ptr(self._read(_ptr_size))[0] + return Ptr(addr) + + def free(self, ptr): + g = self.child_stdin + g.write("F" + _pack_one_ptr(ptr.addr)) + # g.flush() not necessary here diff --git a/rpython/translator/sandbox/sandlib.py b/rpython/translator/sandbox/sandlib.py --- a/rpython/translator/sandbox/sandlib.py +++ b/rpython/translator/sandbox/sandlib.py @@ -18,65 +18,6 @@ from rpython.tool.ansi_print import AnsiLogger return AnsiLogger("sandlib") -# Note: we use lib_pypy/marshal.py instead of the built-in marshal -# for two reasons. The built-in module could be made to segfault -# or be attackable in other ways by sending malicious input to -# load(). Also, marshal.load(f) blocks with the GIL held when -# f is a pipe with no data immediately avaialble, preventing the -# _waiting_thread to run. -from rpython.translator.sandbox import _marshal as marshal - -# Non-marshal result types -RESULTTYPE_STATRESULT = object() -RESULTTYPE_LONGLONG = object() - -def read_message(f): - return marshal.load(f) - -def write_message(g, msg, resulttype=None): - if resulttype is None: - if sys.version_info < (2, 4): - marshal.dump(msg, g) - else: - marshal.dump(msg, g, 0) - elif resulttype is RESULTTYPE_STATRESULT: - # Hand-coded marshal for stat results that mimics what rmarshal expects. - # marshal.dump(tuple(msg)) would have been too easy. rmarshal insists - # on 64-bit ints at places, even when the value fits in 32 bits. 
- import struct - st = tuple(msg) - fmt = "iIIiiiIfff" - buf = [] - buf.append(struct.pack(" +#include +#include +#include +#include +#include + + +#define RPY_SANDBOX_ARGBUF 512 +#define RPY_SANDBOX_NAMEMAX 256 + +#define RPY_FD_STDIN 0 +#define RPY_FD_STDOUT 1 + +static char sand_argbuf[RPY_SANDBOX_ARGBUF]; +static size_t sand_nextarg = RPY_SANDBOX_NAMEMAX; +static int sand_dump_checked = 0; + + +static void sand_writeall(const char *buf, size_t count) +{ + while (count > 0) { + ssize_t result = write(RPY_FD_STDOUT, buf, count); + if (result <= 0) { + if (result == 0) { + fprintf(stderr, "sandbox: write(stdout) gives the result 0, " + "which is not expected\n"); + } + else { + perror("sandbox: write(stdout)"); + } + abort(); + } + if (result > count) { + fprintf(stderr, "sandbox: write(stdout) wrote more data than " + "request, which is not expected\n"); + abort(); + } + buf += result; + count -= result; + } +} + +static void sand_readall(char *buf, size_t count) +{ + while (count > 0) { + ssize_t result = read(RPY_FD_STDIN, buf, count); + if (result <= 0) { + if (result == 0) { + fprintf(stderr, "sandbox: stdin is closed, subprocess " + "interrupted\n"); + } + else { + perror("sandbox: read(stdin)"); + } + abort(); + } + if (result > count) { + fprintf(stderr, "sandbox: read(stdin) returned more data than " + "expected\n"); + abort(); + } + buf += result; + count -= result; + } +} + + From pypy.commits at gmail.com Sun Aug 11 15:48:32 2019 From: pypy.commits at gmail.com (mattip) Date: Sun, 11 Aug 2019 12:48:32 -0700 (PDT) Subject: [pypy-commit] pypy default: sync with upstream vmprof Message-ID: <5d507110.1c69fb81.23f61.2a33@mx.google.com> Author: Matti Picus Branch: Changeset: r97150:8cb85ca95940 Date: 2019-08-11 22:44 +0300 http://bitbucket.org/pypy/pypy/changeset/8cb85ca95940/ Log: sync with upstream vmprof diff --git a/rpython/rlib/rvmprof/src/shared/_vmprof.c b/rpython/rlib/rvmprof/src/shared/_vmprof.c --- a/rpython/rlib/rvmprof/src/shared/_vmprof.c +++ b/rpython/rlib/rvmprof/src/shared/_vmprof.c @@ -383,8 +383,22 @@ #ifdef VMPROF_UNIX static PyObject * -insert_real_time_thread(PyObject *module, PyObject * noargs) { +insert_real_time_thread(PyObject *module, PyObject * args) { ssize_t thread_count; + unsigned long thread_id = 0; + pthread_t th = pthread_self(); + + if (!PyArg_ParseTuple(args, "|k", &thread_id)) { + return NULL; + } + + if (thread_id) { +#if SIZEOF_LONG <= SIZEOF_PTHREAD_T + th = (pthread_t) thread_id; +#else + th = (pthread_t) *(unsigned long *) &thread_id; +#endif + } if (!vmprof_is_enabled()) { PyErr_SetString(PyExc_ValueError, "vmprof is not enabled"); @@ -397,15 +411,29 @@ } vmprof_aquire_lock(); - thread_count = insert_thread(pthread_self(), -1); + thread_count = insert_thread(th, -1); vmprof_release_lock(); return PyLong_FromSsize_t(thread_count); } static PyObject * -remove_real_time_thread(PyObject *module, PyObject * noargs) { +remove_real_time_thread(PyObject *module, PyObject * args) { ssize_t thread_count; + unsigned long thread_id = 0; + pthread_t th = pthread_self(); + + if (!PyArg_ParseTuple(args, "|k", &thread_id)) { + return NULL; + } + + if (thread_id) { +#if SIZEOF_LONG <= SIZEOF_PTHREAD_T + th = (pthread_t) thread_id; +#else + th = (pthread_t) *(unsigned long *) &thread_id; +#endif + } if (!vmprof_is_enabled()) { PyErr_SetString(PyExc_ValueError, "vmprof is not enabled"); @@ -418,7 +446,7 @@ } vmprof_aquire_lock(); - thread_count = remove_thread(pthread_self(), -1); + thread_count = remove_thread(th, -1); vmprof_release_lock(); return 
PyLong_FromSsize_t(thread_count); @@ -445,9 +473,9 @@ #ifdef VMPROF_UNIX {"get_profile_path", vmp_get_profile_path, METH_NOARGS, "Profile path the profiler logs to."}, - {"insert_real_time_thread", insert_real_time_thread, METH_NOARGS, + {"insert_real_time_thread", insert_real_time_thread, METH_VARARGS, "Insert a thread into the real time profiling list."}, - {"remove_real_time_thread", remove_real_time_thread, METH_NOARGS, + {"remove_real_time_thread", remove_real_time_thread, METH_VARARGS, "Remove a thread from the real time profiling list."}, #endif {NULL, NULL, 0, NULL} /* Sentinel */ diff --git a/rpython/rlib/rvmprof/src/shared/vmp_stack.c b/rpython/rlib/rvmprof/src/shared/vmp_stack.c --- a/rpython/rlib/rvmprof/src/shared/vmp_stack.c +++ b/rpython/rlib/rvmprof/src/shared/vmp_stack.c @@ -280,7 +280,7 @@ // this is possible because compiler align to 8 bytes. // if (func_addr != 0x0) { - depth = _write_native_stack((void*)(((intptr_t)func_addr) | 0x1), result, depth, max_depth); + depth = _write_native_stack((void*)(((uint64_t)func_addr) | 0x1), result, depth, max_depth); } } diff --git a/rpython/rlib/rvmprof/src/shared/vmprof_unix.c b/rpython/rlib/rvmprof/src/shared/vmprof_unix.c --- a/rpython/rlib/rvmprof/src/shared/vmprof_unix.c +++ b/rpython/rlib/rvmprof/src/shared/vmprof_unix.c @@ -244,11 +244,7 @@ if (commit) { commit_buffer(fd, p); } else { -#ifndef RPYTHON_VMPROF fprintf(stderr, "WARNING: canceled buffer, no stack trace was written\n"); -#else - fprintf(stderr, "WARNING: canceled buffer, no stack trace was written\n"); -#endif cancel_buffer(p); } } From pypy.commits at gmail.com Sun Aug 11 15:48:34 2019 From: pypy.commits at gmail.com (mattip) Date: Sun, 11 Aug 2019 12:48:34 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: merge default into branch Message-ID: <5d507112.1c69fb81.409b9.79a7@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97151:17f20f51c2cb Date: 2019-08-11 22:47 +0300 http://bitbucket.org/pypy/pypy/changeset/17f20f51c2cb/ Log: merge default into branch diff --git a/pypy/module/_cppyy/test/conftest.py b/pypy/module/_cppyy/test/conftest.py --- a/pypy/module/_cppyy/test/conftest.py +++ b/pypy/module/_cppyy/test/conftest.py @@ -5,7 +5,7 @@ @py.test.mark.tryfirst def pytest_runtest_setup(item): - if py.path.local.sysfind('genreflex') is None: + if not disabled and py.path.local.sysfind('genreflex') is None: import pypy.module._cppyy.capi.loadable_capi as lcapi if 'dummy' in lcapi.backend_library: # run only tests that are covered by the dummy backend and tests @@ -33,16 +33,18 @@ def pytest_ignore_collect(path, config): path = str(path) - if py.path.local.sysfind('genreflex') is None and config.option.runappdirect: - return commonprefix([path, THIS_DIR]) == THIS_DIR if disabled: - return commonprefix([path, THIS_DIR]) == THIS_DIR + if commonprefix([path, THIS_DIR]) == THIS_DIR: # workaround for bug in pytest<3.0.5 + return True disabled = None def pytest_configure(config): + global disabled if config.getoption('runappdirect') or config.getoption('direct_apptest'): - return # "can't run dummy tests in -A" + if py.path.local.sysfind('genreflex') is None: + disabled = True # can't run dummy tests in -A + return if py.path.local.sysfind('genreflex') is None: import pypy.module._cppyy.capi.loadable_capi as lcapi try: @@ -77,7 +79,6 @@ standalone=False) except CompilationError as e: if '-std=c++14' in str(e): - global disabled disabled = str(e) return raise diff --git a/pypy/module/_vmprof/conftest.py b/pypy/module/_vmprof/conftest.py --- 
a/pypy/module/_vmprof/conftest.py +++ b/pypy/module/_vmprof/conftest.py @@ -1,8 +1,13 @@ -import py, platform, sys +import pytest +import platform +import sys +from os.path import commonprefix, dirname -def pytest_collect_directory(path, parent): - if platform.machine() == 's390x': - py.test.skip("_vmprof tests skipped") - if sys.platform == 'win32': - py.test.skip("_vmprof tests skipped") -pytest_collect_file = pytest_collect_directory +THIS_DIR = dirname(__file__) + + at pytest.hookimpl(tryfirst=True) +def pytest_ignore_collect(path, config): + path = str(path) + if sys.platform == 'win32' or platform.machine() == 's390x': + if commonprefix([path, THIS_DIR]) == THIS_DIR: # workaround for bug in pytest<3.0.5 + return True diff --git a/pypy/testrunner_cfg.py b/pypy/testrunner_cfg.py --- a/pypy/testrunner_cfg.py +++ b/pypy/testrunner_cfg.py @@ -6,6 +6,7 @@ 'memory/test', 'jit/metainterp', 'jit/backend/arm', 'jit/backend/x86', 'jit/backend/zarch', 'module/cpyext/test', + 'jit/backend/aarch64', # python3 slowness ... 'module/_cffi_backend/test', 'module/__pypy__/test', ] diff --git a/rpython/jit/backend/aarch64/assembler.py b/rpython/jit/backend/aarch64/assembler.py --- a/rpython/jit/backend/aarch64/assembler.py +++ b/rpython/jit/backend/aarch64/assembler.py @@ -31,6 +31,7 @@ ResOpAssembler.__init__(self, cpu, translate_support_code) self.failure_recovery_code = [0, 0, 0, 0] self.wb_slowpath = [0, 0, 0, 0, 0] + self.stack_check_slowpath = 0 def assemble_loop(self, jd_id, unique_id, logger, loopname, inputargs, operations, looptoken, log): @@ -675,7 +676,7 @@ # new value of nursery_free_adr in r1 and the adr of the new object in # r0. - self.mc.B_ofs_cond(10 * 4, c.LO) # 4 for gcmap load, 5 for BL, 1 for B_ofs_cond + self.mc.B_ofs_cond(10 * 4, c.LS) # 4 for gcmap load, 5 for BL, 1 for B_ofs_cond self.mc.gen_load_int_full(r.ip1.value, rffi.cast(lltype.Signed, gcmap)) self.mc.BL(self.malloc_slowpath) @@ -698,7 +699,7 @@ self.mc.CMP_rr(r.x1.value, r.ip0.value) # - self.mc.B_ofs_cond(40, c.LO) # see calculations in malloc_cond + self.mc.B_ofs_cond(40, c.LS) # see calculations in malloc_cond self.mc.gen_load_int_full(r.ip1.value, rffi.cast(lltype.Signed, gcmap)) self.mc.BL(self.malloc_slowpath) diff --git a/rpython/jit/backend/aarch64/runner.py b/rpython/jit/backend/aarch64/runner.py --- a/rpython/jit/backend/aarch64/runner.py +++ b/rpython/jit/backend/aarch64/runner.py @@ -62,6 +62,12 @@ cast_ptr_to_int._annspecialcase_ = 'specialize:arglltype(0)' cast_ptr_to_int = staticmethod(cast_ptr_to_int) + def build_regalloc(self): + ''' for tests''' + from rpython.jit.backend.aarch64.regalloc import Regalloc + assert self.assembler is not None + return Regalloc(self.assembler) + for _i, _r in enumerate(r.all_regs): assert CPU_ARM64.all_reg_indexes[_r.value] == _i diff --git a/rpython/jit/backend/llsupport/test/test_gc_integration.py b/rpython/jit/backend/llsupport/test/test_gc_integration.py --- a/rpython/jit/backend/llsupport/test/test_gc_integration.py +++ b/rpython/jit/backend/llsupport/test/test_gc_integration.py @@ -93,6 +93,8 @@ assert nos == [0, 1, 33] elif self.cpu.backend_name.startswith('zarch'): assert nos == [0, 1, 29] + elif self.cpu.backend_name.startswith('aarch64'): + assert nos == [0, 1, 27] else: raise Exception("write the data here") assert frame.jf_frame[nos[0]] @@ -672,6 +674,8 @@ elif self.cpu.backend_name.startswith('zarch'): # 10 gpr, 14 fpr -> 25 is the first slot assert gcmap == [26, 27, 28] + elif self.cpu.backend_name.startswith('aarch64'): + assert gcmap == [24, 25, 26] elif 
self.cpu.IS_64_BIT: assert gcmap == [28, 29, 30] elif self.cpu.backend_name.startswith('arm'): diff --git a/rpython/jit/metainterp/test/test_float.py b/rpython/jit/metainterp/test/test_float.py --- a/rpython/jit/metainterp/test/test_float.py +++ b/rpython/jit/metainterp/test/test_float.py @@ -43,7 +43,7 @@ a = float(r_singlefloat(a)) a *= 4.25 return float(r_singlefloat(a)) - res = self.interp_operations(f, [-2.0]) + res = self.interp_operations(f, [-2.0], supports_singlefloats=True) assert res == -8.5 def test_cast_float_to_int(self): diff --git a/rpython/rlib/rvmprof/src/shared/_vmprof.c b/rpython/rlib/rvmprof/src/shared/_vmprof.c --- a/rpython/rlib/rvmprof/src/shared/_vmprof.c +++ b/rpython/rlib/rvmprof/src/shared/_vmprof.c @@ -383,8 +383,22 @@ #ifdef VMPROF_UNIX static PyObject * -insert_real_time_thread(PyObject *module, PyObject * noargs) { +insert_real_time_thread(PyObject *module, PyObject * args) { ssize_t thread_count; + unsigned long thread_id = 0; + pthread_t th = pthread_self(); + + if (!PyArg_ParseTuple(args, "|k", &thread_id)) { + return NULL; + } + + if (thread_id) { +#if SIZEOF_LONG <= SIZEOF_PTHREAD_T + th = (pthread_t) thread_id; +#else + th = (pthread_t) *(unsigned long *) &thread_id; +#endif + } if (!vmprof_is_enabled()) { PyErr_SetString(PyExc_ValueError, "vmprof is not enabled"); @@ -397,15 +411,29 @@ } vmprof_aquire_lock(); - thread_count = insert_thread(pthread_self(), -1); + thread_count = insert_thread(th, -1); vmprof_release_lock(); return PyLong_FromSsize_t(thread_count); } static PyObject * -remove_real_time_thread(PyObject *module, PyObject * noargs) { +remove_real_time_thread(PyObject *module, PyObject * args) { ssize_t thread_count; + unsigned long thread_id = 0; + pthread_t th = pthread_self(); + + if (!PyArg_ParseTuple(args, "|k", &thread_id)) { + return NULL; + } + + if (thread_id) { +#if SIZEOF_LONG <= SIZEOF_PTHREAD_T + th = (pthread_t) thread_id; +#else + th = (pthread_t) *(unsigned long *) &thread_id; +#endif + } if (!vmprof_is_enabled()) { PyErr_SetString(PyExc_ValueError, "vmprof is not enabled"); @@ -418,7 +446,7 @@ } vmprof_aquire_lock(); - thread_count = remove_thread(pthread_self(), -1); + thread_count = remove_thread(th, -1); vmprof_release_lock(); return PyLong_FromSsize_t(thread_count); @@ -445,9 +473,9 @@ #ifdef VMPROF_UNIX {"get_profile_path", vmp_get_profile_path, METH_NOARGS, "Profile path the profiler logs to."}, - {"insert_real_time_thread", insert_real_time_thread, METH_NOARGS, + {"insert_real_time_thread", insert_real_time_thread, METH_VARARGS, "Insert a thread into the real time profiling list."}, - {"remove_real_time_thread", remove_real_time_thread, METH_NOARGS, + {"remove_real_time_thread", remove_real_time_thread, METH_VARARGS, "Remove a thread from the real time profiling list."}, #endif {NULL, NULL, 0, NULL} /* Sentinel */ diff --git a/rpython/rlib/rvmprof/src/shared/vmp_stack.c b/rpython/rlib/rvmprof/src/shared/vmp_stack.c --- a/rpython/rlib/rvmprof/src/shared/vmp_stack.c +++ b/rpython/rlib/rvmprof/src/shared/vmp_stack.c @@ -280,7 +280,7 @@ // this is possible because compiler align to 8 bytes. 
// if (func_addr != 0x0) { - depth = _write_native_stack((void*)(((intptr_t)func_addr) | 0x1), result, depth, max_depth); + depth = _write_native_stack((void*)(((uint64_t)func_addr) | 0x1), result, depth, max_depth); } } diff --git a/rpython/rlib/rvmprof/src/shared/vmprof_unix.c b/rpython/rlib/rvmprof/src/shared/vmprof_unix.c --- a/rpython/rlib/rvmprof/src/shared/vmprof_unix.c +++ b/rpython/rlib/rvmprof/src/shared/vmprof_unix.c @@ -244,11 +244,7 @@ if (commit) { commit_buffer(fd, p); } else { -#ifndef RPYTHON_VMPROF fprintf(stderr, "WARNING: canceled buffer, no stack trace was written\n"); -#else - fprintf(stderr, "WARNING: canceled buffer, no stack trace was written\n"); -#endif cancel_buffer(p); } } diff --git a/testrunner/get_info.py b/testrunner/get_info.py --- a/testrunner/get_info.py +++ b/testrunner/get_info.py @@ -13,6 +13,7 @@ # PyPy uses bin as of PR https://github.com/pypa/virtualenv/pull/1400 TARGET_DIR = 'bin' else: + TARGET_NAME = 'pypy-c' TARGET_NAME = 'pypy3-c' TARGET_DIR = 'bin' VENV_DIR = 'pypy-venv' From pypy.commits at gmail.com Sun Aug 11 17:12:24 2019 From: pypy.commits at gmail.com (arigo) Date: Sun, 11 Aug 2019 14:12:24 -0700 (PDT) Subject: [pypy-commit] pypy py3.6-sandbox-2: fixes Message-ID: <5d5084b8.1c69fb81.417e1.ee7d@mx.google.com> Author: Armin Rigo Branch: py3.6-sandbox-2 Changeset: r97152:5fadf669fc02 Date: 2019-08-11 22:46 +0200 http://bitbucket.org/pypy/pypy/changeset/5fadf669fc02/ Log: fixes diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -326,7 +326,8 @@ config.translation.jit = True if config.translation.sandbox: - config.objspace.lonepycfiles = False + #config.objspace.lonepycfiles = False --- not available in py3.x + pass if config.objspace.usemodules.cpyext: if config.translation.gc not in ('incminimark', 'boehm'): diff --git a/rpython/rlib/rsiphash.py b/rpython/rlib/rsiphash.py --- a/rpython/rlib/rsiphash.py +++ b/rpython/rlib/rsiphash.py @@ -139,6 +139,8 @@ translator = hop.rtyper.annotator.translator if translator.config.translation.reverse_debugger: return # ignore and use the regular hash, with reverse-debugger + if translator.config.translation.sandbox: + return # ignore and use the regular hash, with sandboxing bk = hop.rtyper.annotator.bookkeeper s_callable = bk.immutablevalue(initialize_from_env) r_callable = hop.rtyper.getrepr(s_callable) From pypy.commits at gmail.com Sun Aug 11 17:12:26 2019 From: pypy.commits at gmail.com (arigo) Date: Sun, 11 Aug 2019 14:12:26 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Remove the only usage of open() in the py3.6 core parts (as found in the branch Message-ID: <5d5084ba.1c69fb81.2887a.127b@mx.google.com> Author: Armin Rigo Branch: py3.6 Changeset: r97153:4006ceea6169 Date: 2019-08-11 22:45 +0200 http://bitbucket.org/pypy/pypy/changeset/4006ceea6169/ Log: Remove the only usage of open() in the py3.6 core parts (as found in the branch py3.6-sandbox-2). Also wrap the raw RPython exceptions that can arise. 
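    (The new _readall() helper introduced in the diff below amounts to the
    following self-contained sketch in plain Python -- the RPython version
    additionally threads the space argument through and converts the OSError
    with wrap_oserror; the names used here are only illustrative.)

        import os

        def readall(filename):
            # open read-only and read in 8192-byte chunks until EOF;
            # any OSError simply propagates to the caller
            fd = os.open(filename, os.O_RDONLY, 0o400)
            try:
                chunks = []
                while True:
                    data = os.read(fd, 8192)
                    if not data:
                        break
                    chunks.append(data)
            finally:
                os.close(fd)
            return b''.join(chunks)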
diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -7,7 +7,7 @@ from pypy.interpreter.module import Module, init_extra_module_attrs from pypy.interpreter.gateway import interp2app, unwrap_spec from pypy.interpreter.typedef import TypeDef, generic_new_descr -from pypy.interpreter.error import OperationError, oefmt +from pypy.interpreter.error import OperationError, oefmt, wrap_oserror from pypy.interpreter.baseobjspace import W_Root, CannotHaveLock from pypy.interpreter.eval import Code from pypy.interpreter.pycode import PyCode @@ -77,6 +77,22 @@ lib_pypy = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'lib_pypy') +def _readall(space, filename): + try: + fd = os.open(filename, os.O_RDONLY, 0400) + try: + result = [] + while True: + data = os.read(fd, 8192) + if not data: + break + result.append(data) + finally: + os.close(fd) + except OSError as e: + raise wrap_oserror(space, e, filename) + return ''.join(result) + @unwrap_spec(modulename='fsencode', level=int) def importhook(space, modulename, w_globals=None, w_locals=None, w_fromlist=None, level=0): # A minimal version, that can only import builtin and lib_pypy modules! @@ -94,8 +110,7 @@ return space.getbuiltinmodule(modulename) ec = space.getexecutioncontext() - with open(os.path.join(lib_pypy, modulename + '.py')) as fp: - source = fp.read() + source = _readall(space, os.path.join(lib_pypy, modulename + '.py')) pathname = "" % modulename code_w = ec.compiler.compile(source, pathname, 'exec', 0) w_mod = add_module(space, space.newtext(modulename)) From pypy.commits at gmail.com Sun Aug 11 17:12:27 2019 From: pypy.commits at gmail.com (arigo) Date: Sun, 11 Aug 2019 14:12:27 -0700 (PDT) Subject: [pypy-commit] pypy py3.6-sandbox-2: hg merge py3.6 Message-ID: <5d5084bb.1c69fb81.bd7c9.427d@mx.google.com> Author: Armin Rigo Branch: py3.6-sandbox-2 Changeset: r97154:f19d31ff52b6 Date: 2019-08-11 22:55 +0200 http://bitbucket.org/pypy/pypy/changeset/f19d31ff52b6/ Log: hg merge py3.6 diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -7,7 +7,7 @@ from pypy.interpreter.module import Module, init_extra_module_attrs from pypy.interpreter.gateway import interp2app, unwrap_spec from pypy.interpreter.typedef import TypeDef, generic_new_descr -from pypy.interpreter.error import OperationError, oefmt +from pypy.interpreter.error import OperationError, oefmt, wrap_oserror from pypy.interpreter.baseobjspace import W_Root, CannotHaveLock from pypy.interpreter.eval import Code from pypy.interpreter.pycode import PyCode @@ -77,6 +77,22 @@ lib_pypy = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'lib_pypy') +def _readall(space, filename): + try: + fd = os.open(filename, os.O_RDONLY, 0400) + try: + result = [] + while True: + data = os.read(fd, 8192) + if not data: + break + result.append(data) + finally: + os.close(fd) + except OSError as e: + raise wrap_oserror(space, e, filename) + return ''.join(result) + @unwrap_spec(modulename='fsencode', level=int) def importhook(space, modulename, w_globals=None, w_locals=None, w_fromlist=None, level=0): # A minimal version, that can only import builtin and lib_pypy modules! 
@@ -94,8 +110,7 @@ return space.getbuiltinmodule(modulename) ec = space.getexecutioncontext() - with open(os.path.join(lib_pypy, modulename + '.py')) as fp: - source = fp.read() + source = _readall(space, os.path.join(lib_pypy, modulename + '.py')) pathname = "" % modulename code_w = ec.compiler.compile(source, pathname, 'exec', 0) w_mod = add_module(space, space.newtext(modulename)) From pypy.commits at gmail.com Sun Aug 11 17:12:29 2019 From: pypy.commits at gmail.com (arigo) Date: Sun, 11 Aug 2019 14:12:29 -0700 (PDT) Subject: [pypy-commit] pypy py3.6-sandbox-2: comment Message-ID: <5d5084bd.1c69fb81.8fa4b.8c41@mx.google.com> Author: Armin Rigo Branch: py3.6-sandbox-2 Changeset: r97155:7f7d17b89d46 Date: 2019-08-11 23:07 +0200 http://bitbucket.org/pypy/pypy/changeset/7f7d17b89d46/ Log: comment diff --git a/pypy/module/posix/interp_scandir.py b/pypy/module/posix/interp_scandir.py --- a/pypy/module/posix/interp_scandir.py +++ b/pypy/module/posix/interp_scandir.py @@ -47,7 +47,7 @@ path_prefix += u'\\' w_path_prefix = space.newtext(path_prefix) if rposix.HAVE_FSTATAT: - dirfd = rposix.c_dirfd(dirp) + dirfd = rposix.c_dirfd(dirp) # may return -1; errors are ignored else: dirfd = -1 return W_ScandirIterator(space, dirp, dirfd, w_path_prefix, result_is_bytes) From pypy.commits at gmail.com Sun Aug 11 17:12:31 2019 From: pypy.commits at gmail.com (arigo) Date: Sun, 11 Aug 2019 14:12:31 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: merge heads Message-ID: <5d5084bf.1c69fb81.9bf63.fb9b@mx.google.com> Author: Armin Rigo Branch: py3.6 Changeset: r97156:d21e9a6b4037 Date: 2019-08-11 23:11 +0200 http://bitbucket.org/pypy/pypy/changeset/d21e9a6b4037/ Log: merge heads diff --git a/pypy/module/_cppyy/test/conftest.py b/pypy/module/_cppyy/test/conftest.py --- a/pypy/module/_cppyy/test/conftest.py +++ b/pypy/module/_cppyy/test/conftest.py @@ -5,7 +5,7 @@ @py.test.mark.tryfirst def pytest_runtest_setup(item): - if py.path.local.sysfind('genreflex') is None: + if not disabled and py.path.local.sysfind('genreflex') is None: import pypy.module._cppyy.capi.loadable_capi as lcapi if 'dummy' in lcapi.backend_library: # run only tests that are covered by the dummy backend and tests @@ -33,16 +33,18 @@ def pytest_ignore_collect(path, config): path = str(path) - if py.path.local.sysfind('genreflex') is None and config.option.runappdirect: - return commonprefix([path, THIS_DIR]) == THIS_DIR if disabled: - return commonprefix([path, THIS_DIR]) == THIS_DIR + if commonprefix([path, THIS_DIR]) == THIS_DIR: # workaround for bug in pytest<3.0.5 + return True disabled = None def pytest_configure(config): + global disabled if config.getoption('runappdirect') or config.getoption('direct_apptest'): - return # "can't run dummy tests in -A" + if py.path.local.sysfind('genreflex') is None: + disabled = True # can't run dummy tests in -A + return if py.path.local.sysfind('genreflex') is None: import pypy.module._cppyy.capi.loadable_capi as lcapi try: @@ -77,7 +79,6 @@ standalone=False) except CompilationError as e: if '-std=c++14' in str(e): - global disabled disabled = str(e) return raise diff --git a/pypy/module/_vmprof/conftest.py b/pypy/module/_vmprof/conftest.py --- a/pypy/module/_vmprof/conftest.py +++ b/pypy/module/_vmprof/conftest.py @@ -1,8 +1,13 @@ -import py, platform, sys +import pytest +import platform +import sys +from os.path import commonprefix, dirname -def pytest_collect_directory(path, parent): - if platform.machine() == 's390x': - py.test.skip("_vmprof tests skipped") - if sys.platform == 'win32': - 
py.test.skip("_vmprof tests skipped") -pytest_collect_file = pytest_collect_directory +THIS_DIR = dirname(__file__) + + at pytest.hookimpl(tryfirst=True) +def pytest_ignore_collect(path, config): + path = str(path) + if sys.platform == 'win32' or platform.machine() == 's390x': + if commonprefix([path, THIS_DIR]) == THIS_DIR: # workaround for bug in pytest<3.0.5 + return True diff --git a/pypy/testrunner_cfg.py b/pypy/testrunner_cfg.py --- a/pypy/testrunner_cfg.py +++ b/pypy/testrunner_cfg.py @@ -6,6 +6,7 @@ 'memory/test', 'jit/metainterp', 'jit/backend/arm', 'jit/backend/x86', 'jit/backend/zarch', 'module/cpyext/test', + 'jit/backend/aarch64', # python3 slowness ... 'module/_cffi_backend/test', 'module/__pypy__/test', ] diff --git a/rpython/jit/backend/aarch64/assembler.py b/rpython/jit/backend/aarch64/assembler.py --- a/rpython/jit/backend/aarch64/assembler.py +++ b/rpython/jit/backend/aarch64/assembler.py @@ -31,6 +31,7 @@ ResOpAssembler.__init__(self, cpu, translate_support_code) self.failure_recovery_code = [0, 0, 0, 0] self.wb_slowpath = [0, 0, 0, 0, 0] + self.stack_check_slowpath = 0 def assemble_loop(self, jd_id, unique_id, logger, loopname, inputargs, operations, looptoken, log): @@ -675,7 +676,7 @@ # new value of nursery_free_adr in r1 and the adr of the new object in # r0. - self.mc.B_ofs_cond(10 * 4, c.LO) # 4 for gcmap load, 5 for BL, 1 for B_ofs_cond + self.mc.B_ofs_cond(10 * 4, c.LS) # 4 for gcmap load, 5 for BL, 1 for B_ofs_cond self.mc.gen_load_int_full(r.ip1.value, rffi.cast(lltype.Signed, gcmap)) self.mc.BL(self.malloc_slowpath) @@ -698,7 +699,7 @@ self.mc.CMP_rr(r.x1.value, r.ip0.value) # - self.mc.B_ofs_cond(40, c.LO) # see calculations in malloc_cond + self.mc.B_ofs_cond(40, c.LS) # see calculations in malloc_cond self.mc.gen_load_int_full(r.ip1.value, rffi.cast(lltype.Signed, gcmap)) self.mc.BL(self.malloc_slowpath) diff --git a/rpython/jit/backend/aarch64/runner.py b/rpython/jit/backend/aarch64/runner.py --- a/rpython/jit/backend/aarch64/runner.py +++ b/rpython/jit/backend/aarch64/runner.py @@ -62,6 +62,12 @@ cast_ptr_to_int._annspecialcase_ = 'specialize:arglltype(0)' cast_ptr_to_int = staticmethod(cast_ptr_to_int) + def build_regalloc(self): + ''' for tests''' + from rpython.jit.backend.aarch64.regalloc import Regalloc + assert self.assembler is not None + return Regalloc(self.assembler) + for _i, _r in enumerate(r.all_regs): assert CPU_ARM64.all_reg_indexes[_r.value] == _i diff --git a/rpython/jit/backend/llsupport/test/test_gc_integration.py b/rpython/jit/backend/llsupport/test/test_gc_integration.py --- a/rpython/jit/backend/llsupport/test/test_gc_integration.py +++ b/rpython/jit/backend/llsupport/test/test_gc_integration.py @@ -93,6 +93,8 @@ assert nos == [0, 1, 33] elif self.cpu.backend_name.startswith('zarch'): assert nos == [0, 1, 29] + elif self.cpu.backend_name.startswith('aarch64'): + assert nos == [0, 1, 27] else: raise Exception("write the data here") assert frame.jf_frame[nos[0]] @@ -672,6 +674,8 @@ elif self.cpu.backend_name.startswith('zarch'): # 10 gpr, 14 fpr -> 25 is the first slot assert gcmap == [26, 27, 28] + elif self.cpu.backend_name.startswith('aarch64'): + assert gcmap == [24, 25, 26] elif self.cpu.IS_64_BIT: assert gcmap == [28, 29, 30] elif self.cpu.backend_name.startswith('arm'): diff --git a/rpython/jit/metainterp/test/test_float.py b/rpython/jit/metainterp/test/test_float.py --- a/rpython/jit/metainterp/test/test_float.py +++ b/rpython/jit/metainterp/test/test_float.py @@ -43,7 +43,7 @@ a = float(r_singlefloat(a)) a *= 4.25 return 
float(r_singlefloat(a)) - res = self.interp_operations(f, [-2.0]) + res = self.interp_operations(f, [-2.0], supports_singlefloats=True) assert res == -8.5 def test_cast_float_to_int(self): diff --git a/rpython/rlib/rvmprof/src/shared/_vmprof.c b/rpython/rlib/rvmprof/src/shared/_vmprof.c --- a/rpython/rlib/rvmprof/src/shared/_vmprof.c +++ b/rpython/rlib/rvmprof/src/shared/_vmprof.c @@ -383,8 +383,22 @@ #ifdef VMPROF_UNIX static PyObject * -insert_real_time_thread(PyObject *module, PyObject * noargs) { +insert_real_time_thread(PyObject *module, PyObject * args) { ssize_t thread_count; + unsigned long thread_id = 0; + pthread_t th = pthread_self(); + + if (!PyArg_ParseTuple(args, "|k", &thread_id)) { + return NULL; + } + + if (thread_id) { +#if SIZEOF_LONG <= SIZEOF_PTHREAD_T + th = (pthread_t) thread_id; +#else + th = (pthread_t) *(unsigned long *) &thread_id; +#endif + } if (!vmprof_is_enabled()) { PyErr_SetString(PyExc_ValueError, "vmprof is not enabled"); @@ -397,15 +411,29 @@ } vmprof_aquire_lock(); - thread_count = insert_thread(pthread_self(), -1); + thread_count = insert_thread(th, -1); vmprof_release_lock(); return PyLong_FromSsize_t(thread_count); } static PyObject * -remove_real_time_thread(PyObject *module, PyObject * noargs) { +remove_real_time_thread(PyObject *module, PyObject * args) { ssize_t thread_count; + unsigned long thread_id = 0; + pthread_t th = pthread_self(); + + if (!PyArg_ParseTuple(args, "|k", &thread_id)) { + return NULL; + } + + if (thread_id) { +#if SIZEOF_LONG <= SIZEOF_PTHREAD_T + th = (pthread_t) thread_id; +#else + th = (pthread_t) *(unsigned long *) &thread_id; +#endif + } if (!vmprof_is_enabled()) { PyErr_SetString(PyExc_ValueError, "vmprof is not enabled"); @@ -418,7 +446,7 @@ } vmprof_aquire_lock(); - thread_count = remove_thread(pthread_self(), -1); + thread_count = remove_thread(th, -1); vmprof_release_lock(); return PyLong_FromSsize_t(thread_count); @@ -445,9 +473,9 @@ #ifdef VMPROF_UNIX {"get_profile_path", vmp_get_profile_path, METH_NOARGS, "Profile path the profiler logs to."}, - {"insert_real_time_thread", insert_real_time_thread, METH_NOARGS, + {"insert_real_time_thread", insert_real_time_thread, METH_VARARGS, "Insert a thread into the real time profiling list."}, - {"remove_real_time_thread", remove_real_time_thread, METH_NOARGS, + {"remove_real_time_thread", remove_real_time_thread, METH_VARARGS, "Remove a thread from the real time profiling list."}, #endif {NULL, NULL, 0, NULL} /* Sentinel */ diff --git a/rpython/rlib/rvmprof/src/shared/vmp_stack.c b/rpython/rlib/rvmprof/src/shared/vmp_stack.c --- a/rpython/rlib/rvmprof/src/shared/vmp_stack.c +++ b/rpython/rlib/rvmprof/src/shared/vmp_stack.c @@ -280,7 +280,7 @@ // this is possible because compiler align to 8 bytes. 
// if (func_addr != 0x0) { - depth = _write_native_stack((void*)(((intptr_t)func_addr) | 0x1), result, depth, max_depth); + depth = _write_native_stack((void*)(((uint64_t)func_addr) | 0x1), result, depth, max_depth); } } diff --git a/rpython/rlib/rvmprof/src/shared/vmprof_unix.c b/rpython/rlib/rvmprof/src/shared/vmprof_unix.c --- a/rpython/rlib/rvmprof/src/shared/vmprof_unix.c +++ b/rpython/rlib/rvmprof/src/shared/vmprof_unix.c @@ -244,11 +244,7 @@ if (commit) { commit_buffer(fd, p); } else { -#ifndef RPYTHON_VMPROF fprintf(stderr, "WARNING: canceled buffer, no stack trace was written\n"); -#else - fprintf(stderr, "WARNING: canceled buffer, no stack trace was written\n"); -#endif cancel_buffer(p); } } diff --git a/testrunner/get_info.py b/testrunner/get_info.py --- a/testrunner/get_info.py +++ b/testrunner/get_info.py @@ -13,6 +13,7 @@ # PyPy uses bin as of PR https://github.com/pypa/virtualenv/pull/1400 TARGET_DIR = 'bin' else: + TARGET_NAME = 'pypy-c' TARGET_NAME = 'pypy3-c' TARGET_DIR = 'bin' VENV_DIR = 'pypy-venv' From pypy.commits at gmail.com Sun Aug 11 18:19:14 2019 From: pypy.commits at gmail.com (andrewjlawrence) Date: Sun, 11 Aug 2019 15:19:14 -0700 (PDT) Subject: [pypy-commit] pypy winconsoleio: Added a few more functions Message-ID: <5d509462.1c69fb81.4e043.b18a@mx.google.com> Author: andrewjlawrence Branch: winconsoleio Changeset: r97157:d892abac55ac Date: 2019-08-11 23:17 +0100 http://bitbucket.org/pypy/pypy/changeset/d892abac55ac/ Log: Added a few more functions diff --git a/pypy/module/_io/__init__.py b/pypy/module/_io/__init__.py --- a/pypy/module/_io/__init__.py +++ b/pypy/module/_io/__init__.py @@ -23,7 +23,7 @@ 'BufferedRWPair': 'interp_bufferedio.W_BufferedRWPair', 'BufferedRandom': 'interp_bufferedio.W_BufferedRandom', 'TextIOWrapper': 'interp_textio.W_TextIOWrapper', - '_WindowsConsoleIO': 'interp_win32consoleio.W_WinConsoleIO', + 'WindowsConsoleIO': 'interp_win32consoleio.W_WinConsoleIO', 'open': 'interp_io.open', 'IncrementalNewlineDecoder': 'interp_textio.W_IncrementalNewlineDecoder', diff --git a/pypy/module/_io/interp_io.py b/pypy/module/_io/interp_io.py --- a/pypy/module/_io/interp_io.py +++ b/pypy/module/_io/interp_io.py @@ -95,7 +95,8 @@ rawclass = W_FileIO if _WIN32: from pypy.module._io.interp_win32consoleio import W_WinConsoleIO, _pyio_get_console_type - if _pyio_get_console_type(space, w_file) != '\0': + type = _pyio_get_console_type(space, w_file) + if type != '\0': rawclass = W_WinConsoleIO encoding = "utf-8" diff --git a/pypy/module/_io/interp_win32consoleio.py b/pypy/module/_io/interp_win32consoleio.py --- a/pypy/module/_io/interp_win32consoleio.py +++ b/pypy/module/_io/interp_win32consoleio.py @@ -16,6 +16,10 @@ SMALLBUF = 4 +def err_closed(space): + raise oefmt(space.w_ValueError, + "I/O operation on closed file") + def _get_console_type(handle): mode = lltype.malloc(rwin32.LPDWORD.TO,0,flavor='raw') peek_count = lltype.malloc(rwin32.LPDWORD.TO,0,flavor='raw') @@ -42,7 +46,7 @@ return '\0' return _get_console_type(handle) - + return None decoded = space.fsdecode_w(w_path_or_fd) if not decoded: return '\0' @@ -54,14 +58,15 @@ m = '\0' # In CPython the _wcsicmp function is used to perform case insensitive comparison - normdecoded = unicodedata.normalize("NFKD", decoded.lower()) - if normdecoded == unicodedata.normalize("NFKD", "CONIN$".lower()): + decoded.lower() + if not rwin32.wcsicmp(decoded_wstr, "CONIN$"): m = 'r' - elif normdecoded == unicodedata.normalize("NFKD", "CONOUT$".lower()): + elif not rwin32.wcsicmp(decoded_wstr, "CONOUT$"): m = 'w' - 
elif normdecoded == unicodedata.normalize("NFKD", "CON".lower()): + elif not rwin32.wcsicmp(decoded_wstr, "CON"): m = 'x' + if m != '\0': return m @@ -85,15 +90,13 @@ if length >= 4 and pname_buf[3] == '\\' and \ (pname_buf[2] == '.' or pname_buf[2] == '?') and \ pname_buf[1] == '\\' and pname_buf[0] == '\\': - pname_buf += 4 - normdecoded = unicodedata.normalize("NFKD", decoded.lower()) - if normdecoded == unicodedata.normalize("NFKD", "CONIN$".lower()): - m = 'r' - elif normdecoded == unicodedata.normalize("NFKD", "CONOUT$".lower()): - m = 'w' - elif normdecoded == unicodedata.normalize("NFKD", "CON".lower()): - m = 'x' - + pname_buf += 4 + if not rwin32.wcsicmp(decoded_wstr, "CONIN$"): + m = 'r' + elif not rwin32.wcsicmp(decoded_wstr, "CONOUT$"): + m = 'w' + elif not rwin32.wcsicmp(decoded_wstr, "CON"): + m = 'x' lltype.free(pname_buf, flavor='raw') return m @@ -109,11 +112,12 @@ self.closehandle = 0 self.blksize = 0 - def _internal_close(self, space): - pass + # def _internal_close(self, space): + # pass @unwrap_spec(w_mode=WrappedDefault("r"), w_closefd=WrappedDefault(True), w_opener=WrappedDefault(None)) def descr_init(self, space, w_nameobj, w_mode, w_closefd, w_opener): + return None #self.fd = -1 #self.created = 0 name = None @@ -220,7 +224,22 @@ lltype.free(self.buf, flavor='raw') return None - + + def readable_w(self, space): + if self.handle == rwin32.INVALID_HANDLE_VALUE: + return err_closed(space) + return space.newbool(self.readable) + + def writable_w(self, space): + if self.handle == rwin32.INVALID_HANDLE_VALUE: + return err_closed(space) + return space.newbool(self.writable) + + def isatty_w(self, space): + if self.handle == rwin32.INVALID_HANDLE_VALUE: + return err_closed(space) + return space.newbool(True) + def repr_w(self, space): typename = space.type(self).name try: @@ -241,12 +260,26 @@ self.fd = rwin32.open_osfhandle(self.handle, rwin32._O_RDONLY | rwin32._O_BINARY) if self.fd < 0: return err_mode("fileno") - return space.newint(self.fd) - + + def readinto_w(self, space): + if self.handle == rwin32.INVALID_HANDLE_VALUE: + return err_closed(space) + + + def get_blksize(self,space): + return space.newint(self.blksize) + W_WinConsoleIO.typedef = TypeDef( - '_io._WinConsoleIO', W_WinConsoleIO.typedef, + '_io.WinConsoleIO', W_RawIOBase.typedef, __new__ = generic_new_descr(W_WinConsoleIO), __init__ = interp2app(W_WinConsoleIO.descr_init), __repr__ = interp2app(W_WinConsoleIO.repr_w), + + readable = interp2app(W_WinConsoleIO.readable_w), + writable = interp2app(W_WinConsoleIO.writable_w), + isatty = interp2app(W_WinConsoleIO.isatty_w), + readinto = interp2app(W_WinConsoleIO.readinto_w), + + _blksize = GetSetProperty(W_WinConsoleIO.get_blksize), ) diff --git a/rpython/rlib/rwin32.py b/rpython/rlib/rwin32.py --- a/rpython/rlib/rwin32.py +++ b/rpython/rlib/rwin32.py @@ -255,6 +255,8 @@ fd = _open_osfhandle(handle, flags) with FdValidator(fd): return fd + + wcsicmp = rffi.llexternal('_wcsicmp', [rffi.CWCHARP, rffi.CWCHARP], rffi.INT) def build_winerror_to_errno(): """Build a dictionary mapping windows error numbers to POSIX errno. From pypy.commits at gmail.com Sun Aug 11 19:59:19 2019 From: pypy.commits at gmail.com (stefanor) Date: Sun, 11 Aug 2019 16:59:19 -0700 (PDT) Subject: [pypy-commit] pypy default: On GNU/Hurd use the sysconf() method (some day, it'll have SMP...) 
Message-ID: <5d50abd7.1c69fb81.6c597.0fc4@mx.google.com> Author: Stefano Rivera Branch: Changeset: r97158:7fc3484b9993 Date: 2019-08-11 20:49 -0300 http://bitbucket.org/pypy/pypy/changeset/7fc3484b9993/ Log: On GNU/Hurd use the sysconf() method (some day, it'll have SMP...) diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py --- a/rpython/rlib/rposix.py +++ b/rpython/rlib/rposix.py @@ -2542,7 +2542,7 @@ post_include_bits=['RPY_EXTERN int rpy_cpu_count(void);'] # cpu count for linux, windows and mac (+ bsds) # note that the code is copied from cpython and split up here -if sys.platform.startswith('linux'): +if sys.platform.startswith(('linux', 'gnu')): cpucount_eci = ExternalCompilationInfo(includes=["unistd.h"], separate_module_sources=[""" RPY_EXTERN int rpy_cpu_count(void) { From pypy.commits at gmail.com Mon Aug 12 03:45:12 2019 From: pypy.commits at gmail.com (arigo) Date: Mon, 12 Aug 2019 00:45:12 -0700 (PDT) Subject: [pypy-commit] pypy default: fix test_buffer for platforms which refuse misaligned accesses Message-ID: <5d511908.1c69fb81.94ca.f65b@mx.google.com> Author: Armin Rigo Branch: Changeset: r97159:8dd6a15f4357 Date: 2019-08-12 07:44 +0000 http://bitbucket.org/pypy/pypy/changeset/8dd6a15f4357/ Log: fix test_buffer for platforms which refuse misaligned accesses diff --git a/rpython/rlib/test/test_buffer.py b/rpython/rlib/test/test_buffer.py --- a/rpython/rlib/test/test_buffer.py +++ b/rpython/rlib/test/test_buffer.py @@ -125,8 +125,8 @@ class TestSubBufferTypedReadDirect(BaseTypedReadTest): def read(self, TYPE, data, offset): - buf = StringBuffer('xx' + data) - subbuf = SubBuffer(buf, 2, len(data)) + buf = StringBuffer('x' * 16 + data) + subbuf = SubBuffer(buf, 16, len(data)) return subbuf.typed_read(TYPE, offset) From pypy.commits at gmail.com Mon Aug 12 05:45:03 2019 From: pypy.commits at gmail.com (mattip) Date: Mon, 12 Aug 2019 02:45:03 -0700 (PDT) Subject: [pypy-commit] pypy default: fix tests for win32 Message-ID: <5d51351f.1c69fb81.ae618.0631@mx.google.com> Author: Matti Picus Branch: Changeset: r97160:d08393b13818 Date: 2019-08-12 11:54 +0300 http://bitbucket.org/pypy/pypy/changeset/d08393b13818/ Log: fix tests for win32 diff --git a/extra_tests/ctypes_tests/test_win32.py b/extra_tests/ctypes_tests/test_win32.py --- a/extra_tests/ctypes_tests/test_win32.py +++ b/extra_tests/ctypes_tests/test_win32.py @@ -5,7 +5,7 @@ import pytest @pytest.mark.skipif("sys.platform != 'win32'") -def test_VARIANT(self): +def test_VARIANT(): from ctypes import wintypes a = wintypes.VARIANT_BOOL() assert a.value is False diff --git a/extra_tests/test_datetime.py b/extra_tests/test_datetime.py --- a/extra_tests/test_datetime.py +++ b/extra_tests/test_datetime.py @@ -128,7 +128,7 @@ import os import time if os.name == 'nt': - skip("setting os.environ['TZ'] ineffective on windows") + pytest.skip("setting os.environ['TZ'] ineffective on windows") try: prev_tz = os.environ.get("TZ") os.environ["TZ"] = "GMT" From pypy.commits at gmail.com Mon Aug 12 06:04:47 2019 From: pypy.commits at gmail.com (mattip) Date: Mon, 12 Aug 2019 03:04:47 -0700 (PDT) Subject: [pypy-commit] pypy default: Add link to nice rpython tutorial, fix some doc build errors Message-ID: <5d5139bf.1c69fb81.ede47.4c9f@mx.google.com> Author: Matti Picus Branch: Changeset: r97161:523f22b1cafe Date: 2019-08-12 13:04 +0300 http://bitbucket.org/pypy/pypy/changeset/523f22b1cafe/ Log: Add link to nice rpython tutorial, fix some doc build errors diff --git a/pypy/doc/architecture.rst b/pypy/doc/architecture.rst --- 
a/pypy/doc/architecture.rst +++ b/pypy/doc/architecture.rst @@ -88,7 +88,9 @@ The RPython standard library is to be found in the ``rlib`` subdirectory. -Consult `Getting Started with RPython`_ for further reading +Consult `Getting Started with RPython`_ for further reading or `RPython By +Example`_ for another take on what can be done using RPython without writing an +interpreter over it. Translation ~~~~~~~~~~~ @@ -132,4 +134,6 @@ GC written as more RPython code. The best one we have so far is in ``rpython/memory/gc/incminimark.py``. -.. _`Getting started with RPython`: http://rpython.readthedocs.org/en/latest/getting-started.html +.. _`Getting Started with RPython`: http://rpython.readthedocs.org/en/latest/getting-started.html +.. _RPython By Example: http://mesapy.org/rpython-by-example/ + diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -47,6 +47,7 @@ even decide not to implement. .. branch: arm64 + Add a JIT backend for ARM64 (aarch64) .. branch: fix-test-vmprof-closed-file diff --git a/rpython/doc/getting-started.rst b/rpython/doc/getting-started.rst --- a/rpython/doc/getting-started.rst +++ b/rpython/doc/getting-started.rst @@ -49,12 +49,15 @@ * `How to write interpreters in RPython`_ and `part 2`_ by Andrew Brown. +* `RPython By Example`_ + .. _Fast Enough VMs in Fast Enough Time: http://tratt.net/laurie/tech_articles/articles/fast_enough_vms_in_fast_enough_time .. _How to write interpreters in RPython: http://morepypy.blogspot.com/2011/04/tutorial-writing-interpreter-with-pypy.html .. _part 2: http://morepypy.blogspot.com/2011/04/tutorial-part-2-adding-jit.html +.. _RPython By Example: http://mesapy.org/rpython-by-example/ .. _try-out-the-translator: diff --git a/rpython/doc/glossary.rst b/rpython/doc/glossary.rst --- a/rpython/doc/glossary.rst +++ b/rpython/doc/glossary.rst @@ -51,10 +51,10 @@ some operation in terms of the target :term:`type system`. ootypesystem - An :ref:`object oriented type model ` + An object oriented type model containing classes and instances. A :term:`backend` that uses this type system - is also called a high-level backend. The JVM and CLI backends - all use this typesystem. + is also called a high-level backend. The JVM and CLI backends (now removed) + used this typesystem. prebuilt constant In :term:`RPython` module globals are considered constants. Moreover, diff --git a/rpython/doc/translation.rst b/rpython/doc/translation.rst --- a/rpython/doc/translation.rst +++ b/rpython/doc/translation.rst @@ -583,9 +583,6 @@ rate; this means that the memory management implementation is critical to the performance of the PyPy interpreter. -You can choose which garbage collection strategy to use with -:config:`translation.gc`. - .. 
_genc: From pypy.commits at gmail.com Mon Aug 12 08:42:55 2019 From: pypy.commits at gmail.com (rlamy) Date: Mon, 12 Aug 2019 05:42:55 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: fix translation when both futimens() and utimensat() are defined Message-ID: <5d515ecf.1c69fb81.2f7f9.957c@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97162:ea03a1e77a76 Date: 2019-08-12 13:42 +0100 http://bitbucket.org/pypy/pypy/changeset/ea03a1e77a76/ Log: fix translation when both futimens() and utimensat() are defined diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py --- a/pypy/module/posix/interp_posix.py +++ b/pypy/module/posix/interp_posix.py @@ -1608,6 +1608,7 @@ mtime_s, mtime_ns = convert_ns(space, args_w[1]) return now, atime_s, atime_ns, mtime_s, mtime_ns + at specialize.arg(1) def do_utimens(space, func, arg, utime, *args): """Common implementation for futimens/utimensat etc.""" now, atime_s, atime_ns, mtime_s, mtime_ns = utime From pypy.commits at gmail.com Mon Aug 12 12:11:38 2019 From: pypy.commits at gmail.com (mattip) Date: Mon, 12 Aug 2019 09:11:38 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: the exception types have changed for import errors Message-ID: <5d518fba.1c69fb81.324d6.a8b4@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97163:a24307b274dc Date: 2019-08-12 19:09 +0300 http://bitbucket.org/pypy/pypy/changeset/a24307b274dc/ Log: the exception types have changed for import errors diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -461,14 +461,14 @@ print('__name__ =', __name__) from .struct import inpackage """, ns) - raises(SystemError, ns['imp']) + raises(ImportError, ns['imp']) def test_future_relative_import_error_when_in_non_package2(self): ns = {'__name__': __name__} exec("""def imp(): from .. 
import inpackage """, ns) - raises(SystemError, ns['imp']) + raises(ImportError, ns['imp']) def test_relative_import_with___name__(self): import sys @@ -517,12 +517,12 @@ # Check relative fails with only __package__ wrong ns = dict(__package__='foo', __name__='pkg.notarealmodule') check_absolute() # XXX check warnings - raises(SystemError, check_relative) + raises(ModuleNotFoundError, check_relative) # Check relative fails with __package__ and __name__ wrong ns = dict(__package__='foo', __name__='notarealpkg.notarealmodule') check_absolute() # XXX check warnings - raises(SystemError, check_relative) + raises(ModuleNotFoundError, check_relative) # Check relative fails when __package__ set to a non-string ns = dict(__package__=object()) From pypy.commits at gmail.com Tue Aug 13 11:51:22 2019 From: pypy.commits at gmail.com (mattip) Date: Tue, 13 Aug 2019 08:51:22 -0700 (PDT) Subject: [pypy-commit] pypy cryptograhpt-2.7: fix typo and deprecate SSLv3 Message-ID: <5d52dc7a.1c69fb81.4870c.e1fc@mx.google.com> Author: Matti Picus Branch: cryptograhpt-2.7 Changeset: r97169:7de556aeb5e8 Date: 2019-08-13 11:43 +0300 http://bitbucket.org/pypy/pypy/changeset/7de556aeb5e8/ Log: fix typo and deprecate SSLv3 diff --git a/lib_pypy/_cffi_ssl/_stdssl/__init__.py b/lib_pypy/_cffi_ssl/_stdssl/__init__.py --- a/lib_pypy/_cffi_ssl/_stdssl/__init__.py +++ b/lib_pypy/_cffi_ssl/_stdssl/__init__.py @@ -767,7 +767,7 @@ self.ctx = ffi.NULL if protocol == PROTOCOL_TLSv1: method = lib.TLSv1_method() - elif lib.Cryptography_HAS_TLSv1_2 and protocol == PROTOCOL_TLSv1_1: + elif lib.Cryptography_HAS_TLSv1_1 and protocol == PROTOCOL_TLSv1_1: method = lib.TLSv1_1_method() elif lib.Cryptography_HAS_TLSv1_2 and protocol == PROTOCOL_TLSv1_2 : method = lib.TLSv1_2_method() diff --git a/lib_pypy/_ssl_build.py b/lib_pypy/_ssl_build.py --- a/lib_pypy/_ssl_build.py +++ b/lib_pypy/_ssl_build.py @@ -24,6 +24,7 @@ "cmac", "conf", "crypto", + "ct", "dh", "dsa", "ec", @@ -32,11 +33,13 @@ "engine", "err", "evp", + "fips", "hmac", "nid", "objects", "ocsp", "opensslv", + "osrandom_engine", "pem", "pkcs12", "rand", @@ -50,6 +53,7 @@ "callbacks", ] + pypy_win32_extra, libraries=_get_openssl_libraries(sys.platform), + extra_compile_args=['-DOPENSSL_NO_SSL3_METHOD'], extra_link_args=extra_link_args(compiler_type()), ) From pypy.commits at gmail.com Tue Aug 13 14:01:13 2019 From: pypy.commits at gmail.com (arigo) Date: Tue, 13 Aug 2019 11:01:13 -0700 (PDT) Subject: [pypy-commit] pypy default: workaround (xxx no idea how things end up here but only on arm64-2 when running pypy) Message-ID: <5d52fae9.1c69fb81.7561d.81b9@mx.google.com> Author: Armin Rigo Branch: Changeset: r97170:f95a2bad0572 Date: 2019-08-13 18:00 +0000 http://bitbucket.org/pypy/pypy/changeset/f95a2bad0572/ Log: workaround (xxx no idea how things end up here but only on arm64-2 when running pypy) diff --git a/rpython/translator/c/funcgen.py b/rpython/translator/c/funcgen.py --- a/rpython/translator/c/funcgen.py +++ b/rpython/translator/c/funcgen.py @@ -5,7 +5,7 @@ from rpython.flowspace.model import Variable, Constant, mkentrymap from rpython.rtyper.lltypesystem.lltype import (Ptr, Void, Bool, Signed, Unsigned, SignedLongLong, Float, UnsignedLongLong, Char, UniChar, ContainerType, - Array, FixedSizeArray, ForwardReference, FuncType) + Array, FixedSizeArray, ForwardReference, FuncType, typeOf) from rpython.rtyper.lltypesystem.rffi import INT from rpython.rtyper.lltypesystem.llmemory import Address from rpython.translator.backendopt.ssa import SSI_to_SSA @@ -471,7 +471,10 @@ targets 
= [fn.value._obj.graph] except AttributeError: targets = None - return self.generic_call(fn.concretetype, self.expr(fn), + FN_TYPE = fn.concretetype + if FN_TYPE is Void: # XXX no idea how this is possible + FN_TYPE = typeOf(fn.value) + return self.generic_call(FN_TYPE, self.expr(fn), op.args[1:], op.result, targets) def OP_INDIRECT_CALL(self, op): From pypy.commits at gmail.com Tue Aug 13 16:12:35 2019 From: pypy.commits at gmail.com (mattip) Date: Tue, 13 Aug 2019 13:12:35 -0700 (PDT) Subject: [pypy-commit] pypy cryptograhpt-2.7: fix wrong function call Message-ID: <5d5319b3.1c69fb81.1c180.3d98@mx.google.com> Author: Matti Picus Branch: cryptograhpt-2.7 Changeset: r97171:7b89acff0749 Date: 2019-08-13 22:35 +0300 http://bitbucket.org/pypy/pypy/changeset/7b89acff0749/ Log: fix wrong function call diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/bio.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/bio.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/bio.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/bio.py @@ -38,6 +38,7 @@ int BIO_should_io_special(BIO *); int BIO_should_retry(BIO *); int BIO_reset(BIO *); +long BIO_set_nbio(BIO *, long); void BIO_set_retry_read(BIO *); void BIO_clear_retry_flags(BIO *); """ diff --git a/lib_pypy/_cffi_ssl/_stdssl/__init__.py b/lib_pypy/_cffi_ssl/_stdssl/__init__.py --- a/lib_pypy/_cffi_ssl/_stdssl/__init__.py +++ b/lib_pypy/_cffi_ssl/_stdssl/__init__.py @@ -255,7 +255,8 @@ # timeout = _socket_timeout(sock) if sock and timeout >= 0: - lib.SSL_set_bio(ssl, lib.SSL_get_rbio(ssl), lib.SSL_get_wbio(ssl)) + lib.BIO_set_nbio(lib.SSL_get_rbio(ssl), 1) + lib.BIO_set_nbio(lib.SSL_get_wbio(ssl), 1) if socket_type == SSL_CLIENT: lib.SSL_set_connect_state(ssl) @@ -316,7 +317,8 @@ timeout = _socket_timeout(sock) if sock: nonblocking = timeout >= 0 - lib.SSL_set_bio(ssl, lib.SSL_get_rbio(ssl), lib.SSL_get_wbio(ssl)) + lib.BIO_set_nbio(lib.SSL_get_rbio(ssl), nonblocking) + lib.BIO_set_nbio(lib.SSL_get_wbio(ssl), nonblocking) # Actually negotiate SSL connection # XXX If SSL_do_handshake() returns 0, it's also a failure. 
@@ -384,7 +386,8 @@ timeout = _socket_timeout(sock) if sock: nonblocking = timeout >= 0 - lib.SSL_set_bio(ssl, lib.SSL_get_rbio(ssl), lib.SSL_get_wbio(ssl)) + lib.BIO_set_nbio(lib.SSL_get_rbio(ssl), nonblocking) + lib.BIO_set_nbio(lib.SSL_get_wbio(ssl), nonblocking) sockstate = _ssl_select(sock, 1, timeout) if sockstate == SOCKET_HAS_TIMED_OUT: @@ -446,7 +449,8 @@ if sock: timeout = _socket_timeout(sock) nonblocking = timeout >= 0 - lib.SSL_set_bio(ssl, lib.SSL_get_rbio(ssl), lib.SSL_get_wbio(ssl)) + lib.BIO_set_nbio(lib.SSL_get_rbio(ssl), nonblocking) + lib.BIO_set_nbio(lib.SSL_get_wbio(ssl), nonblocking) deadline = 0 timeout = _socket_timeout(sock) @@ -567,7 +571,8 @@ timeout = _socket_timeout(sock) nonblocking = timeout >= 0 if sock and timeout >= 0: - lib.SSL_set_bio(ssl, lib.SSL_get_rbio(ssl), lib.SSL_get_wbio(ssl)) + lib.BIO_set_nbio(lib.SSL_get_rbio(ssl), nonblocking) + lib.BIO_set_nbio(lib.SSL_get_wbio(ssl), nonblocking) else: timeout = 0 From pypy.commits at gmail.com Wed Aug 14 12:42:05 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 14 Aug 2019 09:42:05 -0700 (PDT) Subject: [pypy-commit] pypy cryptograhpt-2.7: fix last failing test - add lower case error message stanza Message-ID: <5d5439dd.1c69fb81.47660.0e3a@mx.google.com> Author: Matti Picus Branch: cryptograhpt-2.7 Changeset: r97172:401d69215728 Date: 2019-08-14 16:53 +0300 http://bitbucket.org/pypy/pypy/changeset/401d69215728/ Log: fix last failing test - add lower case error message stanza diff --git a/lib_pypy/_cffi_ssl/_stdssl/error.py b/lib_pypy/_cffi_ssl/_stdssl/error.py --- a/lib_pypy/_cffi_ssl/_stdssl/error.py +++ b/lib_pypy/_cffi_ssl/_stdssl/error.py @@ -147,8 +147,8 @@ err_reason = lib.ERR_GET_REASON(errcode) reason_str = ERR_CODES_TO_NAMES.get((err_lib, err_reason), None) lib_str = LIB_CODES_TO_NAMES.get(err_lib, None) - if errstr is None: - errstr = _str_from_buf(lib.ERR_reason_error_string(errcode)) + # Set last part of msg to a lower-case version of reason_str + errstr = _str_from_buf(lib.ERR_reason_error_string(errcode)) msg = errstr if not errstr: msg = "unknown error" From pypy.commits at gmail.com Wed Aug 14 12:42:07 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 14 Aug 2019 09:42:07 -0700 (PDT) Subject: [pypy-commit] pypy cryptograhpt-2.7: close branch to be merged Message-ID: <5d5439df.1c69fb81.f73cf.0e58@mx.google.com> Author: Matti Picus Branch: cryptograhpt-2.7 Changeset: r97173:7ddff8a0053b Date: 2019-08-14 17:00 +0300 http://bitbucket.org/pypy/pypy/changeset/7ddff8a0053b/ Log: close branch to be merged diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -55,3 +55,14 @@ .. branch: fix_darwin_list_dir_test +.. branch: apptest-file + +New mechanism for app-level testing using -D to test all apptest_*.py files + +.. branch: feature_closed_prop_to_rfile + +Add RFile.closed + +.. 
branch: cryptograhpt-2.7 + +Update vendored cryptography used for _ssl to 2.7 From pypy.commits at gmail.com Wed Aug 14 12:42:09 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 14 Aug 2019 09:42:09 -0700 (PDT) Subject: [pypy-commit] pypy default: merge cryptograhpt-2.7 which updates vendored cryptography to 2.7 Message-ID: <5d5439e1.1c69fb81.4b02b.64b3@mx.google.com> Author: Matti Picus Branch: Changeset: r97174:5921676473ae Date: 2019-08-14 17:01 +0300 http://bitbucket.org/pypy/pypy/changeset/5921676473ae/ Log: merge cryptograhpt-2.7 which updates vendored cryptography to 2.7 diff too long, truncating to 2000 out of 25452 lines diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -70,7 +70,9 @@ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ ^lib_pypy/_libmpdec/.+.o$ -^lib_pypy/.+.c$ +^lib_pypy/.+_cffi.c$ +^lib_pypy/_curses_cffi_check.c +^lib_pypy/_pypy_openssl.c ^lib_pypy/.+.o$ ^lib_pypy/.+.so$ ^lib_pypy/.+.pyd$ diff --git a/lib_pypy/_cffi_ssl/README.md b/lib_pypy/_cffi_ssl/README.md --- a/lib_pypy/_cffi_ssl/README.md +++ b/lib_pypy/_cffi_ssl/README.md @@ -1,21 +1,22 @@ # PyPy's SSL module -All of the CFFI code is copied from cryptography, wich patches contributed -back to cryptography. PyPy vendors it's own copy of the cffi backend thus -it renames the compiled shared object to _pypy_openssl.so (which means -that cryptography can ship their own cffi backend) +All of the CFFI code is copied from cryptography. PyPy vendors it's own copy of +the cffi backend thus it renames the compiled shared object to _pypy_openssl.so +(which means that cryptography can ship their own cffi backend) -NOTE: currently, we have the following changes: +# Modifications to cryptography 2.7 -* ``_cffi_src/openssl/callbacks.py`` to not rely on the CPython C API - (this change is now backported) - -* ``_cffi_src/utils.py`` for issue #2575 (29c9a89359e4) - -* ``_cffi_src/openssl/x509_vfy.py`` for issue #2605 (ca4d0c90f5a1) - -* ``_cffi_src/openssl/pypy_win32_extra.py`` for Win32-only functionality like ssl.enum_certificates() - +- `_cffi_src/openssl/asn1.py` : revert removal of `ASN1_TIME_print`, + `ASN1_ITEM`, `ASN1_ITEM_EXP`, `ASN1_VALUE`, `ASN1_item_d2i` +- `_cffi_src/openssl/bio.py` : revert removal of `BIO_s_file`, `BIO_read_filename` +- `_cffi_src/openssl/evp.py` : revert removal of `EVP_MD_size` +- `_cffi_src/openssl/nid.py` : revert removal of `NID_ad_OCSP`, + `NID_info_access`, `NID_ad_ca_issuers`, `NID_crl_distribution_points` +- `_cffi_src/openssl/pem.py` : revert removal of `PEM_read_bio_X509_AUX` +- `_cffi_src/openssl/x509.py` : revert removal of `X509_get_ext_by_NID`, + `i2d_X509` +- `_cffi_src/openssl/x509v3.py` : revert removal of `X509V3_EXT_get`, + `X509V3_EXT_METHOD` # Tests? @@ -25,11 +26,8 @@ Copy over all the sources into the folder `lib_pypy/_cffi_ssl/*`. Updating the cffi backend can be simply done by the following command:: - $ cp -r /src/_cffi_src/* . - -NOTE: you need to keep our version of ``_cffi_src/openssl/callbacks.py`` -for now! + $ cp -r /src/* . # Crpytography version -Copied over release version `1.7.2` +Copied over release version `2.7` diff --git a/lib_pypy/_cffi_ssl/_cffi_src/build_commoncrypto.py b/lib_pypy/_cffi_ssl/_cffi_src/build_commoncrypto.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/build_commoncrypto.py +++ /dev/null @@ -1,33 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -from _cffi_src.utils import build_ffi_for_binding - - -ffi = build_ffi_for_binding( - module_name="_commoncrypto", - module_prefix="_cffi_src.commoncrypto.", - modules=[ - "cf", - "common_digest", - "common_hmac", - "common_key_derivation", - "common_cryptor", - "common_symmetric_key_wrap", - "seccertificate", - "secimport", - "secitem", - "seckey", - "seckeychain", - "secpolicy", - "sectransform", - "sectrust", - "secure_transport", - ], - extra_link_args=[ - "-framework", "Security", "-framework", "CoreFoundation" - ], -) diff --git a/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py b/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py --- a/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py @@ -13,31 +13,43 @@ def _get_openssl_libraries(platform): + if os.environ.get("CRYPTOGRAPHY_SUPPRESS_LINK_FLAGS", None): + return [] # OpenSSL goes by a different library name on different operating systems. - if platform == "darwin": - return _osx_libraries( - os.environ.get("CRYPTOGRAPHY_OSX_NO_LINK_FLAGS") + if platform == "win32" and compiler_type() == "msvc": + windows_link_legacy_openssl = os.environ.get( + "CRYPTOGRAPHY_WINDOWS_LINK_LEGACY_OPENSSL", None ) - elif platform == "win32": - if compiler_type() == "msvc": + if windows_link_legacy_openssl is None: + # Link against the 1.1.0 names + libs = ["libssl", "libcrypto"] + else: + # Link against the 1.0.2 and lower names libs = ["libeay32", "ssleay32"] - else: - libs = ["ssl", "crypto"] return libs + ["advapi32", "crypt32", "gdi32", "user32", "ws2_32"] else: + # darwin, linux, mingw all use this path # In some circumstances, the order in which these libs are # specified on the linker command-line is significant; # libssl must come before libcrypto - # (http://marc.info/?l=openssl-users&m=135361825921871) + # (https://marc.info/?l=openssl-users&m=135361825921871) return ["ssl", "crypto"] -def _osx_libraries(build_static): - # For building statically we don't want to pass the -lssl or -lcrypto flags - if build_static == "1": +def _extra_compile_args(platform): + """ + We set -Wconversion args here so that we only do Wconversion checks on the + code we're compiling and not on cffi itself (as passing -Wconversion in + CFLAGS would do). We set no error on sign conversion because some + function signatures in OpenSSL have changed from long -> unsigned long + in the past. Since that isn't a precision issue we don't care. + When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_110 we can + revisit this. + """ + if platform not in ["win32", "hp-ux11", "sunos5"]: + return ["-Wconversion", "-Wno-error=sign-conversion"] + else: return [] - else: - return ["ssl", "crypto"] ffi = build_ffi_for_binding( @@ -52,9 +64,9 @@ "bignum", "bio", "cmac", - "cms", "conf", "crypto", + "ct", "dh", "dsa", "ec", @@ -63,6 +75,7 @@ "engine", "err", "evp", + "fips", "hmac", "nid", "objects", @@ -82,5 +95,13 @@ "callbacks", ], libraries=_get_openssl_libraries(sys.platform), + # These args are passed here so that we only do Wconversion checks on the + # code we're compiling and not on cffi itself (as passing -Wconversion in + # CFLAGS would do). We set no error on sign convesrion because some + # function signatures in OpenSSL have changed from long -> unsigned long + # in the past. Since that isn't a precision issue we don't care. 
+ # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_110 we can + # revisit this. + extra_compile_args=_extra_compile_args(sys.platform), extra_link_args=extra_link_args(compiler_type()), ) diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/__init__.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/__init__.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/cf.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/cf.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/cf.py +++ /dev/null @@ -1,113 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef bool Boolean; -typedef signed long OSStatus; -typedef unsigned char UInt8; -typedef uint32_t UInt32; - -typedef const void * CFAllocatorRef; -const CFAllocatorRef kCFAllocatorDefault; -typedef ... *CFDataRef; -typedef signed long long CFIndex; -typedef ... *CFStringRef; -typedef ... *CFArrayRef; -typedef ... *CFMutableArrayRef; -typedef ... *CFBooleanRef; -typedef ... *CFErrorRef; -typedef ... *CFNumberRef; -typedef ... *CFTypeRef; -typedef ... *CFDictionaryRef; -typedef ... *CFMutableDictionaryRef; -typedef struct { - ...; -} CFDictionaryKeyCallBacks; -typedef struct { - ...; -} CFDictionaryValueCallBacks; -typedef struct { - ...; -} CFRange; -typedef struct { - ...; -} CFArrayCallBacks; - -typedef UInt32 CFStringEncoding; -enum { - kCFStringEncodingASCII = 0x0600 -}; - -enum { - kCFNumberSInt8Type = 1, - kCFNumberSInt16Type = 2, - kCFNumberSInt32Type = 3, - kCFNumberSInt64Type = 4, - kCFNumberFloat32Type = 5, - kCFNumberFloat64Type = 6, - kCFNumberCharType = 7, - kCFNumberShortType = 8, - kCFNumberIntType = 9, - kCFNumberLongType = 10, - kCFNumberLongLongType = 11, - kCFNumberFloatType = 12, - kCFNumberDoubleType = 13, - kCFNumberCFIndexType = 14, - kCFNumberNSIntegerType = 15, - kCFNumberCGFloatType = 16, - kCFNumberMaxType = 16 -}; -typedef int CFNumberType; - -const CFDictionaryKeyCallBacks kCFTypeDictionaryKeyCallBacks; -const CFDictionaryValueCallBacks kCFTypeDictionaryValueCallBacks; - -const CFArrayCallBacks kCFTypeArrayCallBacks; - -const CFBooleanRef kCFBooleanTrue; -const CFBooleanRef kCFBooleanFalse; -""" - -FUNCTIONS = """ -CFDataRef CFDataCreate(CFAllocatorRef, const UInt8 *, CFIndex); -CFStringRef CFStringCreateWithCString(CFAllocatorRef, const char *, - CFStringEncoding); -CFDictionaryRef CFDictionaryCreate(CFAllocatorRef, const void **, - const void **, CFIndex, - const CFDictionaryKeyCallBacks *, - const CFDictionaryValueCallBacks *); -CFMutableDictionaryRef CFDictionaryCreateMutable( - CFAllocatorRef, - CFIndex, - const CFDictionaryKeyCallBacks *, - const CFDictionaryValueCallBacks * -); -void CFDictionarySetValue(CFMutableDictionaryRef, const void *, const void *); -CFIndex CFArrayGetCount(CFArrayRef); -const void *CFArrayGetValueAtIndex(CFArrayRef, CFIndex); -CFIndex CFDataGetLength(CFDataRef); -void CFDataGetBytes(CFDataRef, CFRange, UInt8 *); -CFRange CFRangeMake(CFIndex, 
CFIndex); -void CFShow(CFTypeRef); -Boolean CFBooleanGetValue(CFBooleanRef); -CFNumberRef CFNumberCreate(CFAllocatorRef, CFNumberType, const void *); -void CFRelease(CFTypeRef); -CFTypeRef CFRetain(CFTypeRef); - -CFMutableArrayRef CFArrayCreateMutable(CFAllocatorRef, CFIndex, - const CFArrayCallBacks *); -void CFArrayAppendValue(CFMutableArrayRef, const void *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_cryptor.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_cryptor.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_cryptor.py +++ /dev/null @@ -1,99 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -enum { - kCCAlgorithmAES128 = 0, - kCCAlgorithmDES, - kCCAlgorithm3DES, - kCCAlgorithmCAST, - kCCAlgorithmRC4, - kCCAlgorithmRC2, - kCCAlgorithmBlowfish -}; -typedef uint32_t CCAlgorithm; -enum { - kCCSuccess = 0, - kCCParamError = -4300, - kCCBufferTooSmall = -4301, - kCCMemoryFailure = -4302, - kCCAlignmentError = -4303, - kCCDecodeError = -4304, - kCCUnimplemented = -4305 -}; -typedef int32_t CCCryptorStatus; -typedef uint32_t CCOptions; -enum { - kCCEncrypt = 0, - kCCDecrypt, -}; -typedef uint32_t CCOperation; -typedef ... *CCCryptorRef; - -enum { - kCCModeOptionCTR_LE = 0x0001, - kCCModeOptionCTR_BE = 0x0002 -}; - -typedef uint32_t CCModeOptions; - -enum { - kCCModeECB = 1, - kCCModeCBC = 2, - kCCModeCFB = 3, - kCCModeCTR = 4, - kCCModeF8 = 5, - kCCModeLRW = 6, - kCCModeOFB = 7, - kCCModeXTS = 8, - kCCModeRC4 = 9, - kCCModeCFB8 = 10, - kCCModeGCM = 11 -}; -typedef uint32_t CCMode; -enum { - ccNoPadding = 0, - ccPKCS7Padding = 1, -}; -typedef uint32_t CCPadding; -""" - -FUNCTIONS = """ -CCCryptorStatus CCCryptorCreateWithMode(CCOperation, CCMode, CCAlgorithm, - CCPadding, const void *, const void *, - size_t, const void *, size_t, int, - CCModeOptions, CCCryptorRef *); -CCCryptorStatus CCCryptorCreate(CCOperation, CCAlgorithm, CCOptions, - const void *, size_t, const void *, - CCCryptorRef *); -CCCryptorStatus CCCryptorUpdate(CCCryptorRef, const void *, size_t, void *, - size_t, size_t *); -CCCryptorStatus CCCryptorFinal(CCCryptorRef, void *, size_t, size_t *); -CCCryptorStatus CCCryptorRelease(CCCryptorRef); - -CCCryptorStatus CCCryptorGCMAddIV(CCCryptorRef, const void *, size_t); -CCCryptorStatus CCCryptorGCMAddAAD(CCCryptorRef, const void *, size_t); -CCCryptorStatus CCCryptorGCMEncrypt(CCCryptorRef, const void *, size_t, - void *); -CCCryptorStatus CCCryptorGCMDecrypt(CCCryptorRef, const void *, size_t, - void *); -CCCryptorStatus CCCryptorGCMFinal(CCCryptorRef, const void *, size_t *); -CCCryptorStatus CCCryptorGCMReset(CCCryptorRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -/* Not defined in the public header */ -enum { - kCCModeGCM = 11 -}; -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_digest.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_digest.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_digest.py +++ /dev/null @@ -1,58 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef uint32_t CC_LONG; -typedef uint64_t CC_LONG64; -typedef struct CC_MD5state_st { - ...; -} CC_MD5_CTX; -typedef struct CC_SHA1state_st { - ...; -} CC_SHA1_CTX; -typedef struct CC_SHA256state_st { - ...; -} CC_SHA256_CTX; -typedef struct CC_SHA512state_st { - ...; -} CC_SHA512_CTX; -""" - -FUNCTIONS = """ -int CC_MD5_Init(CC_MD5_CTX *); -int CC_MD5_Update(CC_MD5_CTX *, const void *, CC_LONG); -int CC_MD5_Final(unsigned char *, CC_MD5_CTX *); - -int CC_SHA1_Init(CC_SHA1_CTX *); -int CC_SHA1_Update(CC_SHA1_CTX *, const void *, CC_LONG); -int CC_SHA1_Final(unsigned char *, CC_SHA1_CTX *); - -int CC_SHA224_Init(CC_SHA256_CTX *); -int CC_SHA224_Update(CC_SHA256_CTX *, const void *, CC_LONG); -int CC_SHA224_Final(unsigned char *, CC_SHA256_CTX *); - -int CC_SHA256_Init(CC_SHA256_CTX *); -int CC_SHA256_Update(CC_SHA256_CTX *, const void *, CC_LONG); -int CC_SHA256_Final(unsigned char *, CC_SHA256_CTX *); - -int CC_SHA384_Init(CC_SHA512_CTX *); -int CC_SHA384_Update(CC_SHA512_CTX *, const void *, CC_LONG); -int CC_SHA384_Final(unsigned char *, CC_SHA512_CTX *); - -int CC_SHA512_Init(CC_SHA512_CTX *); -int CC_SHA512_Update(CC_SHA512_CTX *, const void *, CC_LONG); -int CC_SHA512_Final(unsigned char *, CC_SHA512_CTX *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_hmac.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_hmac.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_hmac.py +++ /dev/null @@ -1,37 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef struct { - ...; -} CCHmacContext; -enum { - kCCHmacAlgSHA1, - kCCHmacAlgMD5, - kCCHmacAlgSHA256, - kCCHmacAlgSHA384, - kCCHmacAlgSHA512, - kCCHmacAlgSHA224 -}; -typedef uint32_t CCHmacAlgorithm; -""" - -FUNCTIONS = """ -void CCHmacInit(CCHmacContext *, CCHmacAlgorithm, const void *, size_t); -void CCHmacUpdate(CCHmacContext *, const void *, size_t); -void CCHmacFinal(CCHmacContext *, void *); - -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_key_derivation.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_key_derivation.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_key_derivation.py +++ /dev/null @@ -1,39 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -enum { - kCCPBKDF2 = 2, -}; -typedef uint32_t CCPBKDFAlgorithm; -enum { - kCCPRFHmacAlgSHA1 = 1, - kCCPRFHmacAlgSHA224 = 2, - kCCPRFHmacAlgSHA256 = 3, - kCCPRFHmacAlgSHA384 = 4, - kCCPRFHmacAlgSHA512 = 5, -}; -typedef uint32_t CCPseudoRandomAlgorithm; -typedef unsigned int uint; -""" - -FUNCTIONS = """ -int CCKeyDerivationPBKDF(CCPBKDFAlgorithm, const char *, size_t, - const uint8_t *, size_t, CCPseudoRandomAlgorithm, - uint, uint8_t *, size_t); -uint CCCalibratePBKDF(CCPBKDFAlgorithm, size_t, size_t, - CCPseudoRandomAlgorithm, size_t, uint32_t); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_symmetric_key_wrap.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_symmetric_key_wrap.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_symmetric_key_wrap.py +++ /dev/null @@ -1,35 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -enum { - kCCWRAPAES = 1, -}; - -typedef uint32_t CCWrappingAlgorithm; -""" - -FUNCTIONS = """ -int CCSymmetricKeyWrap(CCWrappingAlgorithm, const uint8_t *, const size_t, - const uint8_t *, size_t, const uint8_t *, size_t, - uint8_t *, size_t *); -int CCSymmetricKeyUnwrap(CCWrappingAlgorithm algorithm, const uint8_t *, - const size_t, const uint8_t *, size_t, - const uint8_t *, size_t, uint8_t *, size_t *); -size_t CCSymmetricWrappedSize(CCWrappingAlgorithm, size_t); -size_t CCSymmetricUnwrappedSize(CCWrappingAlgorithm, size_t); - -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seccertificate.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seccertificate.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seccertificate.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... *SecCertificateRef; -""" - -FUNCTIONS = """ -SecCertificateRef SecCertificateCreateWithData(CFAllocatorRef, CFDataRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secimport.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secimport.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secimport.py +++ /dev/null @@ -1,86 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... 
*SecAccessRef; - -CFStringRef kSecImportExportPassphrase; -CFStringRef kSecImportExportKeychain; -CFStringRef kSecImportExportAccess; - -typedef uint32_t SecExternalItemType; -enum { - kSecItemTypeUnknown, - kSecItemTypePrivateKey, - kSecItemTypePublicKey, - kSecItemTypeSessionKey, - kSecItemTypeCertificate, - kSecItemTypeAggregate -}; - - -typedef uint32_t SecExternalFormat; -enum { - kSecFormatUnknown = 0, - kSecFormatOpenSSL, - kSecFormatSSH, - kSecFormatBSAFE, - kSecFormatRawKey, - kSecFormatWrappedPKCS8, - kSecFormatWrappedOpenSSL, - kSecFormatWrappedSSH, - kSecFormatWrappedLSH, - kSecFormatX509Cert, - kSecFormatPEMSequence, - kSecFormatPKCS7, - kSecFormatPKCS12, - kSecFormatNetscapeCertSequence, - kSecFormatSSHv2 -}; - -typedef uint32_t SecItemImportExportFlags; -enum { - kSecKeyImportOnlyOne = 0x00000001, - kSecKeySecurePassphrase = 0x00000002, - kSecKeyNoAccessControl = 0x00000004 -}; -typedef uint32_t SecKeyImportExportFlags; - -typedef struct { - /* for import and export */ - uint32_t version; - SecKeyImportExportFlags flags; - CFTypeRef passphrase; - CFStringRef alertTitle; - CFStringRef alertPrompt; - - /* for import only */ - SecAccessRef accessRef; - CFArrayRef keyUsage; - - CFArrayRef keyAttributes; -} SecItemImportExportKeyParameters; -""" - -FUNCTIONS = """ -OSStatus SecItemImport(CFDataRef, CFStringRef, SecExternalFormat *, - SecExternalItemType *, SecItemImportExportFlags, - const SecItemImportExportKeyParameters *, - SecKeychainRef, CFArrayRef *); -OSStatus SecPKCS12Import(CFDataRef, CFDictionaryRef, CFArrayRef *); -OSStatus SecItemExport(CFTypeRef, SecExternalFormat, SecItemImportExportFlags, - const SecItemImportExportKeyParameters *, CFDataRef *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secitem.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secitem.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secitem.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -const CFTypeRef kSecAttrKeyType; -const CFTypeRef kSecAttrKeySizeInBits; -const CFTypeRef kSecAttrIsPermanent; -const CFTypeRef kSecAttrKeyTypeRSA; -const CFTypeRef kSecAttrKeyTypeDSA; -const CFTypeRef kSecUseKeychain; -""" - -FUNCTIONS = """ -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seckey.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seckey.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seckey.py +++ /dev/null @@ -1,24 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... 
*SecKeyRef; -""" - -FUNCTIONS = """ -OSStatus SecKeyGeneratePair(CFDictionaryRef, SecKeyRef *, SecKeyRef *); -size_t SecKeyGetBlockSize(SecKeyRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seckeychain.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seckeychain.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seckeychain.py +++ /dev/null @@ -1,25 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... *SecKeychainRef; -""" - -FUNCTIONS = """ -OSStatus SecKeychainCreate(const char *, UInt32, const void *, Boolean, - SecAccessRef, SecKeychainRef *); -OSStatus SecKeychainDelete(SecKeychainRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secpolicy.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secpolicy.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secpolicy.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... *SecPolicyRef; -""" - -FUNCTIONS = """ -SecPolicyRef SecPolicyCreateSSL(Boolean, CFStringRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/sectransform.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/sectransform.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/sectransform.py +++ /dev/null @@ -1,68 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -#include -#include -""" - -TYPES = """ -typedef ... 
*SecTransformRef; - -CFStringRef kSecImportExportPassphrase; -CFStringRef kSecImportExportKeychain; -CFStringRef kSecImportExportAccess; - -CFStringRef kSecEncryptionMode; -CFStringRef kSecEncryptKey; -CFStringRef kSecIVKey; -CFStringRef kSecModeCBCKey; -CFStringRef kSecModeCFBKey; -CFStringRef kSecModeECBKey; -CFStringRef kSecModeNoneKey; -CFStringRef kSecModeOFBKey; -CFStringRef kSecOAEPEncodingParametersAttributeName; -CFStringRef kSecPaddingKey; -CFStringRef kSecPaddingNoneKey; -CFStringRef kSecPaddingOAEPKey; -CFStringRef kSecPaddingPKCS1Key; -CFStringRef kSecPaddingPKCS5Key; -CFStringRef kSecPaddingPKCS7Key; - -const CFStringRef kSecTransformInputAttributeName; -const CFStringRef kSecTransformOutputAttributeName; -const CFStringRef kSecTransformDebugAttributeName; -const CFStringRef kSecTransformTransformName; -const CFStringRef kSecTransformAbortAttributeName; - -CFStringRef kSecInputIsAttributeName; -CFStringRef kSecInputIsPlainText; -CFStringRef kSecInputIsDigest; -CFStringRef kSecInputIsRaw; - -const CFStringRef kSecDigestTypeAttribute; -const CFStringRef kSecDigestLengthAttribute; -const CFStringRef kSecDigestMD5; -const CFStringRef kSecDigestSHA1; -const CFStringRef kSecDigestSHA2; -""" - -FUNCTIONS = """ -Boolean SecTransformSetAttribute(SecTransformRef, CFStringRef, CFTypeRef, - CFErrorRef *); -SecTransformRef SecDecryptTransformCreate(SecKeyRef, CFErrorRef *); -SecTransformRef SecEncryptTransformCreate(SecKeyRef, CFErrorRef *); -SecTransformRef SecVerifyTransformCreate(SecKeyRef, CFDataRef, CFErrorRef *); -SecTransformRef SecSignTransformCreate(SecKeyRef, CFErrorRef *) ; -CFTypeRef SecTransformExecute(SecTransformRef, CFErrorRef *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/sectrust.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/sectrust.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/sectrust.py +++ /dev/null @@ -1,39 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... *SecTrustRef; -typedef uint32_t SecTrustResultType; - -enum { - kSecTrustResultInvalid, - kSecTrustResultProceed, - kSecTrustResultDeny, - kSecTrustResultUnspecified, - kSecTrustResultRecoverableTrustFailure, - kSecTrustResultFatalTrustFailure, - kSecTrustResultOtherError -}; -""" - -FUNCTIONS = """ -OSStatus SecTrustEvaluate(SecTrustRef, SecTrustResultType *); -OSStatus SecTrustCopyAnchorCertificates(CFArrayRef *); -""" - -MACROS = """ -/* The first argument changed from CFArrayRef to CFTypeRef in 10.8, so this - * has to go here for compatibility. - */ -OSStatus SecTrustCreateWithCertificates(CFTypeRef, CFTypeRef, SecTrustRef *); -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secure_transport.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secure_transport.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secure_transport.py +++ /dev/null @@ -1,308 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... 
*SSLContextRef; -typedef const void *SSLConnectionRef; - -typedef enum { - kSSLSessionOptionBreakOnServerAuth, - kSSLSessionOptionBreakOnCertRequested, -} SSLSessionOption; - -typedef enum { - kNeverAuthenticate, - kAlwaysAuthenticate, - kTryAuthenticate -} SSLAuthenticate; - -typedef enum { - kSSLIdle, - kSSLHandshake, - kSSLConnected, - kSSLClosed, - kSSLAborted -} SSLSessionState; - -typedef enum { - kSSLProtocolUnknown = 0, - kSSLProtocol3 = 2, - kTLSProtocol1 = 4, - /* DEPRECATED on iOS */ - kSSLProtocol2 = 1, - kSSLProtocol3Only = 3, - kTLSProtocol1Only = 5, - kSSLProtocolAll = 6, -} SSLProtocol; - -typedef UInt32 SSLCipherSuite; -enum { - SSL_NULL_WITH_NULL_NULL = 0x0000, - SSL_RSA_WITH_NULL_MD5 = 0x0001, - SSL_RSA_WITH_NULL_SHA = 0x0002, - SSL_RSA_EXPORT_WITH_RC4_40_MD5 = 0x0003, - SSL_RSA_WITH_RC4_128_MD5 = 0x0004, - SSL_RSA_WITH_RC4_128_SHA = 0x0005, - SSL_RSA_EXPORT_WITH_RC2_CBC_40_MD5 = 0x0006, - SSL_RSA_WITH_IDEA_CBC_SHA = 0x0007, - SSL_RSA_EXPORT_WITH_DES40_CBC_SHA = 0x0008, - SSL_RSA_WITH_DES_CBC_SHA = 0x0009, - SSL_RSA_WITH_3DES_EDE_CBC_SHA = 0x000A, - SSL_DH_DSS_EXPORT_WITH_DES40_CBC_SHA = 0x000B, - SSL_DH_DSS_WITH_DES_CBC_SHA = 0x000C, - SSL_DH_DSS_WITH_3DES_EDE_CBC_SHA = 0x000D, - SSL_DH_RSA_EXPORT_WITH_DES40_CBC_SHA = 0x000E, - SSL_DH_RSA_WITH_DES_CBC_SHA = 0x000F, - SSL_DH_RSA_WITH_3DES_EDE_CBC_SHA = 0x0010, - SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA = 0x0011, - SSL_DHE_DSS_WITH_DES_CBC_SHA = 0x0012, - SSL_DHE_DSS_WITH_3DES_EDE_CBC_SHA = 0x0013, - SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA = 0x0014, - SSL_DHE_RSA_WITH_DES_CBC_SHA = 0x0015, - SSL_DHE_RSA_WITH_3DES_EDE_CBC_SHA = 0x0016, - SSL_DH_anon_EXPORT_WITH_RC4_40_MD5 = 0x0017, - SSL_DH_anon_WITH_RC4_128_MD5 = 0x0018, - SSL_DH_anon_EXPORT_WITH_DES40_CBC_SHA = 0x0019, - SSL_DH_anon_WITH_DES_CBC_SHA = 0x001A, - SSL_DH_anon_WITH_3DES_EDE_CBC_SHA = 0x001B, - SSL_FORTEZZA_DMS_WITH_NULL_SHA = 0x001C, - SSL_FORTEZZA_DMS_WITH_FORTEZZA_CBC_SHA =0x001D, - - /* TLS addenda using AES, per RFC 3268 */ - TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F, - TLS_DH_DSS_WITH_AES_128_CBC_SHA = 0x0030, - TLS_DH_RSA_WITH_AES_128_CBC_SHA = 0x0031, - TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032, - TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033, - TLS_DH_anon_WITH_AES_128_CBC_SHA = 0x0034, - TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035, - TLS_DH_DSS_WITH_AES_256_CBC_SHA = 0x0036, - TLS_DH_RSA_WITH_AES_256_CBC_SHA = 0x0037, - TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038, - TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039, - TLS_DH_anon_WITH_AES_256_CBC_SHA = 0x003A, - - /* ECDSA addenda, RFC 4492 */ - TLS_ECDH_ECDSA_WITH_NULL_SHA = 0xC001, - TLS_ECDH_ECDSA_WITH_RC4_128_SHA = 0xC002, - TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA = 0xC003, - TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA = 0xC004, - TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA = 0xC005, - TLS_ECDHE_ECDSA_WITH_NULL_SHA = 0xC006, - TLS_ECDHE_ECDSA_WITH_RC4_128_SHA = 0xC007, - TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA = 0xC008, - TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009, - TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A, - TLS_ECDH_RSA_WITH_NULL_SHA = 0xC00B, - TLS_ECDH_RSA_WITH_RC4_128_SHA = 0xC00C, - TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA = 0xC00D, - TLS_ECDH_RSA_WITH_AES_128_CBC_SHA = 0xC00E, - TLS_ECDH_RSA_WITH_AES_256_CBC_SHA = 0xC00F, - TLS_ECDHE_RSA_WITH_NULL_SHA = 0xC010, - TLS_ECDHE_RSA_WITH_RC4_128_SHA = 0xC011, - TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA = 0xC012, - TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013, - TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014, - TLS_ECDH_anon_WITH_NULL_SHA = 0xC015, - TLS_ECDH_anon_WITH_RC4_128_SHA = 0xC016, - 
TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA = 0xC017, - TLS_ECDH_anon_WITH_AES_128_CBC_SHA = 0xC018, - TLS_ECDH_anon_WITH_AES_256_CBC_SHA = 0xC019, - - /* TLS 1.2 addenda, RFC 5246 */ - /* Initial state. */ - TLS_NULL_WITH_NULL_NULL = 0x0000, - - /* Server provided RSA certificate for key exchange. */ - TLS_RSA_WITH_NULL_MD5 = 0x0001, - TLS_RSA_WITH_NULL_SHA = 0x0002, - TLS_RSA_WITH_RC4_128_MD5 = 0x0004, - TLS_RSA_WITH_RC4_128_SHA = 0x0005, - TLS_RSA_WITH_3DES_EDE_CBC_SHA = 0x000A, - TLS_RSA_WITH_NULL_SHA256 = 0x003B, - TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C, - TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D, - - /* Server-authenticated (and optionally client-authenticated) - Diffie-Hellman. */ - TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA = 0x000D, - TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA = 0x0010, - TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA = 0x0013, - TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA = 0x0016, - TLS_DH_DSS_WITH_AES_128_CBC_SHA256 = 0x003E, - TLS_DH_RSA_WITH_AES_128_CBC_SHA256 = 0x003F, - TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040, - TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067, - TLS_DH_DSS_WITH_AES_256_CBC_SHA256 = 0x0068, - TLS_DH_RSA_WITH_AES_256_CBC_SHA256 = 0x0069, - TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A, - TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B, - - /* Completely anonymous Diffie-Hellman */ - TLS_DH_anon_WITH_RC4_128_MD5 = 0x0018, - TLS_DH_anon_WITH_3DES_EDE_CBC_SHA = 0x001B, - TLS_DH_anon_WITH_AES_128_CBC_SHA256 = 0x006C, - TLS_DH_anon_WITH_AES_256_CBC_SHA256 = 0x006D, - - /* Addenda from rfc 5288 AES Galois Counter Mode (GCM) Cipher Suites - for TLS. */ - TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C, - TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D, - TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E, - TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F, - TLS_DH_RSA_WITH_AES_128_GCM_SHA256 = 0x00A0, - TLS_DH_RSA_WITH_AES_256_GCM_SHA384 = 0x00A1, - TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2, - TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3, - TLS_DH_DSS_WITH_AES_128_GCM_SHA256 = 0x00A4, - TLS_DH_DSS_WITH_AES_256_GCM_SHA384 = 0x00A5, - TLS_DH_anon_WITH_AES_128_GCM_SHA256 = 0x00A6, - TLS_DH_anon_WITH_AES_256_GCM_SHA384 = 0x00A7, - - /* Addenda from rfc 5289 Elliptic Curve Cipher Suites with - HMAC SHA-256/384. */ - TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023, - TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024, - TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC025, - TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC026, - TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027, - TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028, - TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256 = 0xC029, - TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384 = 0xC02A, - - /* Addenda from rfc 5289 Elliptic Curve Cipher Suites with - SHA-256/384 and AES Galois Counter Mode (GCM) */ - TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B, - TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C, - TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02D, - TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02E, - TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F, - TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030, - TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256 = 0xC031, - TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384 = 0xC032, - - /* RFC 5746 - Secure Renegotiation */ - TLS_EMPTY_RENEGOTIATION_INFO_SCSV = 0x00FF, - - /* - * Tags for SSL 2 cipher kinds that are not specified - * for SSL 3. 
- */ - SSL_RSA_WITH_RC2_CBC_MD5 = 0xFF80, - SSL_RSA_WITH_IDEA_CBC_MD5 = 0xFF81, - SSL_RSA_WITH_DES_CBC_MD5 = 0xFF82, - SSL_RSA_WITH_3DES_EDE_CBC_MD5 = 0xFF83, - SSL_NO_SUCH_CIPHERSUITE = 0xFFFF -}; - -typedef enum { - kSSLClientCertNone, - kSSLClientCertRequested, - kSSLClientCertSent, - kSSLClientCertRejected -} SSLClientCertificateState; - -enum { - errSSLProtocol = -9800, - errSSLNegotiation = -9801, - errSSLFatalAlert = -9802, - errSSLWouldBlock = -9803, - errSSLSessionNotFound = -9804, - errSSLClosedGraceful = -9805, - errSSLClosedAbort = -9806, - errSSLXCertChainInvalid = -9807, - errSSLBadCert = -9808, - errSSLCrypto = -9809, - errSSLInternal = -9810, - errSSLModuleAttach = -9811, - errSSLUnknownRootCert = -9812, - errSSLNoRootCert = -9813, - errSSLCertExpired = -9814, - errSSLCertNotYetValid = -9815, - errSSLClosedNoNotify = -9816, - errSSLBufferOverflow = -9817, - errSSLBadCipherSuite = -9818, - errSSLPeerUnexpectedMsg = -9819, - errSSLPeerBadRecordMac = -9820, - errSSLPeerDecryptionFail = -9821, - errSSLPeerRecordOverflow = -9822, - errSSLPeerDecompressFail = -9823, - errSSLPeerHandshakeFail = -9824, - errSSLPeerBadCert = -9825, - errSSLPeerUnsupportedCert = -9826, - errSSLPeerCertRevoked = -9827, - errSSLPeerCertExpired = -9828, - errSSLPeerCertUnknown = -9829, - errSSLIllegalParam = -9830, - errSSLPeerUnknownCA = -9831, - errSSLPeerAccessDenied = -9832, - errSSLPeerDecodeError = -9833, - errSSLPeerDecryptError = -9834, - errSSLPeerExportRestriction = -9835, - errSSLPeerProtocolVersion = -9836, - errSSLPeerInsufficientSecurity = -9837, - errSSLPeerInternalError = -9838, - errSSLPeerUserCancelled = -9839, - errSSLPeerNoRenegotiation = -9840, - errSSLServerAuthCompleted = -9841, - errSSLClientCertRequested = -9842, - errSSLHostNameMismatch = -9843, - errSSLConnectionRefused = -9844, - errSSLDecryptionFail = -9845, - errSSLBadRecordMac = -9846, - errSSLRecordOverflow = -9847, - errSSLBadConfiguration = -9848, - errSSLLast = -9849 /* end of range, to be deleted */ -}; -""" - -FUNCTIONS = """ -OSStatus SSLSetConnection(SSLContextRef, SSLConnectionRef); -OSStatus SSLGetConnection(SSLContextRef, SSLConnectionRef *); -OSStatus SSLSetSessionOption(SSLContextRef, SSLSessionOption, Boolean); -OSStatus SSLSetClientSideAuthenticate(SSLContextRef, SSLAuthenticate); - -OSStatus SSLHandshake(SSLContextRef); -OSStatus SSLGetSessionState(SSLContextRef, SSLSessionState *); -OSStatus SSLGetNegotiatedProtocolVersion(SSLContextRef, SSLProtocol *); -OSStatus SSLSetPeerID(SSLContextRef, const void *, size_t); -OSStatus SSLGetPeerID(SSLContextRef, const void **, size_t *); -OSStatus SSLGetBufferedReadSize(SSLContextRef, size_t *); -OSStatus SSLRead(SSLContextRef, void *, size_t, size_t *); -OSStatus SSLWrite(SSLContextRef, const void *, size_t, size_t *); -OSStatus SSLClose(SSLContextRef); - -OSStatus SSLGetNumberSupportedCiphers(SSLContextRef, size_t *); -OSStatus SSLGetSupportedCiphers(SSLContextRef, SSLCipherSuite *, size_t *); -OSStatus SSLSetEnabledCiphers(SSLContextRef, const SSLCipherSuite *, size_t); -OSStatus SSLGetNumberEnabledCiphers(SSLContextRef, size_t *); -OSStatus SSLGetEnabledCiphers(SSLContextRef, SSLCipherSuite *, size_t *); -OSStatus SSLGetNegotiatedCipher(SSLContextRef, SSLCipherSuite *); -OSStatus SSLSetDiffieHellmanParams(SSLContextRef, const void *, size_t); -OSStatus SSLGetDiffieHellmanParams(SSLContextRef, const void **, size_t *); - -OSStatus SSLSetCertificateAuthorities(SSLContextRef, CFTypeRef, Boolean); -OSStatus SSLCopyCertificateAuthorities(SSLContextRef, CFArrayRef *); 
-OSStatus SSLCopyDistinguishedNames(SSLContextRef, CFArrayRef *); -OSStatus SSLSetCertificate(SSLContextRef, CFArrayRef); -OSStatus SSLGetClientCertificateState(SSLContextRef, - SSLClientCertificateState *); -OSStatus SSLCopyPeerTrust(SSLContextRef, SecTrustRef *trust); - -OSStatus SSLSetPeerDomainName(SSLContextRef, const char *, size_t); -OSStatus SSLGetPeerDomainNameLength(SSLContextRef, size_t *); -OSStatus SSLGetPeerDomainName(SSLContextRef, char *, size_t *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/aes.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/aes.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/aes.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/aes.py @@ -9,13 +9,7 @@ """ TYPES = """ -static const int Cryptography_HAS_AES_WRAP; -static const int Cryptography_HAS_AES_CTR128_ENCRYPT; - -struct aes_key_st { - ...; -}; -typedef struct aes_key_st AES_KEY; +typedef ... AES_KEY; """ FUNCTIONS = """ @@ -28,23 +22,5 @@ const unsigned char *, unsigned int); """ -MACROS = """ -/* The ctr128_encrypt function is only useful in 1.0.0. We can use EVP for - this in 1.0.1+. */ -void AES_ctr128_encrypt(const unsigned char *, unsigned char *, - size_t, const AES_KEY *, unsigned char[], - unsigned char[], unsigned int *); +CUSTOMIZATIONS = """ """ - -CUSTOMIZATIONS = """ -static const long Cryptography_HAS_AES_WRAP = 1; -#if CRYPTOGRAPHY_OPENSSL_110_OR_GREATER && !defined(LIBRESSL_VERSION_NUMBER) -static const int Cryptography_HAS_AES_CTR128_ENCRYPT = 0; -void (*AES_ctr128_encrypt)(const unsigned char *, unsigned char *, - size_t, const AES_KEY *, - unsigned char[], unsigned char[], - unsigned int *) = NULL; -#else -static const int Cryptography_HAS_AES_CTR128_ENCRYPT = 1; -#endif -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/asn1.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/asn1.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/asn1.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/asn1.py @@ -11,7 +11,6 @@ TYPES = """ typedef int... time_t; -typedef int ASN1_BOOLEAN; typedef ... ASN1_INTEGER; struct asn1_string_st { @@ -35,29 +34,17 @@ typedef ... ASN1_VALUE; typedef ... ASN1_ITEM_EXP; - -typedef ... ASN1_UTCTIME; +typedef ... 
ASN1_NULL; static const int V_ASN1_GENERALIZEDTIME; -static const int MBSTRING_FLAG; -static const int MBSTRING_ASC; -static const int MBSTRING_BMP; static const int MBSTRING_UTF8; -static const int MBSTRING_UNIV; """ FUNCTIONS = """ -ASN1_OBJECT *ASN1_OBJECT_new(void); void ASN1_OBJECT_free(ASN1_OBJECT *); -/* ASN1 OBJECT IDENTIFIER */ -ASN1_OBJECT *d2i_ASN1_OBJECT(ASN1_OBJECT **, const unsigned char **, long); - /* ASN1 STRING */ -ASN1_STRING *ASN1_STRING_new(void); -ASN1_STRING *ASN1_STRING_type_new(int); -void ASN1_STRING_free(ASN1_STRING *); unsigned char *ASN1_STRING_data(ASN1_STRING *); int ASN1_STRING_set(ASN1_STRING *, const void *, int); @@ -70,95 +57,59 @@ ASN1_IA5STRING *ASN1_IA5STRING_new(void); /* ASN1 INTEGER */ -ASN1_INTEGER *ASN1_INTEGER_new(void); void ASN1_INTEGER_free(ASN1_INTEGER *); int ASN1_INTEGER_set(ASN1_INTEGER *, long); /* ASN1 TIME */ ASN1_TIME *ASN1_TIME_new(void); void ASN1_TIME_free(ASN1_TIME *); -ASN1_GENERALIZEDTIME *ASN1_TIME_to_generalizedtime(ASN1_TIME *, - ASN1_GENERALIZEDTIME **); ASN1_TIME *ASN1_TIME_set(ASN1_TIME *, time_t); - -/* ASN1 UTCTIME */ -ASN1_UTCTIME *ASN1_UTCTIME_new(void); -void ASN1_UTCTIME_free(ASN1_UTCTIME *); -int ASN1_UTCTIME_cmp_time_t(const ASN1_UTCTIME *, time_t); -ASN1_UTCTIME *ASN1_UTCTIME_set(ASN1_UTCTIME *, time_t); +int ASN1_TIME_set_string(ASN1_TIME *, const char *); /* ASN1 GENERALIZEDTIME */ -int ASN1_GENERALIZEDTIME_set_string(ASN1_GENERALIZEDTIME *, const char *); ASN1_GENERALIZEDTIME *ASN1_GENERALIZEDTIME_set(ASN1_GENERALIZEDTIME *, time_t); void ASN1_GENERALIZEDTIME_free(ASN1_GENERALIZEDTIME *); -int i2d_ASN1_GENERALIZEDTIME(ASN1_GENERALIZEDTIME *, unsigned char **); /* ASN1 ENUMERATED */ ASN1_ENUMERATED *ASN1_ENUMERATED_new(void); void ASN1_ENUMERATED_free(ASN1_ENUMERATED *); int ASN1_ENUMERATED_set(ASN1_ENUMERATED *, long); -int i2d_ASN1_ENUMERATED(ASN1_ENUMERATED *, unsigned char **); ASN1_VALUE *ASN1_item_d2i(ASN1_VALUE **, const unsigned char **, long, const ASN1_ITEM *); int ASN1_BIT_STRING_set_bit(ASN1_BIT_STRING *, int, int); -""" - -MACROS = """ /* These became const ASN1_* in 1.1.0 */ -int i2d_ASN1_OBJECT(ASN1_OBJECT *, unsigned char **); int ASN1_STRING_type(ASN1_STRING *); int ASN1_STRING_to_UTF8(unsigned char **, ASN1_STRING *); long ASN1_ENUMERATED_get(ASN1_ENUMERATED *); int i2a_ASN1_INTEGER(BIO *, ASN1_INTEGER *); +/* This became const ASN1_TIME in 1.1.0f */ +ASN1_GENERALIZEDTIME *ASN1_TIME_to_generalizedtime(ASN1_TIME *, + ASN1_GENERALIZEDTIME **); + ASN1_UTF8STRING *ASN1_UTF8STRING_new(void); void ASN1_UTF8STRING_free(ASN1_UTF8STRING *); ASN1_BIT_STRING *ASN1_BIT_STRING_new(void); void ASN1_BIT_STRING_free(ASN1_BIT_STRING *); -int i2d_ASN1_BIT_STRING(ASN1_BIT_STRING *, unsigned char **); -int i2d_ASN1_OCTET_STRING(ASN1_OCTET_STRING *, unsigned char **); -int i2d_ASN1_INTEGER(ASN1_INTEGER *, unsigned char **); +const ASN1_ITEM *ASN1_ITEM_ptr(ASN1_ITEM_EXP *); + /* This is not a macro, but is const on some versions of OpenSSL */ int ASN1_BIT_STRING_get_bit(ASN1_BIT_STRING *, int); -ASN1_TIME *M_ASN1_TIME_dup(void *); -const ASN1_ITEM *ASN1_ITEM_ptr(ASN1_ITEM_EXP *); - -/* These aren't macros these arguments are all const X on openssl > 1.0.x */ int ASN1_TIME_print(BIO *, ASN1_TIME *); int ASN1_STRING_length(ASN1_STRING *); -ASN1_STRING *ASN1_STRING_dup(ASN1_STRING *); -int ASN1_STRING_cmp(ASN1_STRING *, ASN1_STRING *); -int ASN1_UTCTIME_print(BIO *, ASN1_UTCTIME *); - -ASN1_OCTET_STRING *ASN1_OCTET_STRING_dup(ASN1_OCTET_STRING *); -int ASN1_OCTET_STRING_cmp(ASN1_OCTET_STRING *, ASN1_OCTET_STRING *); - 
-ASN1_INTEGER *ASN1_INTEGER_dup(ASN1_INTEGER *); -int ASN1_INTEGER_cmp(ASN1_INTEGER *, ASN1_INTEGER *); -long ASN1_INTEGER_get(ASN1_INTEGER *); +int ASN1_STRING_set_default_mask_asc(char *); BIGNUM *ASN1_INTEGER_to_BN(ASN1_INTEGER *, BIGNUM *); ASN1_INTEGER *BN_to_ASN1_INTEGER(BIGNUM *, ASN1_INTEGER *); -/* These isn't a macro the arg is const on openssl 1.0.2+ */ -int ASN1_GENERALIZEDTIME_check(ASN1_GENERALIZEDTIME *); -int ASN1_UTCTIME_check(ASN1_UTCTIME *); - -/* Not a macro, const on openssl 1.0 */ -int ASN1_STRING_set_default_mask_asc(char *); - int i2d_ASN1_TYPE(ASN1_TYPE *, unsigned char **); ASN1_TYPE *d2i_ASN1_TYPE(ASN1_TYPE **, const unsigned char **, long); + +ASN1_NULL *ASN1_NULL_new(void); """ CUSTOMIZATIONS = """ -/* This macro is removed in 1.1.0. We re-add it if required to support - pyOpenSSL versions older than whatever resolves - https://github.com/pyca/pyopenssl/issues/431 */ -#if !defined(M_ASN1_TIME_dup) -#define M_ASN1_TIME_dup(a) (ASN1_TIME *)ASN1_STRING_dup((const ASN1_STRING *)a) -#endif """ diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/bignum.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/bignum.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/bignum.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/bignum.py @@ -10,13 +10,21 @@ TYPES = """ typedef ... BN_CTX; +typedef ... BN_MONT_CTX; typedef ... BIGNUM; typedef int... BN_ULONG; """ FUNCTIONS = """ +#define BN_FLG_CONSTTIME ... + +void BN_set_flags(BIGNUM *, int); + BIGNUM *BN_new(void); void BN_free(BIGNUM *); +void BN_clear_free(BIGNUM *); + +int BN_rand_range(BIGNUM *, const BIGNUM *); BN_CTX *BN_CTX_new(void); void BN_CTX_free(BN_CTX *); @@ -25,17 +33,18 @@ BIGNUM *BN_CTX_get(BN_CTX *); void BN_CTX_end(BN_CTX *); -BIGNUM *BN_copy(BIGNUM *, const BIGNUM *); +BN_MONT_CTX *BN_MONT_CTX_new(void); +int BN_MONT_CTX_set(BN_MONT_CTX *, const BIGNUM *, BN_CTX *); +void BN_MONT_CTX_free(BN_MONT_CTX *); + BIGNUM *BN_dup(const BIGNUM *); int BN_set_word(BIGNUM *, BN_ULONG); -BN_ULONG BN_get_word(const BIGNUM *); const BIGNUM *BN_value_one(void); char *BN_bn2hex(const BIGNUM *); int BN_hex2bn(BIGNUM **, const char *); -int BN_dec2bn(BIGNUM **, const char *); int BN_bn2bin(const BIGNUM *, unsigned char *); BIGNUM *BN_bin2bn(const unsigned char *, int, BIGNUM *); @@ -43,11 +52,9 @@ int BN_num_bits(const BIGNUM *); int BN_cmp(const BIGNUM *, const BIGNUM *); +int BN_is_negative(const BIGNUM *); int BN_add(BIGNUM *, const BIGNUM *, const BIGNUM *); int BN_sub(BIGNUM *, const BIGNUM *, const BIGNUM *); -int BN_mul(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_sqr(BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_div(BIGNUM *, BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); int BN_nnmod(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); int BN_mod_add(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); @@ -55,33 +62,23 @@ BN_CTX *); int BN_mod_mul(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_mod_sqr(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_exp(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); int BN_mod_exp(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_gcd(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); +int BN_mod_exp_mont(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, + BN_CTX *, BN_MONT_CTX *); +int BN_mod_exp_mont_consttime(BIGNUM *, const BIGNUM *, const BIGNUM *, + const BIGNUM *, BN_CTX *, BN_MONT_CTX *); BIGNUM *BN_mod_inverse(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int 
BN_set_bit(BIGNUM *, int); -int BN_clear_bit(BIGNUM *, int); - -int BN_is_bit_set(const BIGNUM *, int); - -int BN_mask_bits(BIGNUM *, int); -""" - -MACROS = """ int BN_num_bytes(const BIGNUM *); -int BN_zero(BIGNUM *); -int BN_one(BIGNUM *); int BN_mod(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_lshift(BIGNUM *, const BIGNUM *, int); -int BN_lshift1(BIGNUM *, BIGNUM *); - -int BN_rshift(BIGNUM *, BIGNUM *, int); -int BN_rshift1(BIGNUM *, BIGNUM *); +/* The following 3 prime methods are exposed for Tribler. */ +int BN_generate_prime_ex(BIGNUM *, int, int, const BIGNUM *, + const BIGNUM *, BN_GENCB *); +int BN_is_prime_ex(const BIGNUM *, int, BN_CTX *, BN_GENCB *); +const int BN_prime_checks_for_size(int); """ CUSTOMIZATIONS = """ diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/bio.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/bio.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/bio.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/bio.py @@ -9,136 +9,42 @@ """ TYPES = """ -typedef struct bio_st BIO; -typedef void bio_info_cb(BIO *, int, const char *, int, long, long); -typedef ... bio_st; +typedef ... BIO; typedef ... BIO_METHOD; -typedef ... BUF_MEM; - -static const int BIO_TYPE_MEM; -static const int BIO_TYPE_FILE; -static const int BIO_TYPE_FD; -static const int BIO_TYPE_SOCKET; -static const int BIO_TYPE_CONNECT; -static const int BIO_TYPE_ACCEPT; -static const int BIO_TYPE_NULL; -static const int BIO_CLOSE; -static const int BIO_NOCLOSE; -static const int BIO_TYPE_SOURCE_SINK; -static const int BIO_CTRL_RESET; -static const int BIO_CTRL_EOF; -static const int BIO_CTRL_SET; -static const int BIO_CTRL_SET_CLOSE; -static const int BIO_CTRL_FLUSH; -static const int BIO_CTRL_DUP; -static const int BIO_CTRL_GET_CLOSE; -static const int BIO_CTRL_INFO; -static const int BIO_CTRL_GET; -static const int BIO_CTRL_PENDING; -static const int BIO_CTRL_WPENDING; -static const int BIO_C_FILE_SEEK; -static const int BIO_C_FILE_TELL; -static const int BIO_TYPE_NONE; -static const int BIO_TYPE_NBIO_TEST; -static const int BIO_TYPE_BIO; -static const int BIO_TYPE_DESCRIPTOR; -static const int BIO_FLAGS_READ; -static const int BIO_FLAGS_WRITE; -static const int BIO_FLAGS_IO_SPECIAL; -static const int BIO_FLAGS_RWS; -static const int BIO_FLAGS_SHOULD_RETRY; -static const int BIO_TYPE_NULL_FILTER; -static const int BIO_TYPE_SSL; -static const int BIO_TYPE_MD; -static const int BIO_TYPE_BUFFER; -static const int BIO_TYPE_CIPHER; -static const int BIO_TYPE_BASE64; -static const int BIO_TYPE_FILTER; """ FUNCTIONS = """ int BIO_free(BIO *); -void BIO_vfree(BIO *); void BIO_free_all(BIO *); -BIO *BIO_push(BIO *, BIO *); -BIO *BIO_pop(BIO *); -BIO *BIO_next(BIO *); -BIO *BIO_find_type(BIO *, int); BIO *BIO_new_file(const char *, const char *); -BIO *BIO_new_fp(FILE *, int); -BIO *BIO_new_fd(int, int); -BIO *BIO_new_socket(int, int); -long BIO_ctrl(BIO *, int, long, void *); -long BIO_callback_ctrl( - BIO *, - int, - void (*)(struct bio_st *, int, const char *, int, long, long) -); -long BIO_int_ctrl(BIO *, int, long, int); +BIO *BIO_new_dgram(int, int); size_t BIO_ctrl_pending(BIO *); -size_t BIO_ctrl_wpending(BIO *); int BIO_read(BIO *, void *, int); int BIO_gets(BIO *, char *, int); int BIO_write(BIO *, const void *, int); -int BIO_puts(BIO *, const char *); -int BIO_method_type(const BIO *); -""" - -MACROS = """ /* Added in 1.1.0 */ int BIO_up_ref(BIO *); -/* These added const to BIO_METHOD in 1.1.0 */ BIO *BIO_new(BIO_METHOD *); BIO_METHOD *BIO_s_mem(void); BIO_METHOD *BIO_s_file(void); -BIO_METHOD 
*BIO_s_fd(void); -BIO_METHOD *BIO_s_socket(void); -BIO_METHOD *BIO_s_null(void); -BIO_METHOD *BIO_f_null(void); -BIO_METHOD *BIO_f_buffer(void); -/* BIO_new_mem_buf became const void * in 1.0.2g */ -BIO *BIO_new_mem_buf(void *, int); -long BIO_set_fd(BIO *, long, int); -long BIO_get_fd(BIO *, char *); +BIO_METHOD *BIO_s_datagram(void); +BIO *BIO_new_mem_buf(const void *, int); long BIO_set_mem_eof_return(BIO *, int); long BIO_get_mem_data(BIO *, char **); -long BIO_set_mem_buf(BIO *, BUF_MEM *, int); -long BIO_get_mem_ptr(BIO *, BUF_MEM **); -long BIO_set_fp(BIO *, FILE *, int); -long BIO_get_fp(BIO *, FILE **); long BIO_read_filename(BIO *, char *); -long BIO_write_filename(BIO *, char *); -long BIO_append_filename(BIO *, char *); -long BIO_rw_filename(BIO *, char *); int BIO_should_read(BIO *); int BIO_should_write(BIO *); int BIO_should_io_special(BIO *); -int BIO_retry_type(BIO *); int BIO_should_retry(BIO *); int BIO_reset(BIO *); -int BIO_seek(BIO *, int); -int BIO_tell(BIO *); -int BIO_flush(BIO *); -int BIO_eof(BIO *); -int BIO_set_close(BIO *,long); -int BIO_get_close(BIO *); -int BIO_pending(BIO *); -int BIO_wpending(BIO *); -int BIO_get_info_callback(BIO *, bio_info_cb **); -int BIO_set_info_callback(BIO *, bio_info_cb *); -long BIO_get_buffer_num_lines(BIO *); -long BIO_set_read_buffer_size(BIO *, long); -long BIO_set_write_buffer_size(BIO *, long); -long BIO_set_buffer_size(BIO *, long); -long BIO_set_buffer_read_data(BIO *, void *, long); long BIO_set_nbio(BIO *, long); void BIO_set_retry_read(BIO *); void BIO_clear_retry_flags(BIO *); """ CUSTOMIZATIONS = """ -#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110PRE4 || defined(LIBRESSL_VERSION_NUMBER) +#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110 && !CRYPTOGRAPHY_LIBRESSL_27_OR_GREATER int BIO_up_ref(BIO *b) { CRYPTO_add(&b->references, 1, CRYPTO_LOCK_BIO); return 1; diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/callbacks.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/callbacks.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/callbacks.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/callbacks.py @@ -4,108 +4,85 @@ from __future__ import absolute_import, division, print_function -import sys - -import cffi - INCLUDES = """ #include #include #include #include + +#ifdef _WIN32 +#define WIN32_LEAN_AND_MEAN +#include +#include +#include +#else +#include +#include +#include +#endif """ TYPES = """ -static const long Cryptography_STATIC_CALLBACKS; - -/* crypto.h - * CRYPTO_set_locking_callback - * void (*cb)(int mode, int type, const char *file, int line) - */ -extern "Python" void Cryptography_locking_cb(int, int, const char *, int); - -/* pem.h - * int pem_password_cb(char *buf, int size, int rwflag, void *userdata); - */ -extern "Python" int Cryptography_pem_password_cb(char *, int, int, void *); - -/* rand.h - * int (*bytes)(unsigned char *buf, int num); - * int (*status)(void); - */ -extern "Python" int Cryptography_rand_bytes(unsigned char *, int); -extern "Python" int Cryptography_rand_status(void); +typedef struct { + char *password; + int length; + int called; + int error; + int maxsize; +} CRYPTOGRAPHY_PASSWORD_DATA; """ FUNCTIONS = """ -int _setup_ssl_threads(void); -""" - -MACROS = """ +int Cryptography_setup_ssl_threads(void); +int Cryptography_pem_password_cb(char *, int, int, void *); """ CUSTOMIZATIONS = """ -static const long Cryptography_STATIC_CALLBACKS = 1; -""" - -if cffi.__version_info__ < (1, 4, 0) or sys.version_info >= (3, 5): - # backwards compatibility for old cffi version on PyPy - # and Python >=3.5 
(https://github.com/pyca/cryptography/issues/2970) - TYPES = "static const long Cryptography_STATIC_CALLBACKS;" - CUSTOMIZATIONS = """static const long Cryptography_STATIC_CALLBACKS = 0; -""" - -CUSTOMIZATIONS += """ /* This code is derived from the locking code found in the Python _ssl module's locking callback for OpenSSL. Copyright 2001-2016 Python Software Foundation; All Rights Reserved. + + It has been subsequently modified to use cross platform locking without + using CPython APIs by Armin Rigo of the PyPy project. */ +#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110 #ifdef _WIN32 -#ifdef _MSC_VER -#ifdef inline -#undef inline -#endif -#define inline __inline -#endif -#include -typedef CRITICAL_SECTION mutex1_t; -static inline void mutex1_init(mutex1_t *mutex) { +typedef CRITICAL_SECTION Cryptography_mutex; +static __inline void cryptography_mutex_init(Cryptography_mutex *mutex) { InitializeCriticalSection(mutex); } -static inline void mutex1_lock(mutex1_t *mutex) { +static __inline void cryptography_mutex_lock(Cryptography_mutex *mutex) { EnterCriticalSection(mutex); } -static inline void mutex1_unlock(mutex1_t *mutex) { +static __inline void cryptography_mutex_unlock(Cryptography_mutex *mutex) { LeaveCriticalSection(mutex); } #else -#include -#include -#include -typedef pthread_mutex_t mutex1_t; -#define ASSERT_STATUS(call) \ - if (call != 0) { \ - perror("Fatal error in _cffi_ssl: " #call); \ - abort(); \ +typedef pthread_mutex_t Cryptography_mutex; +#define ASSERT_STATUS(call) \ + if ((call) != 0) { \ + perror("Fatal error in callback initialization: " #call); \ + abort(); \ } -static inline void mutex1_init(mutex1_t *mutex) { +static inline void cryptography_mutex_init(Cryptography_mutex *mutex) { #if !defined(pthread_mutexattr_default) # define pthread_mutexattr_default ((pthread_mutexattr_t *)NULL) #endif ASSERT_STATUS(pthread_mutex_init(mutex, pthread_mutexattr_default)); } -static inline void mutex1_lock(mutex1_t *mutex) { +static inline void cryptography_mutex_lock(Cryptography_mutex *mutex) { ASSERT_STATUS(pthread_mutex_lock(mutex)); } -static inline void mutex1_unlock(mutex1_t *mutex) { +static inline void cryptography_mutex_unlock(Cryptography_mutex *mutex) { ASSERT_STATUS(pthread_mutex_unlock(mutex)); } #endif + static unsigned int _ssl_locks_count = 0; -static mutex1_t *_ssl_locks = NULL; +static Cryptography_mutex *_ssl_locks = NULL; static void _ssl_thread_locking_function(int mode, int n, const char *file, int line) { @@ -129,24 +106,24 @@ } if (mode & CRYPTO_LOCK) { - mutex1_lock(_ssl_locks + n); + cryptography_mutex_lock(_ssl_locks + n); } else { - mutex1_unlock(_ssl_locks + n); + cryptography_mutex_unlock(_ssl_locks + n); } } -static void init_mutexes(void) -{ +static void init_mutexes(void) { int i; - for (i = 0; i < _ssl_locks_count; i++) { - mutex1_init(_ssl_locks + i); + for (i = 0; i < _ssl_locks_count; i++) { + cryptography_mutex_init(_ssl_locks + i); } } -int _setup_ssl_threads(void) { + +int Cryptography_setup_ssl_threads(void) { if (_ssl_locks == NULL) { _ssl_locks_count = CRYPTO_num_locks(); - _ssl_locks = malloc(sizeof(mutex1_t) * _ssl_locks_count); + _ssl_locks = calloc(_ssl_locks_count, sizeof(Cryptography_mutex)); if (_ssl_locks == NULL) { return 0; } @@ -158,4 +135,34 @@ } return 1; } +#else +int (*Cryptography_setup_ssl_threads)(void) = NULL; +#endif + +typedef struct { + char *password; + int length; + int called; + int error; + int maxsize; +} CRYPTOGRAPHY_PASSWORD_DATA; + +int Cryptography_pem_password_cb(char *buf, int size, + int rwflag, void 
*userdata) { + /* The password cb is only invoked if OpenSSL decides the private + key is encrypted. So this path only occurs if it needs a password */ + CRYPTOGRAPHY_PASSWORD_DATA *st = (CRYPTOGRAPHY_PASSWORD_DATA *)userdata; + st->called += 1; + st->maxsize = size; + if (st->length == 0) { + st->error = -1; + return 0; + } else if (st->length < size) { + memcpy(buf, st->password, st->length); + return st->length; + } else { + st->error = -2; + return 0; + } +} """ diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/cmac.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/cmac.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/cmac.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/cmac.py @@ -5,20 +5,16 @@ from __future__ import absolute_import, division, print_function INCLUDES = """ -#if !defined(OPENSSL_NO_CMAC) && CRYPTOGRAPHY_OPENSSL_101_OR_GREATER +#if !defined(OPENSSL_NO_CMAC) #include #endif """ TYPES = """ -static const int Cryptography_HAS_CMAC; typedef ... CMAC_CTX; """ FUNCTIONS = """ -""" - -MACROS = """ CMAC_CTX *CMAC_CTX_new(void); int CMAC_Init(CMAC_CTX *, const void *, size_t, const EVP_CIPHER *, ENGINE *); int CMAC_Update(CMAC_CTX *, const void *, size_t); @@ -28,17 +24,4 @@ """ CUSTOMIZATIONS = """ -#if !defined(OPENSSL_NO_CMAC) && CRYPTOGRAPHY_OPENSSL_101_OR_GREATER -static const long Cryptography_HAS_CMAC = 1; -#else -static const long Cryptography_HAS_CMAC = 0; -typedef void CMAC_CTX; -CMAC_CTX *(*CMAC_CTX_new)(void) = NULL; -int (*CMAC_Init)(CMAC_CTX *, const void *, size_t, const EVP_CIPHER *, - ENGINE *) = NULL; -int (*CMAC_Update)(CMAC_CTX *, const void *, size_t) = NULL; -int (*CMAC_Final)(CMAC_CTX *, unsigned char *, size_t *) = NULL; -int (*CMAC_CTX_copy)(CMAC_CTX *, const CMAC_CTX *) = NULL; -void (*CMAC_CTX_free)(CMAC_CTX *) = NULL; -#endif """ diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/cms.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/cms.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/cms.py +++ /dev/null @@ -1,116 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ From pypy.commits at gmail.com Wed Aug 14 12:42:14 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 14 Aug 2019 09:42:14 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: tweak test, add missing get_cipher Message-ID: <5d5439e6.1c69fb81.d5e3e.2ea3@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97176:c9b561590324 Date: 2019-08-14 19:35 +0300 http://bitbucket.org/pypy/pypy/changeset/c9b561590324/ Log: tweak test, add missing get_cipher diff --git a/lib-python/3/test/test_ssl.py b/lib-python/3/test/test_ssl.py --- a/lib-python/3/test/test_ssl.py +++ b/lib-python/3/test/test_ssl.py @@ -1814,7 +1814,8 @@ sslobj = ctx.wrap_bio(incoming, outgoing, False, 'localhost') self.assertIs(sslobj._sslobj.owner, sslobj) self.assertIsNone(sslobj.cipher()) - self.assertIsNone(sslobj.version()) + # cypthon implementation detail + # self.assertIsNone(sslobj.version()) self.assertIsNotNone(sslobj.shared_ciphers()) self.assertRaises(ValueError, sslobj.getpeercert) if 'tls-unique' in ssl.CHANNEL_BINDING_TYPES: @@ -2843,6 +2844,10 @@ else: s.close() + def test_socketserver_urlib_uses_bisect(self): + b = urllib.request.bisect + raise ValueError('urllib.request.bisect is %s' % str(b)) + def test_socketserver(self): """Using socketserver to create and manage SSL connections.""" server = make_https_server(self, certfile=CERTFILE) @@ -3209,7 +3214,7 @@ server_hostname="localhost") as s: with self.assertRaises(OSError): s.connect((HOST, server.port)) - self.assertEqual("NO_SHARED_CIPHER", server.conn_errors[0].reason) + self.assertIn("no shared cipher", server.conn_errors[0]) def test_version_basic(self): """ diff --git a/lib_pypy/_cffi_ssl/_stdssl/__init__.py b/lib_pypy/_cffi_ssl/_stdssl/__init__.py --- a/lib_pypy/_cffi_ssl/_stdssl/__init__.py +++ b/lib_pypy/_cffi_ssl/_stdssl/__init__.py @@ -162,7 +162,7 @@ ffi.memmove(buf, password, len(password)) return len(password) -if lib.Cryptography_STATIC_CALLBACKS: +if 0: ffi.def_extern(_Cryptography_pem_password_cb) Cryptography_pem_password_cb = lib.Cryptography_pem_password_cb else: @@ -749,6 +749,51 @@ bits = lib.SSL_CIPHER_get_bits(cipher, ffi.NULL) return (cipher_name, cipher_protocol, bits) +def cipher_to_dict(cipher): + ccipher_name = lib.SSL_CIPHER_get_name(cipher) + buf = ffi.new('char[512]') + alg_bits = ffi.new('int[4]') + if ccipher_name == ffi.NULL: + cipher_name = None + else: + cipher_name = _str_from_buf(ccipher_name) + + ccipher_protocol = lib.SSL_CIPHER_get_version(cipher) + if ccipher_protocol == ffi.NULL: + cipher_protocol = None + else: + cipher_protocol = _str_from_buf(ccipher_protocol) + + cipher_id = lib.SSL_CIPHER_get_id(cipher); + lib.SSL_CIPHER_description(cipher, buf, 511) + description = _str_from_buf(buf) + strength_bits = lib.SSL_CIPHER_get_bits(cipher, alg_bits) + ret = { + 'id' : cipher_id, + 'name' : cipher_name, + 'protocol' : cipher_protocol, + 'description' : description, + 'strength_bits': strength_bits, + 'alg_bits' : alg_bits[0], + } + if OPENSSL_VERSION_INFO > (1, 1, 0, 0, 0): + aead = lib.SSL_CIPHER_is_aead(cipher) + nid = lib.SSL_CIPHER_get_cipher_nid(cipher) + skcipher = OBJ_nid2ln(nid) if nid != NID_undef else None + nid = lib.SSL_CIPHER_get_digest_nid(cipher); + digest = OBJ_nid2ln(nid) if nid != NID_undef else None + nid = lib.SSL_CIPHER_get_kx_nid(cipher); + kx = OBJ_nid2ln(nid) if nid != NID_undef else None + nid = SSL_CIPHER_get_auth_nid(cipher); + auth = OBJ_nid2ln(nid) if nid != NID_undef else None + ret.update({'aead' : 
bool(aead), + 'symmmetric' : skcipher, + 'digest' : digest, + 'kea' : kx, + 'auth' : auth, + }) + return ret + class SSLSession(object): def __new__(cls, ssl): @@ -974,6 +1019,20 @@ lib.ERR_clear_error() raise ssl_error("No cipher can be selected.") + def get_ciphers(self): + ssl = lib.SSL_new(self.ctx) + try: + ciphers = lib.SSL_get_ciphers(ssl) + if ciphers == ffi.NULL: + return None + count = lib.sk_SSL_CIPHER_num(ciphers) + res = [None] * count + for i in range(count): + dct = cipher_to_dict(lib.sk_SSL_CIPHER_value(ciphers, i)) + res[i] = dct + return res + finally: + lib.SSL_free(ssl) def load_cert_chain(self, certfile, keyfile=None, password=None): if keyfile is None: From pypy.commits at gmail.com Wed Aug 14 12:42:12 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 14 Aug 2019 09:42:12 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: merge default into py3.6 Message-ID: <5d5439e4.1c69fb81.940fc.080e@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97175:168ba5471494 Date: 2019-08-14 17:02 +0300 http://bitbucket.org/pypy/pypy/changeset/168ba5471494/ Log: merge default into py3.6 diff too long, truncating to 2000 out of 25605 lines diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -75,7 +75,9 @@ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ ^lib_pypy/_libmpdec/.+.o$ -^lib_pypy/.+.c$ +^lib_pypy/.+_cffi.c$ +^lib_pypy/_curses_cffi_check.c +^lib_pypy/_pypy_openssl.c ^lib_pypy/.+.o$ ^lib_pypy/.+.so$ ^lib_pypy/.+.pyd$ diff --git a/extra_tests/ctypes_tests/test_win32.py b/extra_tests/ctypes_tests/test_win32.py --- a/extra_tests/ctypes_tests/test_win32.py +++ b/extra_tests/ctypes_tests/test_win32.py @@ -5,7 +5,7 @@ import pytest @pytest.mark.skipif("sys.platform != 'win32'") -def test_VARIANT(self): +def test_VARIANT(): from ctypes import wintypes a = wintypes.VARIANT_BOOL() assert a.value is False diff --git a/extra_tests/test_datetime.py b/extra_tests/test_datetime.py --- a/extra_tests/test_datetime.py +++ b/extra_tests/test_datetime.py @@ -130,7 +130,7 @@ import os import time if os.name == 'nt': - skip("setting os.environ['TZ'] ineffective on windows") + pytest.skip("setting os.environ['TZ'] ineffective on windows") try: prev_tz = os.environ.get("TZ") os.environ["TZ"] = "GMT" diff --git a/lib_pypy/_cffi_ssl/README.md b/lib_pypy/_cffi_ssl/README.md --- a/lib_pypy/_cffi_ssl/README.md +++ b/lib_pypy/_cffi_ssl/README.md @@ -1,21 +1,22 @@ # PyPy's SSL module -All of the CFFI code is copied from cryptography, wich patches contributed -back to cryptography. PyPy vendors it's own copy of the cffi backend thus -it renames the compiled shared object to _pypy_openssl.so (which means -that cryptography can ship their own cffi backend) +All of the CFFI code is copied from cryptography. 
PyPy vendors it's own copy of +the cffi backend thus it renames the compiled shared object to _pypy_openssl.so +(which means that cryptography can ship their own cffi backend) -NOTE: currently, we have the following changes: +# Modifications to cryptography 2.7 -* ``_cffi_src/openssl/callbacks.py`` to not rely on the CPython C API - (this change is now backported) - -* ``_cffi_src/utils.py`` for issue #2575 (29c9a89359e4) - -* ``_cffi_src/openssl/x509_vfy.py`` for issue #2605 (ca4d0c90f5a1) - -* ``_cffi_src/openssl/pypy_win32_extra.py`` for Win32-only functionality like ssl.enum_certificates() - +- `_cffi_src/openssl/asn1.py` : revert removal of `ASN1_TIME_print`, + `ASN1_ITEM`, `ASN1_ITEM_EXP`, `ASN1_VALUE`, `ASN1_item_d2i` +- `_cffi_src/openssl/bio.py` : revert removal of `BIO_s_file`, `BIO_read_filename` +- `_cffi_src/openssl/evp.py` : revert removal of `EVP_MD_size` +- `_cffi_src/openssl/nid.py` : revert removal of `NID_ad_OCSP`, + `NID_info_access`, `NID_ad_ca_issuers`, `NID_crl_distribution_points` +- `_cffi_src/openssl/pem.py` : revert removal of `PEM_read_bio_X509_AUX` +- `_cffi_src/openssl/x509.py` : revert removal of `X509_get_ext_by_NID`, + `i2d_X509` +- `_cffi_src/openssl/x509v3.py` : revert removal of `X509V3_EXT_get`, + `X509V3_EXT_METHOD` # Tests? @@ -25,11 +26,8 @@ Copy over all the sources into the folder `lib_pypy/_cffi_ssl/*`. Updating the cffi backend can be simply done by the following command:: - $ cp -r /src/_cffi_src/* . - -NOTE: you need to keep our version of ``_cffi_src/openssl/callbacks.py`` -for now! + $ cp -r /src/* . # Crpytography version -Copied over release version `1.7.2` +Copied over release version `2.7` diff --git a/lib_pypy/_cffi_ssl/_cffi_src/build_commoncrypto.py b/lib_pypy/_cffi_ssl/_cffi_src/build_commoncrypto.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/build_commoncrypto.py +++ /dev/null @@ -1,33 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -from _cffi_src.utils import build_ffi_for_binding - - -ffi = build_ffi_for_binding( - module_name="_commoncrypto", - module_prefix="_cffi_src.commoncrypto.", - modules=[ - "cf", - "common_digest", - "common_hmac", - "common_key_derivation", - "common_cryptor", - "common_symmetric_key_wrap", - "seccertificate", - "secimport", - "secitem", - "seckey", - "seckeychain", - "secpolicy", - "sectransform", - "sectrust", - "secure_transport", - ], - extra_link_args=[ - "-framework", "Security", "-framework", "CoreFoundation" - ], -) diff --git a/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py b/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py --- a/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py @@ -13,31 +13,43 @@ def _get_openssl_libraries(platform): + if os.environ.get("CRYPTOGRAPHY_SUPPRESS_LINK_FLAGS", None): + return [] # OpenSSL goes by a different library name on different operating systems. 
- if platform == "darwin": - return _osx_libraries( - os.environ.get("CRYPTOGRAPHY_OSX_NO_LINK_FLAGS") + if platform == "win32" and compiler_type() == "msvc": + windows_link_legacy_openssl = os.environ.get( + "CRYPTOGRAPHY_WINDOWS_LINK_LEGACY_OPENSSL", None ) - elif platform == "win32": - if compiler_type() == "msvc": + if windows_link_legacy_openssl is None: + # Link against the 1.1.0 names + libs = ["libssl", "libcrypto"] + else: + # Link against the 1.0.2 and lower names libs = ["libeay32", "ssleay32"] - else: - libs = ["ssl", "crypto"] return libs + ["advapi32", "crypt32", "gdi32", "user32", "ws2_32"] else: + # darwin, linux, mingw all use this path # In some circumstances, the order in which these libs are # specified on the linker command-line is significant; # libssl must come before libcrypto - # (http://marc.info/?l=openssl-users&m=135361825921871) + # (https://marc.info/?l=openssl-users&m=135361825921871) return ["ssl", "crypto"] -def _osx_libraries(build_static): - # For building statically we don't want to pass the -lssl or -lcrypto flags - if build_static == "1": +def _extra_compile_args(platform): + """ + We set -Wconversion args here so that we only do Wconversion checks on the + code we're compiling and not on cffi itself (as passing -Wconversion in + CFLAGS would do). We set no error on sign conversion because some + function signatures in OpenSSL have changed from long -> unsigned long + in the past. Since that isn't a precision issue we don't care. + When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_110 we can + revisit this. + """ + if platform not in ["win32", "hp-ux11", "sunos5"]: + return ["-Wconversion", "-Wno-error=sign-conversion"] + else: return [] - else: - return ["ssl", "crypto"] ffi = build_ffi_for_binding( @@ -52,9 +64,9 @@ "bignum", "bio", "cmac", - "cms", "conf", "crypto", + "ct", "dh", "dsa", "ec", @@ -63,6 +75,7 @@ "engine", "err", "evp", + "fips", "hmac", "nid", "objects", @@ -82,5 +95,13 @@ "callbacks", ], libraries=_get_openssl_libraries(sys.platform), + # These args are passed here so that we only do Wconversion checks on the + # code we're compiling and not on cffi itself (as passing -Wconversion in + # CFLAGS would do). We set no error on sign convesrion because some + # function signatures in OpenSSL have changed from long -> unsigned long + # in the past. Since that isn't a precision issue we don't care. + # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_110 we can + # revisit this. + extra_compile_args=_extra_compile_args(sys.platform), extra_link_args=extra_link_args(compiler_type()), ) diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/__init__.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/__init__.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/cf.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/cf.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/cf.py +++ /dev/null @@ -1,113 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef bool Boolean; -typedef signed long OSStatus; -typedef unsigned char UInt8; -typedef uint32_t UInt32; - -typedef const void * CFAllocatorRef; -const CFAllocatorRef kCFAllocatorDefault; -typedef ... *CFDataRef; -typedef signed long long CFIndex; -typedef ... *CFStringRef; -typedef ... *CFArrayRef; -typedef ... *CFMutableArrayRef; -typedef ... *CFBooleanRef; -typedef ... *CFErrorRef; -typedef ... *CFNumberRef; -typedef ... *CFTypeRef; -typedef ... *CFDictionaryRef; -typedef ... *CFMutableDictionaryRef; -typedef struct { - ...; -} CFDictionaryKeyCallBacks; -typedef struct { - ...; -} CFDictionaryValueCallBacks; -typedef struct { - ...; -} CFRange; -typedef struct { - ...; -} CFArrayCallBacks; - -typedef UInt32 CFStringEncoding; -enum { - kCFStringEncodingASCII = 0x0600 -}; - -enum { - kCFNumberSInt8Type = 1, - kCFNumberSInt16Type = 2, - kCFNumberSInt32Type = 3, - kCFNumberSInt64Type = 4, - kCFNumberFloat32Type = 5, - kCFNumberFloat64Type = 6, - kCFNumberCharType = 7, - kCFNumberShortType = 8, - kCFNumberIntType = 9, - kCFNumberLongType = 10, - kCFNumberLongLongType = 11, - kCFNumberFloatType = 12, - kCFNumberDoubleType = 13, - kCFNumberCFIndexType = 14, - kCFNumberNSIntegerType = 15, - kCFNumberCGFloatType = 16, - kCFNumberMaxType = 16 -}; -typedef int CFNumberType; - -const CFDictionaryKeyCallBacks kCFTypeDictionaryKeyCallBacks; -const CFDictionaryValueCallBacks kCFTypeDictionaryValueCallBacks; - -const CFArrayCallBacks kCFTypeArrayCallBacks; - -const CFBooleanRef kCFBooleanTrue; -const CFBooleanRef kCFBooleanFalse; -""" - -FUNCTIONS = """ -CFDataRef CFDataCreate(CFAllocatorRef, const UInt8 *, CFIndex); -CFStringRef CFStringCreateWithCString(CFAllocatorRef, const char *, - CFStringEncoding); -CFDictionaryRef CFDictionaryCreate(CFAllocatorRef, const void **, - const void **, CFIndex, - const CFDictionaryKeyCallBacks *, - const CFDictionaryValueCallBacks *); -CFMutableDictionaryRef CFDictionaryCreateMutable( - CFAllocatorRef, - CFIndex, - const CFDictionaryKeyCallBacks *, - const CFDictionaryValueCallBacks * -); -void CFDictionarySetValue(CFMutableDictionaryRef, const void *, const void *); -CFIndex CFArrayGetCount(CFArrayRef); -const void *CFArrayGetValueAtIndex(CFArrayRef, CFIndex); -CFIndex CFDataGetLength(CFDataRef); -void CFDataGetBytes(CFDataRef, CFRange, UInt8 *); -CFRange CFRangeMake(CFIndex, CFIndex); -void CFShow(CFTypeRef); -Boolean CFBooleanGetValue(CFBooleanRef); -CFNumberRef CFNumberCreate(CFAllocatorRef, CFNumberType, const void *); -void CFRelease(CFTypeRef); -CFTypeRef CFRetain(CFTypeRef); - -CFMutableArrayRef CFArrayCreateMutable(CFAllocatorRef, CFIndex, - const CFArrayCallBacks *); -void CFArrayAppendValue(CFMutableArrayRef, const void *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_cryptor.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_cryptor.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_cryptor.py +++ /dev/null @@ -1,99 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -enum { - kCCAlgorithmAES128 = 0, - kCCAlgorithmDES, - kCCAlgorithm3DES, - kCCAlgorithmCAST, - kCCAlgorithmRC4, - kCCAlgorithmRC2, - kCCAlgorithmBlowfish -}; -typedef uint32_t CCAlgorithm; -enum { - kCCSuccess = 0, - kCCParamError = -4300, - kCCBufferTooSmall = -4301, - kCCMemoryFailure = -4302, - kCCAlignmentError = -4303, - kCCDecodeError = -4304, - kCCUnimplemented = -4305 -}; -typedef int32_t CCCryptorStatus; -typedef uint32_t CCOptions; -enum { - kCCEncrypt = 0, - kCCDecrypt, -}; -typedef uint32_t CCOperation; -typedef ... *CCCryptorRef; - -enum { - kCCModeOptionCTR_LE = 0x0001, - kCCModeOptionCTR_BE = 0x0002 -}; - -typedef uint32_t CCModeOptions; - -enum { - kCCModeECB = 1, - kCCModeCBC = 2, - kCCModeCFB = 3, - kCCModeCTR = 4, - kCCModeF8 = 5, - kCCModeLRW = 6, - kCCModeOFB = 7, - kCCModeXTS = 8, - kCCModeRC4 = 9, - kCCModeCFB8 = 10, - kCCModeGCM = 11 -}; -typedef uint32_t CCMode; -enum { - ccNoPadding = 0, - ccPKCS7Padding = 1, -}; -typedef uint32_t CCPadding; -""" - -FUNCTIONS = """ -CCCryptorStatus CCCryptorCreateWithMode(CCOperation, CCMode, CCAlgorithm, - CCPadding, const void *, const void *, - size_t, const void *, size_t, int, - CCModeOptions, CCCryptorRef *); -CCCryptorStatus CCCryptorCreate(CCOperation, CCAlgorithm, CCOptions, - const void *, size_t, const void *, - CCCryptorRef *); -CCCryptorStatus CCCryptorUpdate(CCCryptorRef, const void *, size_t, void *, - size_t, size_t *); -CCCryptorStatus CCCryptorFinal(CCCryptorRef, void *, size_t, size_t *); -CCCryptorStatus CCCryptorRelease(CCCryptorRef); - -CCCryptorStatus CCCryptorGCMAddIV(CCCryptorRef, const void *, size_t); -CCCryptorStatus CCCryptorGCMAddAAD(CCCryptorRef, const void *, size_t); -CCCryptorStatus CCCryptorGCMEncrypt(CCCryptorRef, const void *, size_t, - void *); -CCCryptorStatus CCCryptorGCMDecrypt(CCCryptorRef, const void *, size_t, - void *); -CCCryptorStatus CCCryptorGCMFinal(CCCryptorRef, const void *, size_t *); -CCCryptorStatus CCCryptorGCMReset(CCCryptorRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -/* Not defined in the public header */ -enum { - kCCModeGCM = 11 -}; -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_digest.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_digest.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_digest.py +++ /dev/null @@ -1,58 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef uint32_t CC_LONG; -typedef uint64_t CC_LONG64; -typedef struct CC_MD5state_st { - ...; -} CC_MD5_CTX; -typedef struct CC_SHA1state_st { - ...; -} CC_SHA1_CTX; -typedef struct CC_SHA256state_st { - ...; -} CC_SHA256_CTX; -typedef struct CC_SHA512state_st { - ...; -} CC_SHA512_CTX; -""" - -FUNCTIONS = """ -int CC_MD5_Init(CC_MD5_CTX *); -int CC_MD5_Update(CC_MD5_CTX *, const void *, CC_LONG); -int CC_MD5_Final(unsigned char *, CC_MD5_CTX *); - -int CC_SHA1_Init(CC_SHA1_CTX *); -int CC_SHA1_Update(CC_SHA1_CTX *, const void *, CC_LONG); -int CC_SHA1_Final(unsigned char *, CC_SHA1_CTX *); - -int CC_SHA224_Init(CC_SHA256_CTX *); -int CC_SHA224_Update(CC_SHA256_CTX *, const void *, CC_LONG); -int CC_SHA224_Final(unsigned char *, CC_SHA256_CTX *); - -int CC_SHA256_Init(CC_SHA256_CTX *); -int CC_SHA256_Update(CC_SHA256_CTX *, const void *, CC_LONG); -int CC_SHA256_Final(unsigned char *, CC_SHA256_CTX *); - -int CC_SHA384_Init(CC_SHA512_CTX *); -int CC_SHA384_Update(CC_SHA512_CTX *, const void *, CC_LONG); -int CC_SHA384_Final(unsigned char *, CC_SHA512_CTX *); - -int CC_SHA512_Init(CC_SHA512_CTX *); -int CC_SHA512_Update(CC_SHA512_CTX *, const void *, CC_LONG); -int CC_SHA512_Final(unsigned char *, CC_SHA512_CTX *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_hmac.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_hmac.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_hmac.py +++ /dev/null @@ -1,37 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef struct { - ...; -} CCHmacContext; -enum { - kCCHmacAlgSHA1, - kCCHmacAlgMD5, - kCCHmacAlgSHA256, - kCCHmacAlgSHA384, - kCCHmacAlgSHA512, - kCCHmacAlgSHA224 -}; -typedef uint32_t CCHmacAlgorithm; -""" - -FUNCTIONS = """ -void CCHmacInit(CCHmacContext *, CCHmacAlgorithm, const void *, size_t); -void CCHmacUpdate(CCHmacContext *, const void *, size_t); -void CCHmacFinal(CCHmacContext *, void *); - -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_key_derivation.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_key_derivation.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_key_derivation.py +++ /dev/null @@ -1,39 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -enum { - kCCPBKDF2 = 2, -}; -typedef uint32_t CCPBKDFAlgorithm; -enum { - kCCPRFHmacAlgSHA1 = 1, - kCCPRFHmacAlgSHA224 = 2, - kCCPRFHmacAlgSHA256 = 3, - kCCPRFHmacAlgSHA384 = 4, - kCCPRFHmacAlgSHA512 = 5, -}; -typedef uint32_t CCPseudoRandomAlgorithm; -typedef unsigned int uint; -""" - -FUNCTIONS = """ -int CCKeyDerivationPBKDF(CCPBKDFAlgorithm, const char *, size_t, - const uint8_t *, size_t, CCPseudoRandomAlgorithm, - uint, uint8_t *, size_t); -uint CCCalibratePBKDF(CCPBKDFAlgorithm, size_t, size_t, - CCPseudoRandomAlgorithm, size_t, uint32_t); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_symmetric_key_wrap.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_symmetric_key_wrap.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/common_symmetric_key_wrap.py +++ /dev/null @@ -1,35 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -enum { - kCCWRAPAES = 1, -}; - -typedef uint32_t CCWrappingAlgorithm; -""" - -FUNCTIONS = """ -int CCSymmetricKeyWrap(CCWrappingAlgorithm, const uint8_t *, const size_t, - const uint8_t *, size_t, const uint8_t *, size_t, - uint8_t *, size_t *); -int CCSymmetricKeyUnwrap(CCWrappingAlgorithm algorithm, const uint8_t *, - const size_t, const uint8_t *, size_t, - const uint8_t *, size_t, uint8_t *, size_t *); -size_t CCSymmetricWrappedSize(CCWrappingAlgorithm, size_t); -size_t CCSymmetricUnwrappedSize(CCWrappingAlgorithm, size_t); - -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seccertificate.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seccertificate.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seccertificate.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... *SecCertificateRef; -""" - -FUNCTIONS = """ -SecCertificateRef SecCertificateCreateWithData(CFAllocatorRef, CFDataRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secimport.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secimport.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secimport.py +++ /dev/null @@ -1,86 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... 
*SecAccessRef; - -CFStringRef kSecImportExportPassphrase; -CFStringRef kSecImportExportKeychain; -CFStringRef kSecImportExportAccess; - -typedef uint32_t SecExternalItemType; -enum { - kSecItemTypeUnknown, - kSecItemTypePrivateKey, - kSecItemTypePublicKey, - kSecItemTypeSessionKey, - kSecItemTypeCertificate, - kSecItemTypeAggregate -}; - - -typedef uint32_t SecExternalFormat; -enum { - kSecFormatUnknown = 0, - kSecFormatOpenSSL, - kSecFormatSSH, - kSecFormatBSAFE, - kSecFormatRawKey, - kSecFormatWrappedPKCS8, - kSecFormatWrappedOpenSSL, - kSecFormatWrappedSSH, - kSecFormatWrappedLSH, - kSecFormatX509Cert, - kSecFormatPEMSequence, - kSecFormatPKCS7, - kSecFormatPKCS12, - kSecFormatNetscapeCertSequence, - kSecFormatSSHv2 -}; - -typedef uint32_t SecItemImportExportFlags; -enum { - kSecKeyImportOnlyOne = 0x00000001, - kSecKeySecurePassphrase = 0x00000002, - kSecKeyNoAccessControl = 0x00000004 -}; -typedef uint32_t SecKeyImportExportFlags; - -typedef struct { - /* for import and export */ - uint32_t version; - SecKeyImportExportFlags flags; - CFTypeRef passphrase; - CFStringRef alertTitle; - CFStringRef alertPrompt; - - /* for import only */ - SecAccessRef accessRef; - CFArrayRef keyUsage; - - CFArrayRef keyAttributes; -} SecItemImportExportKeyParameters; -""" - -FUNCTIONS = """ -OSStatus SecItemImport(CFDataRef, CFStringRef, SecExternalFormat *, - SecExternalItemType *, SecItemImportExportFlags, - const SecItemImportExportKeyParameters *, - SecKeychainRef, CFArrayRef *); -OSStatus SecPKCS12Import(CFDataRef, CFDictionaryRef, CFArrayRef *); -OSStatus SecItemExport(CFTypeRef, SecExternalFormat, SecItemImportExportFlags, - const SecItemImportExportKeyParameters *, CFDataRef *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secitem.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secitem.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secitem.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -const CFTypeRef kSecAttrKeyType; -const CFTypeRef kSecAttrKeySizeInBits; -const CFTypeRef kSecAttrIsPermanent; -const CFTypeRef kSecAttrKeyTypeRSA; -const CFTypeRef kSecAttrKeyTypeDSA; -const CFTypeRef kSecUseKeychain; -""" - -FUNCTIONS = """ -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seckey.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seckey.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seckey.py +++ /dev/null @@ -1,24 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... 
*SecKeyRef; -""" - -FUNCTIONS = """ -OSStatus SecKeyGeneratePair(CFDictionaryRef, SecKeyRef *, SecKeyRef *); -size_t SecKeyGetBlockSize(SecKeyRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seckeychain.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seckeychain.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/seckeychain.py +++ /dev/null @@ -1,25 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... *SecKeychainRef; -""" - -FUNCTIONS = """ -OSStatus SecKeychainCreate(const char *, UInt32, const void *, Boolean, - SecAccessRef, SecKeychainRef *); -OSStatus SecKeychainDelete(SecKeychainRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secpolicy.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secpolicy.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secpolicy.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... *SecPolicyRef; -""" - -FUNCTIONS = """ -SecPolicyRef SecPolicyCreateSSL(Boolean, CFStringRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/sectransform.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/sectransform.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/sectransform.py +++ /dev/null @@ -1,68 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -#include -#include -""" - -TYPES = """ -typedef ... 
*SecTransformRef; - -CFStringRef kSecImportExportPassphrase; -CFStringRef kSecImportExportKeychain; -CFStringRef kSecImportExportAccess; - -CFStringRef kSecEncryptionMode; -CFStringRef kSecEncryptKey; -CFStringRef kSecIVKey; -CFStringRef kSecModeCBCKey; -CFStringRef kSecModeCFBKey; -CFStringRef kSecModeECBKey; -CFStringRef kSecModeNoneKey; -CFStringRef kSecModeOFBKey; -CFStringRef kSecOAEPEncodingParametersAttributeName; -CFStringRef kSecPaddingKey; -CFStringRef kSecPaddingNoneKey; -CFStringRef kSecPaddingOAEPKey; -CFStringRef kSecPaddingPKCS1Key; -CFStringRef kSecPaddingPKCS5Key; -CFStringRef kSecPaddingPKCS7Key; - -const CFStringRef kSecTransformInputAttributeName; -const CFStringRef kSecTransformOutputAttributeName; -const CFStringRef kSecTransformDebugAttributeName; -const CFStringRef kSecTransformTransformName; -const CFStringRef kSecTransformAbortAttributeName; - -CFStringRef kSecInputIsAttributeName; -CFStringRef kSecInputIsPlainText; -CFStringRef kSecInputIsDigest; -CFStringRef kSecInputIsRaw; - -const CFStringRef kSecDigestTypeAttribute; -const CFStringRef kSecDigestLengthAttribute; -const CFStringRef kSecDigestMD5; -const CFStringRef kSecDigestSHA1; -const CFStringRef kSecDigestSHA2; -""" - -FUNCTIONS = """ -Boolean SecTransformSetAttribute(SecTransformRef, CFStringRef, CFTypeRef, - CFErrorRef *); -SecTransformRef SecDecryptTransformCreate(SecKeyRef, CFErrorRef *); -SecTransformRef SecEncryptTransformCreate(SecKeyRef, CFErrorRef *); -SecTransformRef SecVerifyTransformCreate(SecKeyRef, CFDataRef, CFErrorRef *); -SecTransformRef SecSignTransformCreate(SecKeyRef, CFErrorRef *) ; -CFTypeRef SecTransformExecute(SecTransformRef, CFErrorRef *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/sectrust.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/sectrust.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/sectrust.py +++ /dev/null @@ -1,39 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... *SecTrustRef; -typedef uint32_t SecTrustResultType; - -enum { - kSecTrustResultInvalid, - kSecTrustResultProceed, - kSecTrustResultDeny, - kSecTrustResultUnspecified, - kSecTrustResultRecoverableTrustFailure, - kSecTrustResultFatalTrustFailure, - kSecTrustResultOtherError -}; -""" - -FUNCTIONS = """ -OSStatus SecTrustEvaluate(SecTrustRef, SecTrustResultType *); -OSStatus SecTrustCopyAnchorCertificates(CFArrayRef *); -""" - -MACROS = """ -/* The first argument changed from CFArrayRef to CFTypeRef in 10.8, so this - * has to go here for compatibility. - */ -OSStatus SecTrustCreateWithCertificates(CFTypeRef, CFTypeRef, SecTrustRef *); -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secure_transport.py b/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secure_transport.py deleted file mode 100644 --- a/lib_pypy/_cffi_ssl/_cffi_src/commoncrypto/secure_transport.py +++ /dev/null @@ -1,308 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... 
*SSLContextRef; -typedef const void *SSLConnectionRef; - -typedef enum { - kSSLSessionOptionBreakOnServerAuth, - kSSLSessionOptionBreakOnCertRequested, -} SSLSessionOption; - -typedef enum { - kNeverAuthenticate, - kAlwaysAuthenticate, - kTryAuthenticate -} SSLAuthenticate; - -typedef enum { - kSSLIdle, - kSSLHandshake, - kSSLConnected, - kSSLClosed, - kSSLAborted -} SSLSessionState; - -typedef enum { - kSSLProtocolUnknown = 0, - kSSLProtocol3 = 2, - kTLSProtocol1 = 4, - /* DEPRECATED on iOS */ - kSSLProtocol2 = 1, - kSSLProtocol3Only = 3, - kTLSProtocol1Only = 5, - kSSLProtocolAll = 6, -} SSLProtocol; - -typedef UInt32 SSLCipherSuite; -enum { - SSL_NULL_WITH_NULL_NULL = 0x0000, - SSL_RSA_WITH_NULL_MD5 = 0x0001, - SSL_RSA_WITH_NULL_SHA = 0x0002, - SSL_RSA_EXPORT_WITH_RC4_40_MD5 = 0x0003, - SSL_RSA_WITH_RC4_128_MD5 = 0x0004, - SSL_RSA_WITH_RC4_128_SHA = 0x0005, - SSL_RSA_EXPORT_WITH_RC2_CBC_40_MD5 = 0x0006, - SSL_RSA_WITH_IDEA_CBC_SHA = 0x0007, - SSL_RSA_EXPORT_WITH_DES40_CBC_SHA = 0x0008, - SSL_RSA_WITH_DES_CBC_SHA = 0x0009, - SSL_RSA_WITH_3DES_EDE_CBC_SHA = 0x000A, - SSL_DH_DSS_EXPORT_WITH_DES40_CBC_SHA = 0x000B, - SSL_DH_DSS_WITH_DES_CBC_SHA = 0x000C, - SSL_DH_DSS_WITH_3DES_EDE_CBC_SHA = 0x000D, - SSL_DH_RSA_EXPORT_WITH_DES40_CBC_SHA = 0x000E, - SSL_DH_RSA_WITH_DES_CBC_SHA = 0x000F, - SSL_DH_RSA_WITH_3DES_EDE_CBC_SHA = 0x0010, - SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA = 0x0011, - SSL_DHE_DSS_WITH_DES_CBC_SHA = 0x0012, - SSL_DHE_DSS_WITH_3DES_EDE_CBC_SHA = 0x0013, - SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA = 0x0014, - SSL_DHE_RSA_WITH_DES_CBC_SHA = 0x0015, - SSL_DHE_RSA_WITH_3DES_EDE_CBC_SHA = 0x0016, - SSL_DH_anon_EXPORT_WITH_RC4_40_MD5 = 0x0017, - SSL_DH_anon_WITH_RC4_128_MD5 = 0x0018, - SSL_DH_anon_EXPORT_WITH_DES40_CBC_SHA = 0x0019, - SSL_DH_anon_WITH_DES_CBC_SHA = 0x001A, - SSL_DH_anon_WITH_3DES_EDE_CBC_SHA = 0x001B, - SSL_FORTEZZA_DMS_WITH_NULL_SHA = 0x001C, - SSL_FORTEZZA_DMS_WITH_FORTEZZA_CBC_SHA =0x001D, - - /* TLS addenda using AES, per RFC 3268 */ - TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F, - TLS_DH_DSS_WITH_AES_128_CBC_SHA = 0x0030, - TLS_DH_RSA_WITH_AES_128_CBC_SHA = 0x0031, - TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032, - TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033, - TLS_DH_anon_WITH_AES_128_CBC_SHA = 0x0034, - TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035, - TLS_DH_DSS_WITH_AES_256_CBC_SHA = 0x0036, - TLS_DH_RSA_WITH_AES_256_CBC_SHA = 0x0037, - TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038, - TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039, - TLS_DH_anon_WITH_AES_256_CBC_SHA = 0x003A, - - /* ECDSA addenda, RFC 4492 */ - TLS_ECDH_ECDSA_WITH_NULL_SHA = 0xC001, - TLS_ECDH_ECDSA_WITH_RC4_128_SHA = 0xC002, - TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA = 0xC003, - TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA = 0xC004, - TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA = 0xC005, - TLS_ECDHE_ECDSA_WITH_NULL_SHA = 0xC006, - TLS_ECDHE_ECDSA_WITH_RC4_128_SHA = 0xC007, - TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA = 0xC008, - TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009, - TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A, - TLS_ECDH_RSA_WITH_NULL_SHA = 0xC00B, - TLS_ECDH_RSA_WITH_RC4_128_SHA = 0xC00C, - TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA = 0xC00D, - TLS_ECDH_RSA_WITH_AES_128_CBC_SHA = 0xC00E, - TLS_ECDH_RSA_WITH_AES_256_CBC_SHA = 0xC00F, - TLS_ECDHE_RSA_WITH_NULL_SHA = 0xC010, - TLS_ECDHE_RSA_WITH_RC4_128_SHA = 0xC011, - TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA = 0xC012, - TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013, - TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014, - TLS_ECDH_anon_WITH_NULL_SHA = 0xC015, - TLS_ECDH_anon_WITH_RC4_128_SHA = 0xC016, - 
TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA = 0xC017, - TLS_ECDH_anon_WITH_AES_128_CBC_SHA = 0xC018, - TLS_ECDH_anon_WITH_AES_256_CBC_SHA = 0xC019, - - /* TLS 1.2 addenda, RFC 5246 */ - /* Initial state. */ - TLS_NULL_WITH_NULL_NULL = 0x0000, - - /* Server provided RSA certificate for key exchange. */ - TLS_RSA_WITH_NULL_MD5 = 0x0001, - TLS_RSA_WITH_NULL_SHA = 0x0002, - TLS_RSA_WITH_RC4_128_MD5 = 0x0004, - TLS_RSA_WITH_RC4_128_SHA = 0x0005, - TLS_RSA_WITH_3DES_EDE_CBC_SHA = 0x000A, - TLS_RSA_WITH_NULL_SHA256 = 0x003B, - TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C, - TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D, - - /* Server-authenticated (and optionally client-authenticated) - Diffie-Hellman. */ - TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA = 0x000D, - TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA = 0x0010, - TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA = 0x0013, - TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA = 0x0016, - TLS_DH_DSS_WITH_AES_128_CBC_SHA256 = 0x003E, - TLS_DH_RSA_WITH_AES_128_CBC_SHA256 = 0x003F, - TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040, - TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067, - TLS_DH_DSS_WITH_AES_256_CBC_SHA256 = 0x0068, - TLS_DH_RSA_WITH_AES_256_CBC_SHA256 = 0x0069, - TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A, - TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B, - - /* Completely anonymous Diffie-Hellman */ - TLS_DH_anon_WITH_RC4_128_MD5 = 0x0018, - TLS_DH_anon_WITH_3DES_EDE_CBC_SHA = 0x001B, - TLS_DH_anon_WITH_AES_128_CBC_SHA256 = 0x006C, - TLS_DH_anon_WITH_AES_256_CBC_SHA256 = 0x006D, - - /* Addenda from rfc 5288 AES Galois Counter Mode (GCM) Cipher Suites - for TLS. */ - TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C, - TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D, - TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E, - TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F, - TLS_DH_RSA_WITH_AES_128_GCM_SHA256 = 0x00A0, - TLS_DH_RSA_WITH_AES_256_GCM_SHA384 = 0x00A1, - TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2, - TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3, - TLS_DH_DSS_WITH_AES_128_GCM_SHA256 = 0x00A4, - TLS_DH_DSS_WITH_AES_256_GCM_SHA384 = 0x00A5, - TLS_DH_anon_WITH_AES_128_GCM_SHA256 = 0x00A6, - TLS_DH_anon_WITH_AES_256_GCM_SHA384 = 0x00A7, - - /* Addenda from rfc 5289 Elliptic Curve Cipher Suites with - HMAC SHA-256/384. */ - TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023, - TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024, - TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC025, - TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC026, - TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027, - TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028, - TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256 = 0xC029, - TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384 = 0xC02A, - - /* Addenda from rfc 5289 Elliptic Curve Cipher Suites with - SHA-256/384 and AES Galois Counter Mode (GCM) */ - TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B, - TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C, - TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02D, - TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02E, - TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F, - TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030, - TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256 = 0xC031, - TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384 = 0xC032, - - /* RFC 5746 - Secure Renegotiation */ - TLS_EMPTY_RENEGOTIATION_INFO_SCSV = 0x00FF, - - /* - * Tags for SSL 2 cipher kinds that are not specified - * for SSL 3. 
- */ - SSL_RSA_WITH_RC2_CBC_MD5 = 0xFF80, - SSL_RSA_WITH_IDEA_CBC_MD5 = 0xFF81, - SSL_RSA_WITH_DES_CBC_MD5 = 0xFF82, - SSL_RSA_WITH_3DES_EDE_CBC_MD5 = 0xFF83, - SSL_NO_SUCH_CIPHERSUITE = 0xFFFF -}; - -typedef enum { - kSSLClientCertNone, - kSSLClientCertRequested, - kSSLClientCertSent, - kSSLClientCertRejected -} SSLClientCertificateState; - -enum { - errSSLProtocol = -9800, - errSSLNegotiation = -9801, - errSSLFatalAlert = -9802, - errSSLWouldBlock = -9803, - errSSLSessionNotFound = -9804, - errSSLClosedGraceful = -9805, - errSSLClosedAbort = -9806, - errSSLXCertChainInvalid = -9807, - errSSLBadCert = -9808, - errSSLCrypto = -9809, - errSSLInternal = -9810, - errSSLModuleAttach = -9811, - errSSLUnknownRootCert = -9812, - errSSLNoRootCert = -9813, - errSSLCertExpired = -9814, - errSSLCertNotYetValid = -9815, - errSSLClosedNoNotify = -9816, - errSSLBufferOverflow = -9817, - errSSLBadCipherSuite = -9818, - errSSLPeerUnexpectedMsg = -9819, - errSSLPeerBadRecordMac = -9820, - errSSLPeerDecryptionFail = -9821, - errSSLPeerRecordOverflow = -9822, - errSSLPeerDecompressFail = -9823, - errSSLPeerHandshakeFail = -9824, - errSSLPeerBadCert = -9825, - errSSLPeerUnsupportedCert = -9826, - errSSLPeerCertRevoked = -9827, - errSSLPeerCertExpired = -9828, - errSSLPeerCertUnknown = -9829, - errSSLIllegalParam = -9830, - errSSLPeerUnknownCA = -9831, - errSSLPeerAccessDenied = -9832, - errSSLPeerDecodeError = -9833, - errSSLPeerDecryptError = -9834, - errSSLPeerExportRestriction = -9835, - errSSLPeerProtocolVersion = -9836, - errSSLPeerInsufficientSecurity = -9837, - errSSLPeerInternalError = -9838, - errSSLPeerUserCancelled = -9839, - errSSLPeerNoRenegotiation = -9840, - errSSLServerAuthCompleted = -9841, - errSSLClientCertRequested = -9842, - errSSLHostNameMismatch = -9843, - errSSLConnectionRefused = -9844, - errSSLDecryptionFail = -9845, - errSSLBadRecordMac = -9846, - errSSLRecordOverflow = -9847, - errSSLBadConfiguration = -9848, - errSSLLast = -9849 /* end of range, to be deleted */ -}; -""" - -FUNCTIONS = """ -OSStatus SSLSetConnection(SSLContextRef, SSLConnectionRef); -OSStatus SSLGetConnection(SSLContextRef, SSLConnectionRef *); -OSStatus SSLSetSessionOption(SSLContextRef, SSLSessionOption, Boolean); -OSStatus SSLSetClientSideAuthenticate(SSLContextRef, SSLAuthenticate); - -OSStatus SSLHandshake(SSLContextRef); -OSStatus SSLGetSessionState(SSLContextRef, SSLSessionState *); -OSStatus SSLGetNegotiatedProtocolVersion(SSLContextRef, SSLProtocol *); -OSStatus SSLSetPeerID(SSLContextRef, const void *, size_t); -OSStatus SSLGetPeerID(SSLContextRef, const void **, size_t *); -OSStatus SSLGetBufferedReadSize(SSLContextRef, size_t *); -OSStatus SSLRead(SSLContextRef, void *, size_t, size_t *); -OSStatus SSLWrite(SSLContextRef, const void *, size_t, size_t *); -OSStatus SSLClose(SSLContextRef); - -OSStatus SSLGetNumberSupportedCiphers(SSLContextRef, size_t *); -OSStatus SSLGetSupportedCiphers(SSLContextRef, SSLCipherSuite *, size_t *); -OSStatus SSLSetEnabledCiphers(SSLContextRef, const SSLCipherSuite *, size_t); -OSStatus SSLGetNumberEnabledCiphers(SSLContextRef, size_t *); -OSStatus SSLGetEnabledCiphers(SSLContextRef, SSLCipherSuite *, size_t *); -OSStatus SSLGetNegotiatedCipher(SSLContextRef, SSLCipherSuite *); -OSStatus SSLSetDiffieHellmanParams(SSLContextRef, const void *, size_t); -OSStatus SSLGetDiffieHellmanParams(SSLContextRef, const void **, size_t *); - -OSStatus SSLSetCertificateAuthorities(SSLContextRef, CFTypeRef, Boolean); -OSStatus SSLCopyCertificateAuthorities(SSLContextRef, CFArrayRef *); 
-OSStatus SSLCopyDistinguishedNames(SSLContextRef, CFArrayRef *); -OSStatus SSLSetCertificate(SSLContextRef, CFArrayRef); -OSStatus SSLGetClientCertificateState(SSLContextRef, - SSLClientCertificateState *); -OSStatus SSLCopyPeerTrust(SSLContextRef, SecTrustRef *trust); - -OSStatus SSLSetPeerDomainName(SSLContextRef, const char *, size_t); -OSStatus SSLGetPeerDomainNameLength(SSLContextRef, size_t *); -OSStatus SSLGetPeerDomainName(SSLContextRef, char *, size_t *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/aes.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/aes.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/aes.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/aes.py @@ -9,13 +9,7 @@ """ TYPES = """ -static const int Cryptography_HAS_AES_WRAP; -static const int Cryptography_HAS_AES_CTR128_ENCRYPT; - -struct aes_key_st { - ...; -}; -typedef struct aes_key_st AES_KEY; +typedef ... AES_KEY; """ FUNCTIONS = """ @@ -28,23 +22,5 @@ const unsigned char *, unsigned int); """ -MACROS = """ -/* The ctr128_encrypt function is only useful in 1.0.0. We can use EVP for - this in 1.0.1+. */ -void AES_ctr128_encrypt(const unsigned char *, unsigned char *, - size_t, const AES_KEY *, unsigned char[], - unsigned char[], unsigned int *); +CUSTOMIZATIONS = """ """ - -CUSTOMIZATIONS = """ -static const long Cryptography_HAS_AES_WRAP = 1; -#if CRYPTOGRAPHY_OPENSSL_110_OR_GREATER && !defined(LIBRESSL_VERSION_NUMBER) -static const int Cryptography_HAS_AES_CTR128_ENCRYPT = 0; -void (*AES_ctr128_encrypt)(const unsigned char *, unsigned char *, - size_t, const AES_KEY *, - unsigned char[], unsigned char[], - unsigned int *) = NULL; -#else -static const int Cryptography_HAS_AES_CTR128_ENCRYPT = 1; -#endif -""" diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/asn1.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/asn1.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/asn1.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/asn1.py @@ -11,7 +11,6 @@ TYPES = """ typedef int... time_t; -typedef int ASN1_BOOLEAN; typedef ... ASN1_INTEGER; struct asn1_string_st { @@ -35,29 +34,17 @@ typedef ... ASN1_VALUE; typedef ... ASN1_ITEM_EXP; - -typedef ... ASN1_UTCTIME; +typedef ... 
ASN1_NULL; static const int V_ASN1_GENERALIZEDTIME; -static const int MBSTRING_FLAG; -static const int MBSTRING_ASC; -static const int MBSTRING_BMP; static const int MBSTRING_UTF8; -static const int MBSTRING_UNIV; """ FUNCTIONS = """ -ASN1_OBJECT *ASN1_OBJECT_new(void); void ASN1_OBJECT_free(ASN1_OBJECT *); -/* ASN1 OBJECT IDENTIFIER */ -ASN1_OBJECT *d2i_ASN1_OBJECT(ASN1_OBJECT **, const unsigned char **, long); - /* ASN1 STRING */ -ASN1_STRING *ASN1_STRING_new(void); -ASN1_STRING *ASN1_STRING_type_new(int); -void ASN1_STRING_free(ASN1_STRING *); unsigned char *ASN1_STRING_data(ASN1_STRING *); int ASN1_STRING_set(ASN1_STRING *, const void *, int); @@ -70,95 +57,59 @@ ASN1_IA5STRING *ASN1_IA5STRING_new(void); /* ASN1 INTEGER */ -ASN1_INTEGER *ASN1_INTEGER_new(void); void ASN1_INTEGER_free(ASN1_INTEGER *); int ASN1_INTEGER_set(ASN1_INTEGER *, long); /* ASN1 TIME */ ASN1_TIME *ASN1_TIME_new(void); void ASN1_TIME_free(ASN1_TIME *); -ASN1_GENERALIZEDTIME *ASN1_TIME_to_generalizedtime(ASN1_TIME *, - ASN1_GENERALIZEDTIME **); ASN1_TIME *ASN1_TIME_set(ASN1_TIME *, time_t); - -/* ASN1 UTCTIME */ -ASN1_UTCTIME *ASN1_UTCTIME_new(void); -void ASN1_UTCTIME_free(ASN1_UTCTIME *); -int ASN1_UTCTIME_cmp_time_t(const ASN1_UTCTIME *, time_t); -ASN1_UTCTIME *ASN1_UTCTIME_set(ASN1_UTCTIME *, time_t); +int ASN1_TIME_set_string(ASN1_TIME *, const char *); /* ASN1 GENERALIZEDTIME */ -int ASN1_GENERALIZEDTIME_set_string(ASN1_GENERALIZEDTIME *, const char *); ASN1_GENERALIZEDTIME *ASN1_GENERALIZEDTIME_set(ASN1_GENERALIZEDTIME *, time_t); void ASN1_GENERALIZEDTIME_free(ASN1_GENERALIZEDTIME *); -int i2d_ASN1_GENERALIZEDTIME(ASN1_GENERALIZEDTIME *, unsigned char **); /* ASN1 ENUMERATED */ ASN1_ENUMERATED *ASN1_ENUMERATED_new(void); void ASN1_ENUMERATED_free(ASN1_ENUMERATED *); int ASN1_ENUMERATED_set(ASN1_ENUMERATED *, long); -int i2d_ASN1_ENUMERATED(ASN1_ENUMERATED *, unsigned char **); ASN1_VALUE *ASN1_item_d2i(ASN1_VALUE **, const unsigned char **, long, const ASN1_ITEM *); int ASN1_BIT_STRING_set_bit(ASN1_BIT_STRING *, int, int); -""" - -MACROS = """ /* These became const ASN1_* in 1.1.0 */ -int i2d_ASN1_OBJECT(ASN1_OBJECT *, unsigned char **); int ASN1_STRING_type(ASN1_STRING *); int ASN1_STRING_to_UTF8(unsigned char **, ASN1_STRING *); long ASN1_ENUMERATED_get(ASN1_ENUMERATED *); int i2a_ASN1_INTEGER(BIO *, ASN1_INTEGER *); +/* This became const ASN1_TIME in 1.1.0f */ +ASN1_GENERALIZEDTIME *ASN1_TIME_to_generalizedtime(ASN1_TIME *, + ASN1_GENERALIZEDTIME **); + ASN1_UTF8STRING *ASN1_UTF8STRING_new(void); void ASN1_UTF8STRING_free(ASN1_UTF8STRING *); ASN1_BIT_STRING *ASN1_BIT_STRING_new(void); void ASN1_BIT_STRING_free(ASN1_BIT_STRING *); -int i2d_ASN1_BIT_STRING(ASN1_BIT_STRING *, unsigned char **); -int i2d_ASN1_OCTET_STRING(ASN1_OCTET_STRING *, unsigned char **); -int i2d_ASN1_INTEGER(ASN1_INTEGER *, unsigned char **); +const ASN1_ITEM *ASN1_ITEM_ptr(ASN1_ITEM_EXP *); + /* This is not a macro, but is const on some versions of OpenSSL */ int ASN1_BIT_STRING_get_bit(ASN1_BIT_STRING *, int); -ASN1_TIME *M_ASN1_TIME_dup(void *); -const ASN1_ITEM *ASN1_ITEM_ptr(ASN1_ITEM_EXP *); - -/* These aren't macros these arguments are all const X on openssl > 1.0.x */ int ASN1_TIME_print(BIO *, ASN1_TIME *); int ASN1_STRING_length(ASN1_STRING *); -ASN1_STRING *ASN1_STRING_dup(ASN1_STRING *); -int ASN1_STRING_cmp(ASN1_STRING *, ASN1_STRING *); -int ASN1_UTCTIME_print(BIO *, ASN1_UTCTIME *); - -ASN1_OCTET_STRING *ASN1_OCTET_STRING_dup(ASN1_OCTET_STRING *); -int ASN1_OCTET_STRING_cmp(ASN1_OCTET_STRING *, ASN1_OCTET_STRING *); - 
-ASN1_INTEGER *ASN1_INTEGER_dup(ASN1_INTEGER *); -int ASN1_INTEGER_cmp(ASN1_INTEGER *, ASN1_INTEGER *); -long ASN1_INTEGER_get(ASN1_INTEGER *); +int ASN1_STRING_set_default_mask_asc(char *); BIGNUM *ASN1_INTEGER_to_BN(ASN1_INTEGER *, BIGNUM *); ASN1_INTEGER *BN_to_ASN1_INTEGER(BIGNUM *, ASN1_INTEGER *); -/* These isn't a macro the arg is const on openssl 1.0.2+ */ -int ASN1_GENERALIZEDTIME_check(ASN1_GENERALIZEDTIME *); -int ASN1_UTCTIME_check(ASN1_UTCTIME *); - -/* Not a macro, const on openssl 1.0 */ -int ASN1_STRING_set_default_mask_asc(char *); - int i2d_ASN1_TYPE(ASN1_TYPE *, unsigned char **); ASN1_TYPE *d2i_ASN1_TYPE(ASN1_TYPE **, const unsigned char **, long); + +ASN1_NULL *ASN1_NULL_new(void); """ CUSTOMIZATIONS = """ -/* This macro is removed in 1.1.0. We re-add it if required to support - pyOpenSSL versions older than whatever resolves - https://github.com/pyca/pyopenssl/issues/431 */ -#if !defined(M_ASN1_TIME_dup) -#define M_ASN1_TIME_dup(a) (ASN1_TIME *)ASN1_STRING_dup((const ASN1_STRING *)a) -#endif """ diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/bignum.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/bignum.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/bignum.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/bignum.py @@ -10,13 +10,21 @@ TYPES = """ typedef ... BN_CTX; +typedef ... BN_MONT_CTX; typedef ... BIGNUM; typedef int... BN_ULONG; """ FUNCTIONS = """ +#define BN_FLG_CONSTTIME ... + +void BN_set_flags(BIGNUM *, int); + BIGNUM *BN_new(void); void BN_free(BIGNUM *); +void BN_clear_free(BIGNUM *); + +int BN_rand_range(BIGNUM *, const BIGNUM *); BN_CTX *BN_CTX_new(void); void BN_CTX_free(BN_CTX *); @@ -25,17 +33,18 @@ BIGNUM *BN_CTX_get(BN_CTX *); void BN_CTX_end(BN_CTX *); -BIGNUM *BN_copy(BIGNUM *, const BIGNUM *); +BN_MONT_CTX *BN_MONT_CTX_new(void); +int BN_MONT_CTX_set(BN_MONT_CTX *, const BIGNUM *, BN_CTX *); +void BN_MONT_CTX_free(BN_MONT_CTX *); + BIGNUM *BN_dup(const BIGNUM *); int BN_set_word(BIGNUM *, BN_ULONG); -BN_ULONG BN_get_word(const BIGNUM *); const BIGNUM *BN_value_one(void); char *BN_bn2hex(const BIGNUM *); int BN_hex2bn(BIGNUM **, const char *); -int BN_dec2bn(BIGNUM **, const char *); int BN_bn2bin(const BIGNUM *, unsigned char *); BIGNUM *BN_bin2bn(const unsigned char *, int, BIGNUM *); @@ -43,11 +52,9 @@ int BN_num_bits(const BIGNUM *); int BN_cmp(const BIGNUM *, const BIGNUM *); +int BN_is_negative(const BIGNUM *); int BN_add(BIGNUM *, const BIGNUM *, const BIGNUM *); int BN_sub(BIGNUM *, const BIGNUM *, const BIGNUM *); -int BN_mul(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_sqr(BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_div(BIGNUM *, BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); int BN_nnmod(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); int BN_mod_add(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); @@ -55,33 +62,23 @@ BN_CTX *); int BN_mod_mul(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_mod_sqr(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_exp(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); int BN_mod_exp(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_gcd(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); +int BN_mod_exp_mont(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, + BN_CTX *, BN_MONT_CTX *); +int BN_mod_exp_mont_consttime(BIGNUM *, const BIGNUM *, const BIGNUM *, + const BIGNUM *, BN_CTX *, BN_MONT_CTX *); BIGNUM *BN_mod_inverse(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int 
BN_set_bit(BIGNUM *, int); -int BN_clear_bit(BIGNUM *, int); - -int BN_is_bit_set(const BIGNUM *, int); - -int BN_mask_bits(BIGNUM *, int); -""" - -MACROS = """ int BN_num_bytes(const BIGNUM *); -int BN_zero(BIGNUM *); -int BN_one(BIGNUM *); int BN_mod(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_lshift(BIGNUM *, const BIGNUM *, int); -int BN_lshift1(BIGNUM *, BIGNUM *); - -int BN_rshift(BIGNUM *, BIGNUM *, int); -int BN_rshift1(BIGNUM *, BIGNUM *); +/* The following 3 prime methods are exposed for Tribler. */ +int BN_generate_prime_ex(BIGNUM *, int, int, const BIGNUM *, + const BIGNUM *, BN_GENCB *); +int BN_is_prime_ex(const BIGNUM *, int, BN_CTX *, BN_GENCB *); +const int BN_prime_checks_for_size(int); """ CUSTOMIZATIONS = """ diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/bio.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/bio.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/bio.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/bio.py @@ -9,136 +9,42 @@ """ TYPES = """ -typedef struct bio_st BIO; -typedef void bio_info_cb(BIO *, int, const char *, int, long, long); -typedef ... bio_st; +typedef ... BIO; typedef ... BIO_METHOD; -typedef ... BUF_MEM; - -static const int BIO_TYPE_MEM; -static const int BIO_TYPE_FILE; -static const int BIO_TYPE_FD; -static const int BIO_TYPE_SOCKET; -static const int BIO_TYPE_CONNECT; -static const int BIO_TYPE_ACCEPT; -static const int BIO_TYPE_NULL; -static const int BIO_CLOSE; -static const int BIO_NOCLOSE; -static const int BIO_TYPE_SOURCE_SINK; -static const int BIO_CTRL_RESET; -static const int BIO_CTRL_EOF; -static const int BIO_CTRL_SET; -static const int BIO_CTRL_SET_CLOSE; -static const int BIO_CTRL_FLUSH; -static const int BIO_CTRL_DUP; -static const int BIO_CTRL_GET_CLOSE; -static const int BIO_CTRL_INFO; -static const int BIO_CTRL_GET; -static const int BIO_CTRL_PENDING; -static const int BIO_CTRL_WPENDING; -static const int BIO_C_FILE_SEEK; -static const int BIO_C_FILE_TELL; -static const int BIO_TYPE_NONE; -static const int BIO_TYPE_NBIO_TEST; -static const int BIO_TYPE_BIO; -static const int BIO_TYPE_DESCRIPTOR; -static const int BIO_FLAGS_READ; -static const int BIO_FLAGS_WRITE; -static const int BIO_FLAGS_IO_SPECIAL; -static const int BIO_FLAGS_RWS; -static const int BIO_FLAGS_SHOULD_RETRY; -static const int BIO_TYPE_NULL_FILTER; -static const int BIO_TYPE_SSL; -static const int BIO_TYPE_MD; -static const int BIO_TYPE_BUFFER; -static const int BIO_TYPE_CIPHER; -static const int BIO_TYPE_BASE64; -static const int BIO_TYPE_FILTER; """ FUNCTIONS = """ int BIO_free(BIO *); -void BIO_vfree(BIO *); void BIO_free_all(BIO *); -BIO *BIO_push(BIO *, BIO *); -BIO *BIO_pop(BIO *); -BIO *BIO_next(BIO *); -BIO *BIO_find_type(BIO *, int); BIO *BIO_new_file(const char *, const char *); -BIO *BIO_new_fp(FILE *, int); -BIO *BIO_new_fd(int, int); -BIO *BIO_new_socket(int, int); -long BIO_ctrl(BIO *, int, long, void *); -long BIO_callback_ctrl( - BIO *, - int, - void (*)(struct bio_st *, int, const char *, int, long, long) -); -long BIO_int_ctrl(BIO *, int, long, int); +BIO *BIO_new_dgram(int, int); size_t BIO_ctrl_pending(BIO *); -size_t BIO_ctrl_wpending(BIO *); int BIO_read(BIO *, void *, int); int BIO_gets(BIO *, char *, int); int BIO_write(BIO *, const void *, int); -int BIO_puts(BIO *, const char *); -int BIO_method_type(const BIO *); -""" - -MACROS = """ /* Added in 1.1.0 */ int BIO_up_ref(BIO *); -/* These added const to BIO_METHOD in 1.1.0 */ BIO *BIO_new(BIO_METHOD *); BIO_METHOD *BIO_s_mem(void); BIO_METHOD *BIO_s_file(void); -BIO_METHOD 
*BIO_s_fd(void); -BIO_METHOD *BIO_s_socket(void); -BIO_METHOD *BIO_s_null(void); -BIO_METHOD *BIO_f_null(void); -BIO_METHOD *BIO_f_buffer(void); -/* BIO_new_mem_buf became const void * in 1.0.2g */ -BIO *BIO_new_mem_buf(void *, int); -long BIO_set_fd(BIO *, long, int); -long BIO_get_fd(BIO *, char *); +BIO_METHOD *BIO_s_datagram(void); +BIO *BIO_new_mem_buf(const void *, int); long BIO_set_mem_eof_return(BIO *, int); long BIO_get_mem_data(BIO *, char **); -long BIO_set_mem_buf(BIO *, BUF_MEM *, int); -long BIO_get_mem_ptr(BIO *, BUF_MEM **); -long BIO_set_fp(BIO *, FILE *, int); -long BIO_get_fp(BIO *, FILE **); long BIO_read_filename(BIO *, char *); -long BIO_write_filename(BIO *, char *); -long BIO_append_filename(BIO *, char *); -long BIO_rw_filename(BIO *, char *); int BIO_should_read(BIO *); int BIO_should_write(BIO *); int BIO_should_io_special(BIO *); -int BIO_retry_type(BIO *); int BIO_should_retry(BIO *); int BIO_reset(BIO *); -int BIO_seek(BIO *, int); -int BIO_tell(BIO *); -int BIO_flush(BIO *); -int BIO_eof(BIO *); -int BIO_set_close(BIO *,long); -int BIO_get_close(BIO *); -int BIO_pending(BIO *); -int BIO_wpending(BIO *); -int BIO_get_info_callback(BIO *, bio_info_cb **); -int BIO_set_info_callback(BIO *, bio_info_cb *); -long BIO_get_buffer_num_lines(BIO *); -long BIO_set_read_buffer_size(BIO *, long); -long BIO_set_write_buffer_size(BIO *, long); -long BIO_set_buffer_size(BIO *, long); -long BIO_set_buffer_read_data(BIO *, void *, long); long BIO_set_nbio(BIO *, long); void BIO_set_retry_read(BIO *); void BIO_clear_retry_flags(BIO *); """ CUSTOMIZATIONS = """ -#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110PRE4 || defined(LIBRESSL_VERSION_NUMBER) +#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110 && !CRYPTOGRAPHY_LIBRESSL_27_OR_GREATER int BIO_up_ref(BIO *b) { CRYPTO_add(&b->references, 1, CRYPTO_LOCK_BIO); return 1; diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/callbacks.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/callbacks.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/callbacks.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/callbacks.py @@ -4,108 +4,85 @@ from __future__ import absolute_import, division, print_function -import sys - -import cffi - INCLUDES = """ #include #include #include #include + +#ifdef _WIN32 +#define WIN32_LEAN_AND_MEAN +#include +#include +#include +#else +#include +#include +#include +#endif """ TYPES = """ -static const long Cryptography_STATIC_CALLBACKS; - -/* crypto.h - * CRYPTO_set_locking_callback - * void (*cb)(int mode, int type, const char *file, int line) - */ -extern "Python" void Cryptography_locking_cb(int, int, const char *, int); - -/* pem.h - * int pem_password_cb(char *buf, int size, int rwflag, void *userdata); - */ -extern "Python" int Cryptography_pem_password_cb(char *, int, int, void *); - -/* rand.h - * int (*bytes)(unsigned char *buf, int num); - * int (*status)(void); - */ -extern "Python" int Cryptography_rand_bytes(unsigned char *, int); -extern "Python" int Cryptography_rand_status(void); +typedef struct { + char *password; + int length; + int called; + int error; + int maxsize; +} CRYPTOGRAPHY_PASSWORD_DATA; """ FUNCTIONS = """ -int _setup_ssl_threads(void); -""" - -MACROS = """ +int Cryptography_setup_ssl_threads(void); +int Cryptography_pem_password_cb(char *, int, int, void *); """ CUSTOMIZATIONS = """ -static const long Cryptography_STATIC_CALLBACKS = 1; -""" - -if cffi.__version_info__ < (1, 4, 0) or sys.version_info >= (3, 5): - # backwards compatibility for old cffi version on PyPy - # and Python >=3.5 
(https://github.com/pyca/cryptography/issues/2970) - TYPES = "static const long Cryptography_STATIC_CALLBACKS;" - CUSTOMIZATIONS = """static const long Cryptography_STATIC_CALLBACKS = 0; -""" - -CUSTOMIZATIONS += """ /* This code is derived from the locking code found in the Python _ssl module's locking callback for OpenSSL. Copyright 2001-2016 Python Software Foundation; All Rights Reserved. + + It has been subsequently modified to use cross platform locking without + using CPython APIs by Armin Rigo of the PyPy project. */ +#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110 #ifdef _WIN32 -#ifdef _MSC_VER -#ifdef inline -#undef inline -#endif -#define inline __inline -#endif -#include -typedef CRITICAL_SECTION mutex1_t; -static inline void mutex1_init(mutex1_t *mutex) { +typedef CRITICAL_SECTION Cryptography_mutex; +static __inline void cryptography_mutex_init(Cryptography_mutex *mutex) { InitializeCriticalSection(mutex); } -static inline void mutex1_lock(mutex1_t *mutex) { +static __inline void cryptography_mutex_lock(Cryptography_mutex *mutex) { EnterCriticalSection(mutex); } -static inline void mutex1_unlock(mutex1_t *mutex) { +static __inline void cryptography_mutex_unlock(Cryptography_mutex *mutex) { LeaveCriticalSection(mutex); } #else -#include -#include -#include -typedef pthread_mutex_t mutex1_t; -#define ASSERT_STATUS(call) \ - if (call != 0) { \ - perror("Fatal error in _cffi_ssl: " #call); \ - abort(); \ +typedef pthread_mutex_t Cryptography_mutex; +#define ASSERT_STATUS(call) \ + if ((call) != 0) { \ + perror("Fatal error in callback initialization: " #call); \ + abort(); \ } -static inline void mutex1_init(mutex1_t *mutex) { +static inline void cryptography_mutex_init(Cryptography_mutex *mutex) { #if !defined(pthread_mutexattr_default) # define pthread_mutexattr_default ((pthread_mutexattr_t *)NULL) #endif ASSERT_STATUS(pthread_mutex_init(mutex, pthread_mutexattr_default)); } -static inline void mutex1_lock(mutex1_t *mutex) { +static inline void cryptography_mutex_lock(Cryptography_mutex *mutex) { ASSERT_STATUS(pthread_mutex_lock(mutex)); } -static inline void mutex1_unlock(mutex1_t *mutex) { +static inline void cryptography_mutex_unlock(Cryptography_mutex *mutex) { ASSERT_STATUS(pthread_mutex_unlock(mutex)); } #endif + static unsigned int _ssl_locks_count = 0; -static mutex1_t *_ssl_locks = NULL; +static Cryptography_mutex *_ssl_locks = NULL; static void _ssl_thread_locking_function(int mode, int n, const char *file, int line) { @@ -129,24 +106,24 @@ } if (mode & CRYPTO_LOCK) { - mutex1_lock(_ssl_locks + n); + cryptography_mutex_lock(_ssl_locks + n); } else { - mutex1_unlock(_ssl_locks + n); + cryptography_mutex_unlock(_ssl_locks + n); } } -static void init_mutexes(void) -{ +static void init_mutexes(void) { int i; - for (i = 0; i < _ssl_locks_count; i++) { - mutex1_init(_ssl_locks + i); + for (i = 0; i < _ssl_locks_count; i++) { + cryptography_mutex_init(_ssl_locks + i); } } -int _setup_ssl_threads(void) { + +int Cryptography_setup_ssl_threads(void) { if (_ssl_locks == NULL) { _ssl_locks_count = CRYPTO_num_locks(); - _ssl_locks = malloc(sizeof(mutex1_t) * _ssl_locks_count); + _ssl_locks = calloc(_ssl_locks_count, sizeof(Cryptography_mutex)); if (_ssl_locks == NULL) { return 0; } @@ -158,4 +135,34 @@ } return 1; } +#else +int (*Cryptography_setup_ssl_threads)(void) = NULL; +#endif + +typedef struct { + char *password; + int length; + int called; + int error; + int maxsize; +} CRYPTOGRAPHY_PASSWORD_DATA; + +int Cryptography_pem_password_cb(char *buf, int size, + int rwflag, void 
*userdata) { + /* The password cb is only invoked if OpenSSL decides the private + key is encrypted. So this path only occurs if it needs a password */ + CRYPTOGRAPHY_PASSWORD_DATA *st = (CRYPTOGRAPHY_PASSWORD_DATA *)userdata; + st->called += 1; + st->maxsize = size; + if (st->length == 0) { + st->error = -1; + return 0; + } else if (st->length < size) { + memcpy(buf, st->password, st->length); + return st->length; + } else { + st->error = -2; + return 0; + } +} """ diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/cmac.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/cmac.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/cmac.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/cmac.py @@ -5,20 +5,16 @@ from __future__ import absolute_import, division, print_function INCLUDES = """ -#if !defined(OPENSSL_NO_CMAC) && CRYPTOGRAPHY_OPENSSL_101_OR_GREATER +#if !defined(OPENSSL_NO_CMAC) #include #endif """ TYPES = """ -static const int Cryptography_HAS_CMAC; typedef ... CMAC_CTX; """ FUNCTIONS = """ -""" - -MACROS = """ CMAC_CTX *CMAC_CTX_new(void); int CMAC_Init(CMAC_CTX *, const void *, size_t, const EVP_CIPHER *, ENGINE *); int CMAC_Update(CMAC_CTX *, const void *, size_t); @@ -28,17 +24,4 @@ """ CUSTOMIZATIONS = """ -#if !defined(OPENSSL_NO_CMAC) && CRYPTOGRAPHY_OPENSSL_101_OR_GREATER -static const long Cryptography_HAS_CMAC = 1; From pypy.commits at gmail.com Wed Aug 14 12:42:15 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 14 Aug 2019 09:42:15 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: add failing ctypes test that causes socket.sendall() failure Message-ID: <5d5439e7.1c69fb81.c8742.10f3@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97177:1deb84428ab6 Date: 2019-08-14 19:39 +0300 http://bitbucket.org/pypy/pypy/changeset/1deb84428ab6/ Log: add failing ctypes test that causes socket.sendall() failure diff --git a/extra_tests/ctypes_tests/test_cast.py b/extra_tests/ctypes_tests/test_cast.py --- a/extra_tests/ctypes_tests/test_cast.py +++ b/extra_tests/ctypes_tests/test_cast.py @@ -28,3 +28,11 @@ assert x.value is False x = c_bool(['yadda']) assert x.value is True + +def test_cast_array(): + data = b'data' + ubyte = c_ubyte * len(data) + byteslike = ubyte.from_buffer_copy(data) + m = memoryview(byteslike) + b = m.cast('B') + assert bytes(b) == data From pypy.commits at gmail.com Wed Aug 14 17:58:48 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 14 Aug 2019 14:58:48 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: test, fix subtle logic bug for memoryview.cast (when view.format is not 'B') Message-ID: <5d548418.1c69fb81.8c1cd.39dc@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97178:e0689d0f47c6 Date: 2019-08-15 00:58 +0300 http://bitbucket.org/pypy/pypy/changeset/e0689d0f47c6/ Log: test, fix subtle logic bug for memoryview.cast (when view.format is not 'B') diff --git a/lib-python/3/test/test_ssl.py b/lib-python/3/test/test_ssl.py --- a/lib-python/3/test/test_ssl.py +++ b/lib-python/3/test/test_ssl.py @@ -2844,10 +2844,6 @@ else: s.close() - def test_socketserver_urlib_uses_bisect(self): - b = urllib.request.bisect - raise ValueError('urllib.request.bisect is %s' % str(b)) - def test_socketserver(self): """Using socketserver to create and manage SSL connections.""" server = make_https_server(self, certfile=CERTFILE) diff --git a/pypy/objspace/std/memoryobject.py b/pypy/objspace/std/memoryobject.py --- a/pypy/objspace/std/memoryobject.py +++ b/pypy/objspace/std/memoryobject.py @@ -482,8 +482,8 @@ " with an optional '@'") origfmt = view.getformat() - if 
self.get_native_fmtchar(origfmt) < 0 or \ - (not is_byte_format(fmt) and not is_byte_format(origfmt)): + if (self.get_native_fmtchar(origfmt) < 0 or \ + (not is_byte_format(origfmt))) and (not is_byte_format(fmt)): raise oefmt(space.w_TypeError, "memoryview: cannot cast between" " two non-byte formats") diff --git a/pypy/objspace/std/test/test_memoryobject.py b/pypy/objspace/std/test/test_memoryobject.py --- a/pypy/objspace/std/test/test_memoryobject.py +++ b/pypy/objspace/std/test/test_memoryobject.py @@ -487,6 +487,32 @@ assert list(reversed(view)) == revlist assert list(reversed(view)) == view[::-1].tolist() + +class AppTestMemoryViewMockBuffer(object): + spaceconfig = dict(usemodules=['__pypy__']) + + def test_cast_with_byteorder(self): + import sys + if '__pypy__' not in sys.modules: + skip('PyPy-only test') + + # create a memoryview with format ' Author: Matti Picus Branch: py3.6 Changeset: r97179:cc40120a4dad Date: 2019-08-15 09:37 +0300 http://bitbucket.org/pypy/pypy/changeset/cc40120a4dad/ Log: add const qualifier for conditional compilation diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py @@ -621,9 +621,9 @@ #ifdef OPENSSL_NO_SSL3_METHOD static const long Cryptography_HAS_SSL3_METHOD = 0; -SSL_METHOD* (*SSLv3_method)(void) = NULL; -SSL_METHOD* (*SSLv3_client_method)(void) = NULL; -SSL_METHOD* (*SSLv3_server_method)(void) = NULL; +const SSL_METHOD* (*SSLv3_method)(void) = NULL; +const SSL_METHOD* (*SSLv3_client_method)(void) = NULL; +const SSL_METHOD* (*SSLv3_server_method)(void) = NULL; #else static const long Cryptography_HAS_SSL3_METHOD = 1; #endif From pypy.commits at gmail.com Thu Aug 15 02:46:01 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 14 Aug 2019 23:46:01 -0700 (PDT) Subject: [pypy-commit] pypy default: add failing ctypes test that causes socket.sendall() failure Message-ID: <5d54ffa9.1c69fb81.71845.23a1@mx.google.com> Author: Matti Picus Branch: Changeset: r97181:9920d591bb29 Date: 2019-08-14 19:39 +0300 http://bitbucket.org/pypy/pypy/changeset/9920d591bb29/ Log: add failing ctypes test that causes socket.sendall() failure diff --git a/extra_tests/ctypes_tests/test_cast.py b/extra_tests/ctypes_tests/test_cast.py --- a/extra_tests/ctypes_tests/test_cast.py +++ b/extra_tests/ctypes_tests/test_cast.py @@ -28,3 +28,11 @@ assert x.value is False x = c_bool(['yadda']) assert x.value is True + +def test_cast_array(): + data = b'data' + ubyte = c_ubyte * len(data) + byteslike = ubyte.from_buffer_copy(data) + m = memoryview(byteslike) + b = m.cast('B') + assert bytes(b) == data From pypy.commits at gmail.com Thu Aug 15 02:45:59 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 14 Aug 2019 23:45:59 -0700 (PDT) Subject: [pypy-commit] pypy default: tweak test, add missing get_cipher Message-ID: <5d54ffa7.1c69fb81.c51aa.3c5b@mx.google.com> Author: Matti Picus Branch: Changeset: r97180:df138cf691c1 Date: 2019-08-14 19:35 +0300 http://bitbucket.org/pypy/pypy/changeset/df138cf691c1/ Log: tweak test, add missing get_cipher diff too long, truncating to 2000 out of 4071 lines diff --git a/lib-python/3/test/test_tools.py b/lib-python/3.2/test/test_tools.py rename from lib-python/3/test/test_tools.py rename to lib-python/3.2/test/test_tools.py diff --git a/lib-python/3/test/test_ssl.py b/lib-python/3/test/test_ssl.py new file mode 100644 --- /dev/null +++ b/lib-python/3/test/test_ssl.py @@ -0,0 +1,3987 @@ +# Test the 
support for SSL and sockets + +import sys +import unittest +from test import support +import socket +import select +import time +import datetime +import gc +import os +import errno +import pprint +import tempfile +import urllib.request +import traceback +import asyncore +import weakref +import platform +import re +import functools +try: + import ctypes +except ImportError: + ctypes = None + +ssl = support.import_module("ssl") + +try: + import threading +except ImportError: + _have_threads = False +else: + _have_threads = True + +PROTOCOLS = sorted(ssl._PROTOCOL_NAMES) +HOST = support.HOST +IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL') +IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0) + + +def data_file(*name): + return os.path.join(os.path.dirname(__file__), *name) + +# The custom key and certificate files used in test_ssl are generated +# using Lib/test/make_ssl_certs.py. +# Other certificates are simply fetched from the Internet servers they +# are meant to authenticate. + +CERTFILE = data_file("keycert.pem") +BYTES_CERTFILE = os.fsencode(CERTFILE) +ONLYCERT = data_file("ssl_cert.pem") +ONLYKEY = data_file("ssl_key.pem") +BYTES_ONLYCERT = os.fsencode(ONLYCERT) +BYTES_ONLYKEY = os.fsencode(ONLYKEY) +CERTFILE_PROTECTED = data_file("keycert.passwd.pem") +ONLYKEY_PROTECTED = data_file("ssl_key.passwd.pem") +KEY_PASSWORD = "somepass" +CAPATH = data_file("capath") +BYTES_CAPATH = os.fsencode(CAPATH) +CAFILE_NEURONIO = data_file("capath", "4e1295a3.0") +CAFILE_CACERT = data_file("capath", "5ed36f99.0") + +# empty CRL +CRLFILE = data_file("revocation.crl") + +# Two keys and certs signed by the same CA (for SNI tests) +SIGNED_CERTFILE = data_file("keycert3.pem") +SIGNED_CERTFILE2 = data_file("keycert4.pem") +# Same certificate as pycacert.pem, but without extra text in file +SIGNING_CA = data_file("capath", "ceff1710.0") +# cert with all kinds of subject alt names +ALLSANFILE = data_file("allsans.pem") +# cert with all kinds of subject alt names +ALLSANFILE = data_file("allsans.pem") + +REMOTE_HOST = "self-signed.pythontest.net" + +EMPTYCERT = data_file("nullcert.pem") +BADCERT = data_file("badcert.pem") +NONEXISTINGCERT = data_file("XXXnonexisting.pem") +BADKEY = data_file("badkey.pem") +NOKIACERT = data_file("nokia.pem") +NULLBYTECERT = data_file("nullbytecert.pem") +TALOS_INVALID_CRLDP = data_file("talos-2019-0758.pem") + +DHFILE = data_file("ffdh3072.pem") +BYTES_DHFILE = os.fsencode(DHFILE) + +# Not defined in all versions of OpenSSL +OP_NO_COMPRESSION = getattr(ssl, "OP_NO_COMPRESSION", 0) +OP_SINGLE_DH_USE = getattr(ssl, "OP_SINGLE_DH_USE", 0) +OP_SINGLE_ECDH_USE = getattr(ssl, "OP_SINGLE_ECDH_USE", 0) +OP_CIPHER_SERVER_PREFERENCE = getattr(ssl, "OP_CIPHER_SERVER_PREFERENCE", 0) +OP_ENABLE_MIDDLEBOX_COMPAT = getattr(ssl, "OP_ENABLE_MIDDLEBOX_COMPAT", 0) + + +def handle_error(prefix): + exc_format = ' '.join(traceback.format_exception(*sys.exc_info())) + if support.verbose: + sys.stdout.write(prefix + exc_format) + +def can_clear_options(): + # 0.9.8m or higher + return ssl._OPENSSL_API_VERSION >= (0, 9, 8, 13, 15) + +def no_sslv2_implies_sslv3_hello(): + # 0.9.7h or higher + return ssl.OPENSSL_VERSION_INFO >= (0, 9, 7, 8, 15) + +def have_verify_flags(): + # 0.9.8 or higher + return ssl.OPENSSL_VERSION_INFO >= (0, 9, 8, 0, 15) + +def utc_offset(): #NOTE: ignore issues like #1647654 + # local time = utc time + utc offset + if time.daylight and time.localtime().tm_isdst > 0: + return -time.altzone # seconds + return -time.timezone + +def 
asn1time(cert_time): + # Some versions of OpenSSL ignore seconds, see #18207 + # 0.9.8.i + if ssl._OPENSSL_API_VERSION == (0, 9, 8, 9, 15): + fmt = "%b %d %H:%M:%S %Y GMT" + dt = datetime.datetime.strptime(cert_time, fmt) + dt = dt.replace(second=0) + cert_time = dt.strftime(fmt) + # %d adds leading zero but ASN1_TIME_print() uses leading space + if cert_time[4] == "0": + cert_time = cert_time[:4] + " " + cert_time[5:] + + return cert_time + +# Issue #9415: Ubuntu hijacks their OpenSSL and forcefully disables SSLv2 +def skip_if_broken_ubuntu_ssl(func): + if hasattr(ssl, 'PROTOCOL_SSLv2'): + @functools.wraps(func) + def f(*args, **kwargs): + try: + ssl.SSLContext(ssl.PROTOCOL_SSLv2) + except ssl.SSLError: + if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and + platform.linux_distribution() == ('debian', 'squeeze/sid', '')): + raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour") + return func(*args, **kwargs) + return f + else: + return func + +def skip_if_openssl_cnf_minprotocol_gt_tls1(func): + """Skip a test if the OpenSSL config MinProtocol is > TLSv1. + + OS distros with an /etc/ssl/openssl.cnf and MinProtocol set often do so to + require TLSv1.2 or higher (Debian Buster). Some of our tests for older + protocol versions will fail under such a config. + + Alternative workaround: Run this test in a process with + OPENSSL_CONF=/dev/null in the environment. + """ + @functools.wraps(func) + def f(*args, **kwargs): + openssl_cnf = os.environ.get("OPENSSL_CONF", "/etc/ssl/openssl.cnf") + try: + with open(openssl_cnf, "r") as config: + for line in config: + match = re.match(r"MinProtocol\s*=\s*(TLSv\d+\S*)", line) + if match: + tls_ver = match.group(1) + if tls_ver > "TLSv1": + raise unittest.SkipTest( + "%s has MinProtocol = %s which is > TLSv1." % + (openssl_cnf, tls_ver)) + except (EnvironmentError, UnicodeDecodeError) as err: + # no config file found, etc. + if support.verbose: + sys.stdout.write("\n Could not scan %s for MinProtocol: %s\n" + % (openssl_cnf, err)) + return func(*args, **kwargs) + return f + + +needs_sni = unittest.skipUnless(ssl.HAS_SNI, "SNI support needed for this test") + + +def test_wrap_socket(sock, ssl_version=ssl.PROTOCOL_TLS, *, + cert_reqs=ssl.CERT_NONE, ca_certs=None, + ciphers=None, certfile=None, keyfile=None, + **kwargs): + context = ssl.SSLContext(ssl_version) + if cert_reqs is not None: + context.verify_mode = cert_reqs + if ca_certs is not None: + context.load_verify_locations(ca_certs) + if certfile is not None or keyfile is not None: + context.load_cert_chain(certfile, keyfile) + if ciphers is not None: + context.set_ciphers(ciphers) + return context.wrap_socket(sock, **kwargs) + +class BasicSocketTests(unittest.TestCase): + + def test_constants(self): + ssl.CERT_NONE + ssl.CERT_OPTIONAL + ssl.CERT_REQUIRED + ssl.OP_CIPHER_SERVER_PREFERENCE + ssl.OP_SINGLE_DH_USE + if ssl.HAS_ECDH: + ssl.OP_SINGLE_ECDH_USE + if ssl.OPENSSL_VERSION_INFO >= (1, 0): + ssl.OP_NO_COMPRESSION + self.assertIn(ssl.HAS_SNI, {True, False}) + self.assertIn(ssl.HAS_ECDH, {True, False}) + ssl.OP_NO_SSLv2 + ssl.OP_NO_SSLv3 + ssl.OP_NO_TLSv1 + ssl.OP_NO_TLSv1_3 + if ssl.OPENSSL_VERSION_INFO >= (1, 0, 1): + ssl.OP_NO_TLSv1_1 + ssl.OP_NO_TLSv1_2 + + def test_str_for_enums(self): + # Make sure that the PROTOCOL_* constants have enum-like string + # reprs. 
+ proto = ssl.PROTOCOL_TLS + self.assertEqual(str(proto), '_SSLMethod.PROTOCOL_TLS') + ctx = ssl.SSLContext(proto) + self.assertIs(ctx.protocol, proto) + + def test_random(self): + v = ssl.RAND_status() + if support.verbose: + sys.stdout.write("\n RAND_status is %d (%s)\n" + % (v, (v and "sufficient randomness") or + "insufficient randomness")) + + data, is_cryptographic = ssl.RAND_pseudo_bytes(16) + self.assertEqual(len(data), 16) + self.assertEqual(is_cryptographic, v == 1) + if v: + data = ssl.RAND_bytes(16) + self.assertEqual(len(data), 16) + else: + self.assertRaises(ssl.SSLError, ssl.RAND_bytes, 16) + + # negative num is invalid + self.assertRaises(ValueError, ssl.RAND_bytes, -5) + self.assertRaises(ValueError, ssl.RAND_pseudo_bytes, -5) + + if hasattr(ssl, 'RAND_egd'): + self.assertRaises(TypeError, ssl.RAND_egd, 1) + self.assertRaises(TypeError, ssl.RAND_egd, 'foo', 1) + ssl.RAND_add("this is a random string", 75.0) + ssl.RAND_add(b"this is a random bytes object", 75.0) + ssl.RAND_add(bytearray(b"this is a random bytearray object"), 75.0) + + @unittest.skipUnless(os.name == 'posix', 'requires posix') + def test_random_fork(self): + status = ssl.RAND_status() + if not status: + self.fail("OpenSSL's PRNG has insufficient randomness") + + rfd, wfd = os.pipe() + pid = os.fork() + if pid == 0: + try: + os.close(rfd) + child_random = ssl.RAND_pseudo_bytes(16)[0] + self.assertEqual(len(child_random), 16) + os.write(wfd, child_random) + os.close(wfd) + except BaseException: + os._exit(1) + else: + os._exit(0) + else: + os.close(wfd) + self.addCleanup(os.close, rfd) + _, status = os.waitpid(pid, 0) + self.assertEqual(status, 0) + + child_random = os.read(rfd, 16) + self.assertEqual(len(child_random), 16) + parent_random = ssl.RAND_pseudo_bytes(16)[0] + self.assertEqual(len(parent_random), 16) + + self.assertNotEqual(child_random, parent_random) + + maxDiff = None + + def test_parse_cert(self): + # note that this uses an 'unofficial' function in _ssl.c, + # provided solely for this test, to exercise the certificate + # parsing code + p = ssl._ssl._test_decode_cert(CERTFILE) + if support.verbose: + sys.stdout.write("\n" + pprint.pformat(p) + "\n") + self.assertEqual(p['issuer'], + ((('countryName', 'XY'),), + (('localityName', 'Castle Anthrax'),), + (('organizationName', 'Python Software Foundation'),), + (('commonName', 'localhost'),)) + ) + # Note the next three asserts will fail if the keys are regenerated + self.assertEqual(p['notAfter'], asn1time('Aug 26 14:23:15 2028 GMT')) + self.assertEqual(p['notBefore'], asn1time('Aug 29 14:23:15 2018 GMT')) + self.assertEqual(p['serialNumber'], '98A7CF88C74A32ED') + self.assertEqual(p['subject'], + ((('countryName', 'XY'),), + (('localityName', 'Castle Anthrax'),), + (('organizationName', 'Python Software Foundation'),), + (('commonName', 'localhost'),)) + ) + self.assertEqual(p['subjectAltName'], (('DNS', 'localhost'),)) + # Issue #13034: the subjectAltName in some certificates + # (notably projects.developer.nokia.com:443) wasn't parsed + p = ssl._ssl._test_decode_cert(NOKIACERT) + if support.verbose: + sys.stdout.write("\n" + pprint.pformat(p) + "\n") + self.assertEqual(p['subjectAltName'], + (('DNS', 'projects.developer.nokia.com'), + ('DNS', 'projects.forum.nokia.com')) + ) + # extra OCSP and AIA fields + self.assertEqual(p['OCSP'], ('http://ocsp.verisign.com',)) + self.assertEqual(p['caIssuers'], + ('http://SVRIntl-G3-aia.verisign.com/SVRIntlG3.cer',)) + self.assertEqual(p['crlDistributionPoints'], + 
('http://SVRIntl-G3-crl.verisign.com/SVRIntlG3.crl',)) + + def test_parse_cert_CVE_2019_5010(self): + p = ssl._ssl._test_decode_cert(TALOS_INVALID_CRLDP) + if support.verbose: + sys.stdout.write("\n" + pprint.pformat(p) + "\n") + self.assertEqual( + p, + { + 'issuer': ( + (('countryName', 'UK'),), (('commonName', 'cody-ca'),)), + 'notAfter': 'Jun 14 18:00:58 2028 GMT', + 'notBefore': 'Jun 18 18:00:58 2018 GMT', + 'serialNumber': '02', + 'subject': ((('countryName', 'UK'),), + (('commonName', + 'codenomicon-vm-2.test.lal.cisco.com'),)), + 'subjectAltName': ( + ('DNS', 'codenomicon-vm-2.test.lal.cisco.com'),), + 'version': 3 + } + ) + + def test_parse_cert_CVE_2013_4238(self): + p = ssl._ssl._test_decode_cert(NULLBYTECERT) + if support.verbose: + sys.stdout.write("\n" + pprint.pformat(p) + "\n") + subject = ((('countryName', 'US'),), + (('stateOrProvinceName', 'Oregon'),), + (('localityName', 'Beaverton'),), + (('organizationName', 'Python Software Foundation'),), + (('organizationalUnitName', 'Python Core Development'),), + (('commonName', 'null.python.org\x00example.org'),), + (('emailAddress', 'python-dev at python.org'),)) + self.assertEqual(p['subject'], subject) + self.assertEqual(p['issuer'], subject) + if ssl._OPENSSL_API_VERSION >= (0, 9, 8): + san = (('DNS', 'altnull.python.org\x00example.com'), + ('email', 'null at python.org\x00user at example.org'), + ('URI', 'http://null.python.org\x00http://example.org'), + ('IP Address', '192.0.2.1'), + ('IP Address', '2001:DB8:0:0:0:0:0:1\n')) + else: + # OpenSSL 0.9.7 doesn't support IPv6 addresses in subjectAltName + san = (('DNS', 'altnull.python.org\x00example.com'), + ('email', 'null at python.org\x00user at example.org'), + ('URI', 'http://null.python.org\x00http://example.org'), + ('IP Address', '192.0.2.1'), + ('IP Address', '')) + + self.assertEqual(p['subjectAltName'], san) + + def test_parse_all_sans(self): + p = ssl._ssl._test_decode_cert(ALLSANFILE) + self.assertEqual(p['subjectAltName'], + ( + ('DNS', 'allsans'), + ('othername', ''), + ('othername', ''), + ('email', 'user at example.org'), + ('DNS', 'www.example.org'), + ('DirName', + ((('countryName', 'XY'),), + (('localityName', 'Castle Anthrax'),), + (('organizationName', 'Python Software Foundation'),), + (('commonName', 'dirname example'),))), + ('URI', 'https://www.python.org/'), + ('IP Address', '127.0.0.1'), + ('IP Address', '0:0:0:0:0:0:0:1\n'), + ('Registered ID', '1.2.3.4.5') + ) + ) + + def test_DER_to_PEM(self): + with open(CAFILE_CACERT, 'r') as f: + pem = f.read() + d1 = ssl.PEM_cert_to_DER_cert(pem) + p2 = ssl.DER_cert_to_PEM_cert(d1) + d2 = ssl.PEM_cert_to_DER_cert(p2) + self.assertEqual(d1, d2) + if not p2.startswith(ssl.PEM_HEADER + '\n'): + self.fail("DER-to-PEM didn't include correct header:\n%r\n" % p2) + if not p2.endswith('\n' + ssl.PEM_FOOTER + '\n'): + self.fail("DER-to-PEM didn't include correct footer:\n%r\n" % p2) + + def test_openssl_version(self): + n = ssl.OPENSSL_VERSION_NUMBER + t = ssl.OPENSSL_VERSION_INFO + s = ssl.OPENSSL_VERSION + self.assertIsInstance(n, int) + self.assertIsInstance(t, tuple) + self.assertIsInstance(s, str) + # Some sanity checks follow + # >= 0.9 + self.assertGreaterEqual(n, 0x900000) + # < 3.0 + self.assertLess(n, 0x30000000) + major, minor, fix, patch, status = t + self.assertGreaterEqual(major, 0) + self.assertLess(major, 3) + self.assertGreaterEqual(minor, 0) + self.assertLess(minor, 256) + self.assertGreaterEqual(fix, 0) + self.assertLess(fix, 256) + self.assertGreaterEqual(patch, 0) + self.assertLessEqual(patch, 63) 
+ self.assertGreaterEqual(status, 0) + self.assertLessEqual(status, 15) + # Version string as returned by {Open,Libre}SSL, the format might change + if IS_LIBRESSL: + self.assertTrue(s.startswith("LibreSSL {:d}".format(major)), + (s, t, hex(n))) + else: + self.assertTrue(s.startswith("OpenSSL {:d}.{:d}.{:d}".format(major, minor, fix)), + (s, t, hex(n))) + + @support.cpython_only + def test_refcycle(self): + # Issue #7943: an SSL object doesn't create reference cycles with + # itself. + s = socket.socket(socket.AF_INET) + ss = test_wrap_socket(s) + wr = weakref.ref(ss) + with support.check_warnings(("", ResourceWarning)): + del ss + self.assertEqual(wr(), None) + + def test_wrapped_unconnected(self): + # Methods on an unconnected SSLSocket propagate the original + # OSError raise by the underlying socket object. + s = socket.socket(socket.AF_INET) + with test_wrap_socket(s) as ss: + self.assertRaises(OSError, ss.recv, 1) + self.assertRaises(OSError, ss.recv_into, bytearray(b'x')) + self.assertRaises(OSError, ss.recvfrom, 1) + self.assertRaises(OSError, ss.recvfrom_into, bytearray(b'x'), 1) + self.assertRaises(OSError, ss.send, b'x') + self.assertRaises(OSError, ss.sendto, b'x', ('0.0.0.0', 0)) + self.assertRaises(NotImplementedError, ss.dup) + self.assertRaises(NotImplementedError, ss.sendmsg, + [b'x'], (), 0, ('0.0.0.0', 0)) + self.assertRaises(NotImplementedError, ss.recvmsg, 100) + self.assertRaises(NotImplementedError, ss.recvmsg_into, + [bytearray(100)]) + + def test_timeout(self): + # Issue #8524: when creating an SSL socket, the timeout of the + # original socket should be retained. + for timeout in (None, 0.0, 5.0): + s = socket.socket(socket.AF_INET) + s.settimeout(timeout) + with test_wrap_socket(s) as ss: + self.assertEqual(timeout, ss.gettimeout()) + + def test_errors_sslwrap(self): + sock = socket.socket() + self.assertRaisesRegex(ValueError, + "certfile must be specified", + ssl.wrap_socket, sock, keyfile=CERTFILE) + self.assertRaisesRegex(ValueError, + "certfile must be specified for server-side operations", + ssl.wrap_socket, sock, server_side=True) + self.assertRaisesRegex(ValueError, + "certfile must be specified for server-side operations", + ssl.wrap_socket, sock, server_side=True, certfile="") + with ssl.wrap_socket(sock, server_side=True, certfile=CERTFILE) as s: + self.assertRaisesRegex(ValueError, "can't connect in server-side mode", + s.connect, (HOST, 8080)) + with self.assertRaises(OSError) as cm: + with socket.socket() as sock: + ssl.wrap_socket(sock, certfile=NONEXISTINGCERT) + self.assertEqual(cm.exception.errno, errno.ENOENT) + with self.assertRaises(OSError) as cm: + with socket.socket() as sock: + ssl.wrap_socket(sock, + certfile=CERTFILE, keyfile=NONEXISTINGCERT) + self.assertEqual(cm.exception.errno, errno.ENOENT) + with self.assertRaises(OSError) as cm: + with socket.socket() as sock: + ssl.wrap_socket(sock, + certfile=NONEXISTINGCERT, keyfile=NONEXISTINGCERT) + self.assertEqual(cm.exception.errno, errno.ENOENT) + + def bad_cert_test(self, certfile): + """Check that trying to use the given client certificate fails""" + certfile = os.path.join(os.path.dirname(__file__) or os.curdir, + certfile) + sock = socket.socket() + self.addCleanup(sock.close) + with self.assertRaises(ssl.SSLError): + test_wrap_socket(sock, + certfile=certfile, + ssl_version=ssl.PROTOCOL_TLSv1) + + def test_empty_cert(self): + """Wrapping with an empty cert file""" + self.bad_cert_test("nullcert.pem") + + def test_malformed_cert(self): + """Wrapping with a badly formatted certificate 
(syntax error)""" + self.bad_cert_test("badcert.pem") + + def test_malformed_key(self): + """Wrapping with a badly formatted key (syntax error)""" + self.bad_cert_test("badkey.pem") + + def test_match_hostname(self): + def ok(cert, hostname): + ssl.match_hostname(cert, hostname) + def fail(cert, hostname): + self.assertRaises(ssl.CertificateError, + ssl.match_hostname, cert, hostname) + + # -- Hostname matching -- + + cert = {'subject': ((('commonName', 'example.com'),),)} + ok(cert, 'example.com') + ok(cert, 'ExAmple.cOm') + fail(cert, 'www.example.com') + fail(cert, '.example.com') + fail(cert, 'example.org') + fail(cert, 'exampleXcom') + + cert = {'subject': ((('commonName', '*.a.com'),),)} + ok(cert, 'foo.a.com') + fail(cert, 'bar.foo.a.com') + fail(cert, 'a.com') + fail(cert, 'Xa.com') + fail(cert, '.a.com') + + # only match one left-most wildcard + cert = {'subject': ((('commonName', 'f*.com'),),)} + ok(cert, 'foo.com') + ok(cert, 'f.com') + fail(cert, 'bar.com') + fail(cert, 'foo.a.com') + fail(cert, 'bar.foo.com') + + # NULL bytes are bad, CVE-2013-4073 + cert = {'subject': ((('commonName', + 'null.python.org\x00example.org'),),)} + ok(cert, 'null.python.org\x00example.org') # or raise an error? + fail(cert, 'example.org') + fail(cert, 'null.python.org') + + # error cases with wildcards + cert = {'subject': ((('commonName', '*.*.a.com'),),)} + fail(cert, 'bar.foo.a.com') + fail(cert, 'a.com') + fail(cert, 'Xa.com') + fail(cert, '.a.com') + + cert = {'subject': ((('commonName', 'a.*.com'),),)} + fail(cert, 'a.foo.com') + fail(cert, 'a..com') + fail(cert, 'a.com') + + # wildcard doesn't match IDNA prefix 'xn--' + idna = 'püthon.python.org'.encode("idna").decode("ascii") + cert = {'subject': ((('commonName', idna),),)} + ok(cert, idna) + cert = {'subject': ((('commonName', 'x*.python.org'),),)} + fail(cert, idna) + cert = {'subject': ((('commonName', 'xn--p*.python.org'),),)} + fail(cert, idna) + + # wildcard in first fragment and IDNA A-labels in sequent fragments + # are supported. 
+ idna = 'www*.pythön.org'.encode("idna").decode("ascii") + cert = {'subject': ((('commonName', idna),),)} + ok(cert, 'www.pythön.org'.encode("idna").decode("ascii")) + ok(cert, 'www1.pythön.org'.encode("idna").decode("ascii")) + fail(cert, 'ftp.pythön.org'.encode("idna").decode("ascii")) + fail(cert, 'pythön.org'.encode("idna").decode("ascii")) + + # Slightly fake real-world example + cert = {'notAfter': 'Jun 26 21:41:46 2011 GMT', + 'subject': ((('commonName', 'linuxfrz.org'),),), + 'subjectAltName': (('DNS', 'linuxfr.org'), + ('DNS', 'linuxfr.com'), + ('othername', ''))} + ok(cert, 'linuxfr.org') + ok(cert, 'linuxfr.com') + # Not a "DNS" entry + fail(cert, '') + # When there is a subjectAltName, commonName isn't used + fail(cert, 'linuxfrz.org') + + # A pristine real-world example + cert = {'notAfter': 'Dec 18 23:59:59 2011 GMT', + 'subject': ((('countryName', 'US'),), + (('stateOrProvinceName', 'California'),), + (('localityName', 'Mountain View'),), + (('organizationName', 'Google Inc'),), + (('commonName', 'mail.google.com'),))} + ok(cert, 'mail.google.com') + fail(cert, 'gmail.com') + # Only commonName is considered + fail(cert, 'California') + + # -- IPv4 matching -- + cert = {'subject': ((('commonName', 'example.com'),),), + 'subjectAltName': (('DNS', 'example.com'), + ('IP Address', '10.11.12.13'), + ('IP Address', '14.15.16.17'))} + ok(cert, '10.11.12.13') + ok(cert, '14.15.16.17') + fail(cert, '14.15.16.18') + fail(cert, 'example.net') + + # -- IPv6 matching -- + cert = {'subject': ((('commonName', 'example.com'),),), + 'subjectAltName': (('DNS', 'example.com'), + ('IP Address', '2001:0:0:0:0:0:0:CAFE\n'), + ('IP Address', '2003:0:0:0:0:0:0:BABA\n'))} + ok(cert, '2001::cafe') + ok(cert, '2003::baba') + fail(cert, '2003::bebe') + fail(cert, 'example.net') + + # -- Miscellaneous -- + + # Neither commonName nor subjectAltName + cert = {'notAfter': 'Dec 18 23:59:59 2011 GMT', + 'subject': ((('countryName', 'US'),), + (('stateOrProvinceName', 'California'),), + (('localityName', 'Mountain View'),), + (('organizationName', 'Google Inc'),))} + fail(cert, 'mail.google.com') + + # No DNS entry in subjectAltName but a commonName + cert = {'notAfter': 'Dec 18 23:59:59 2099 GMT', + 'subject': ((('countryName', 'US'),), + (('stateOrProvinceName', 'California'),), + (('localityName', 'Mountain View'),), + (('commonName', 'mail.google.com'),)), + 'subjectAltName': (('othername', 'blabla'), )} + ok(cert, 'mail.google.com') + + # No DNS entry subjectAltName and no commonName + cert = {'notAfter': 'Dec 18 23:59:59 2099 GMT', + 'subject': ((('countryName', 'US'),), + (('stateOrProvinceName', 'California'),), + (('localityName', 'Mountain View'),), + (('organizationName', 'Google Inc'),)), + 'subjectAltName': (('othername', 'blabla'),)} + fail(cert, 'google.com') + + # Empty cert / no cert + self.assertRaises(ValueError, ssl.match_hostname, None, 'example.com') + self.assertRaises(ValueError, ssl.match_hostname, {}, 'example.com') + + # Issue #17980: avoid denials of service by refusing more than one + # wildcard per fragment. 
+ cert = {'subject': ((('commonName', 'a*b.com'),),)} + ok(cert, 'axxb.com') + cert = {'subject': ((('commonName', 'a*b.co*'),),)} + fail(cert, 'axxb.com') + cert = {'subject': ((('commonName', 'a*b*.com'),),)} + with self.assertRaises(ssl.CertificateError) as cm: + ssl.match_hostname(cert, 'axxbxxc.com') + self.assertIn("too many wildcards", str(cm.exception)) + + def test_server_side(self): + # server_hostname doesn't work for server sockets + ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + with socket.socket() as sock: + self.assertRaises(ValueError, ctx.wrap_socket, sock, True, + server_hostname="some.hostname") + + def test_unknown_channel_binding(self): + # should raise ValueError for unknown type + s = socket.socket(socket.AF_INET) + s.bind(('127.0.0.1', 0)) + s.listen() + c = socket.socket(socket.AF_INET) + c.connect(s.getsockname()) + with test_wrap_socket(c, do_handshake_on_connect=False) as ss: + with self.assertRaises(ValueError): + ss.get_channel_binding("unknown-type") + s.close() + + @unittest.skipUnless("tls-unique" in ssl.CHANNEL_BINDING_TYPES, + "'tls-unique' channel binding not available") + def test_tls_unique_channel_binding(self): + # unconnected should return None for known type + s = socket.socket(socket.AF_INET) + with test_wrap_socket(s) as ss: + self.assertIsNone(ss.get_channel_binding("tls-unique")) + # the same for server-side + s = socket.socket(socket.AF_INET) + with test_wrap_socket(s, server_side=True, certfile=CERTFILE) as ss: + self.assertIsNone(ss.get_channel_binding("tls-unique")) + + def test_dealloc_warn(self): + ss = test_wrap_socket(socket.socket(socket.AF_INET)) + r = repr(ss) + with self.assertWarns(ResourceWarning) as cm: + ss = None + support.gc_collect() + self.assertIn(r, str(cm.warning.args[0])) + + def test_get_default_verify_paths(self): + paths = ssl.get_default_verify_paths() + self.assertEqual(len(paths), 6) + self.assertIsInstance(paths, ssl.DefaultVerifyPaths) + + with support.EnvironmentVarGuard() as env: + env["SSL_CERT_DIR"] = CAPATH + env["SSL_CERT_FILE"] = CERTFILE + paths = ssl.get_default_verify_paths() + self.assertEqual(paths.cafile, CERTFILE) + self.assertEqual(paths.capath, CAPATH) + + @unittest.skipUnless(sys.platform == "win32", "Windows specific") + def test_enum_certificates(self): + self.assertTrue(ssl.enum_certificates("CA")) + self.assertTrue(ssl.enum_certificates("ROOT")) + + self.assertRaises(TypeError, ssl.enum_certificates) + self.assertRaises(WindowsError, ssl.enum_certificates, "") + + trust_oids = set() + for storename in ("CA", "ROOT"): + store = ssl.enum_certificates(storename) + self.assertIsInstance(store, list) + for element in store: + self.assertIsInstance(element, tuple) + self.assertEqual(len(element), 3) + cert, enc, trust = element + self.assertIsInstance(cert, bytes) + self.assertIn(enc, {"x509_asn", "pkcs_7_asn"}) + self.assertIsInstance(trust, (set, bool)) + if isinstance(trust, set): + trust_oids.update(trust) + + serverAuth = "1.3.6.1.5.5.7.3.1" + self.assertIn(serverAuth, trust_oids) + + @unittest.skipUnless(sys.platform == "win32", "Windows specific") + def test_enum_crls(self): + self.assertTrue(ssl.enum_crls("CA")) + self.assertRaises(TypeError, ssl.enum_crls) + self.assertRaises(WindowsError, ssl.enum_crls, "") + + crls = ssl.enum_crls("CA") + self.assertIsInstance(crls, list) + for element in crls: + self.assertIsInstance(element, tuple) + self.assertEqual(len(element), 2) + self.assertIsInstance(element[0], bytes) + self.assertIn(element[1], {"x509_asn", "pkcs_7_asn"}) + + + def 
test_asn1object(self): + expected = (129, 'serverAuth', 'TLS Web Server Authentication', + '1.3.6.1.5.5.7.3.1') + + val = ssl._ASN1Object('1.3.6.1.5.5.7.3.1') + self.assertEqual(val, expected) + self.assertEqual(val.nid, 129) + self.assertEqual(val.shortname, 'serverAuth') + self.assertEqual(val.longname, 'TLS Web Server Authentication') + self.assertEqual(val.oid, '1.3.6.1.5.5.7.3.1') + self.assertIsInstance(val, ssl._ASN1Object) + self.assertRaises(ValueError, ssl._ASN1Object, 'serverAuth') + + val = ssl._ASN1Object.fromnid(129) + self.assertEqual(val, expected) + self.assertIsInstance(val, ssl._ASN1Object) + self.assertRaises(ValueError, ssl._ASN1Object.fromnid, -1) + with self.assertRaisesRegex(ValueError, "unknown NID 100000"): + ssl._ASN1Object.fromnid(100000) + for i in range(1000): + try: + obj = ssl._ASN1Object.fromnid(i) + except ValueError: + pass + else: + self.assertIsInstance(obj.nid, int) + self.assertIsInstance(obj.shortname, str) + self.assertIsInstance(obj.longname, str) + self.assertIsInstance(obj.oid, (str, type(None))) + + val = ssl._ASN1Object.fromname('TLS Web Server Authentication') + self.assertEqual(val, expected) + self.assertIsInstance(val, ssl._ASN1Object) + self.assertEqual(ssl._ASN1Object.fromname('serverAuth'), expected) + self.assertEqual(ssl._ASN1Object.fromname('1.3.6.1.5.5.7.3.1'), + expected) + with self.assertRaisesRegex(ValueError, "unknown object 'serverauth'"): + ssl._ASN1Object.fromname('serverauth') + + def test_purpose_enum(self): + val = ssl._ASN1Object('1.3.6.1.5.5.7.3.1') + self.assertIsInstance(ssl.Purpose.SERVER_AUTH, ssl._ASN1Object) + self.assertEqual(ssl.Purpose.SERVER_AUTH, val) + self.assertEqual(ssl.Purpose.SERVER_AUTH.nid, 129) + self.assertEqual(ssl.Purpose.SERVER_AUTH.shortname, 'serverAuth') + self.assertEqual(ssl.Purpose.SERVER_AUTH.oid, + '1.3.6.1.5.5.7.3.1') + + val = ssl._ASN1Object('1.3.6.1.5.5.7.3.2') + self.assertIsInstance(ssl.Purpose.CLIENT_AUTH, ssl._ASN1Object) + self.assertEqual(ssl.Purpose.CLIENT_AUTH, val) + self.assertEqual(ssl.Purpose.CLIENT_AUTH.nid, 130) + self.assertEqual(ssl.Purpose.CLIENT_AUTH.shortname, 'clientAuth') + self.assertEqual(ssl.Purpose.CLIENT_AUTH.oid, + '1.3.6.1.5.5.7.3.2') + + def test_unsupported_dtls(self): + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + self.addCleanup(s.close) + with self.assertRaises(NotImplementedError) as cx: + test_wrap_socket(s, cert_reqs=ssl.CERT_NONE) + self.assertEqual(str(cx.exception), "only stream sockets are supported") + ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + with self.assertRaises(NotImplementedError) as cx: + ctx.wrap_socket(s) + self.assertEqual(str(cx.exception), "only stream sockets are supported") + + def cert_time_ok(self, timestring, timestamp): + self.assertEqual(ssl.cert_time_to_seconds(timestring), timestamp) + + def cert_time_fail(self, timestring): + with self.assertRaises(ValueError): + ssl.cert_time_to_seconds(timestring) + + @unittest.skipUnless(utc_offset(), + 'local time needs to be different from UTC') + def test_cert_time_to_seconds_timezone(self): + # Issue #19940: ssl.cert_time_to_seconds() returns wrong + # results if local timezone is not UTC + self.cert_time_ok("May 9 00:00:00 2007 GMT", 1178668800.0) + self.cert_time_ok("Jan 5 09:34:43 2018 GMT", 1515144883.0) + + def test_cert_time_to_seconds(self): + timestring = "Jan 5 09:34:43 2018 GMT" + ts = 1515144883.0 + self.cert_time_ok(timestring, ts) + # accept keyword parameter, assert its name + self.assertEqual(ssl.cert_time_to_seconds(cert_time=timestring), ts) + # accept 
both %e and %d (space or zero generated by strftime) + self.cert_time_ok("Jan 05 09:34:43 2018 GMT", ts) + # case-insensitive + self.cert_time_ok("JaN 5 09:34:43 2018 GmT", ts) + self.cert_time_fail("Jan 5 09:34 2018 GMT") # no seconds + self.cert_time_fail("Jan 5 09:34:43 2018") # no GMT + self.cert_time_fail("Jan 5 09:34:43 2018 UTC") # not GMT timezone + self.cert_time_fail("Jan 35 09:34:43 2018 GMT") # invalid day + self.cert_time_fail("Jon 5 09:34:43 2018 GMT") # invalid month + self.cert_time_fail("Jan 5 24:00:00 2018 GMT") # invalid hour + self.cert_time_fail("Jan 5 09:60:43 2018 GMT") # invalid minute + + newyear_ts = 1230768000.0 + # leap seconds + self.cert_time_ok("Dec 31 23:59:60 2008 GMT", newyear_ts) + # same timestamp + self.cert_time_ok("Jan 1 00:00:00 2009 GMT", newyear_ts) + + self.cert_time_ok("Jan 5 09:34:59 2018 GMT", 1515144899) + # allow 60th second (even if it is not a leap second) + self.cert_time_ok("Jan 5 09:34:60 2018 GMT", 1515144900) + # allow 2nd leap second for compatibility with time.strptime() + self.cert_time_ok("Jan 5 09:34:61 2018 GMT", 1515144901) + self.cert_time_fail("Jan 5 09:34:62 2018 GMT") # invalid seconds + + # no special treatment for the special value: + # 99991231235959Z (rfc 5280) + self.cert_time_ok("Dec 31 23:59:59 9999 GMT", 253402300799.0) + + @support.run_with_locale('LC_ALL', '') + def test_cert_time_to_seconds_locale(self): + # `cert_time_to_seconds()` should be locale independent + + def local_february_name(): + return time.strftime('%b', (1, 2, 3, 4, 5, 6, 0, 0, 0)) + + if local_february_name().lower() == 'feb': + self.skipTest("locale-specific month name needs to be " + "different from C locale") + + # locale-independent + self.cert_time_ok("Feb 9 00:00:00 2007 GMT", 1170979200.0) + self.cert_time_fail(local_february_name() + " 9 00:00:00 2007 GMT") + + def test_connect_ex_error(self): + server = socket.socket(socket.AF_INET) + self.addCleanup(server.close) + port = support.bind_port(server) # Reserve port but don't listen + s = test_wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_REQUIRED) + self.addCleanup(s.close) + rc = s.connect_ex((HOST, port)) + # Issue #19919: Windows machines or VMs hosted on Windows + # machines sometimes return EWOULDBLOCK. 
+ errors = ( + errno.ECONNREFUSED, errno.EHOSTUNREACH, errno.ETIMEDOUT, + errno.EWOULDBLOCK, + ) + self.assertIn(rc, errors) + + +class ContextTests(unittest.TestCase): + + @skip_if_broken_ubuntu_ssl + def test_constructor(self): + for protocol in PROTOCOLS: + ssl.SSLContext(protocol) + ctx = ssl.SSLContext() + self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLS) + self.assertRaises(ValueError, ssl.SSLContext, -1) + self.assertRaises(ValueError, ssl.SSLContext, 42) + + @skip_if_broken_ubuntu_ssl + def test_protocol(self): + for proto in PROTOCOLS: + ctx = ssl.SSLContext(proto) + self.assertEqual(ctx.protocol, proto) + + def test_ciphers(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ctx.set_ciphers("ALL") + ctx.set_ciphers("DEFAULT") + with self.assertRaisesRegex(ssl.SSLError, "No cipher can be selected"): + ctx.set_ciphers("^$:,;?*'dorothyx") + + @unittest.skipIf(ssl.OPENSSL_VERSION_INFO < (1, 0, 2, 0, 0), 'OpenSSL too old') + def test_get_ciphers(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ctx.set_ciphers('AESGCM') + names = set(d['name'] for d in ctx.get_ciphers()) + self.assertIn('AES256-GCM-SHA384', names) + self.assertIn('AES128-GCM-SHA256', names) + + @skip_if_broken_ubuntu_ssl + def test_options(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + # OP_ALL | OP_NO_SSLv2 | OP_NO_SSLv3 is the default value + default = (ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3) + # SSLContext also enables these by default + default |= (OP_NO_COMPRESSION | OP_CIPHER_SERVER_PREFERENCE | + OP_SINGLE_DH_USE | OP_SINGLE_ECDH_USE | + OP_ENABLE_MIDDLEBOX_COMPAT) + self.assertEqual(default, ctx.options) + ctx.options |= ssl.OP_NO_TLSv1 + self.assertEqual(default | ssl.OP_NO_TLSv1, ctx.options) + if can_clear_options(): + ctx.options = (ctx.options & ~ssl.OP_NO_TLSv1) + self.assertEqual(default, ctx.options) + ctx.options = 0 + # Ubuntu has OP_NO_SSLv3 forced on by default + self.assertEqual(0, ctx.options & ~ssl.OP_NO_SSLv3) + else: + with self.assertRaises(ValueError): + ctx.options = 0 + + def test_verify_mode(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + # Default value + self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) + ctx.verify_mode = ssl.CERT_OPTIONAL + self.assertEqual(ctx.verify_mode, ssl.CERT_OPTIONAL) + ctx.verify_mode = ssl.CERT_REQUIRED + self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) + ctx.verify_mode = ssl.CERT_NONE + self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) + with self.assertRaises(TypeError): + ctx.verify_mode = None + with self.assertRaises(ValueError): + ctx.verify_mode = 42 + + @unittest.skipUnless(have_verify_flags(), + "verify_flags need OpenSSL > 0.9.8") + def test_verify_flags(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + # default value + tf = getattr(ssl, "VERIFY_X509_TRUSTED_FIRST", 0) + self.assertEqual(ctx.verify_flags, ssl.VERIFY_DEFAULT | tf) + ctx.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF + self.assertEqual(ctx.verify_flags, ssl.VERIFY_CRL_CHECK_LEAF) + ctx.verify_flags = ssl.VERIFY_CRL_CHECK_CHAIN + self.assertEqual(ctx.verify_flags, ssl.VERIFY_CRL_CHECK_CHAIN) + ctx.verify_flags = ssl.VERIFY_DEFAULT + self.assertEqual(ctx.verify_flags, ssl.VERIFY_DEFAULT) + # supports any value + ctx.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF | ssl.VERIFY_X509_STRICT + self.assertEqual(ctx.verify_flags, + ssl.VERIFY_CRL_CHECK_LEAF | ssl.VERIFY_X509_STRICT) + with self.assertRaises(TypeError): + ctx.verify_flags = None + + def test_load_cert_chain(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + # Combined key and cert in a single file + 
ctx.load_cert_chain(CERTFILE, keyfile=None) + ctx.load_cert_chain(CERTFILE, keyfile=CERTFILE) + self.assertRaises(TypeError, ctx.load_cert_chain, keyfile=CERTFILE) + with self.assertRaises(OSError) as cm: + ctx.load_cert_chain(NONEXISTINGCERT) + self.assertEqual(cm.exception.errno, errno.ENOENT) + with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): + ctx.load_cert_chain(BADCERT) + with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): + ctx.load_cert_chain(EMPTYCERT) + # Separate key and cert + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ctx.load_cert_chain(ONLYCERT, ONLYKEY) + ctx.load_cert_chain(certfile=ONLYCERT, keyfile=ONLYKEY) + ctx.load_cert_chain(certfile=BYTES_ONLYCERT, keyfile=BYTES_ONLYKEY) + with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): + ctx.load_cert_chain(ONLYCERT) + with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): + ctx.load_cert_chain(ONLYKEY) + with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): + ctx.load_cert_chain(certfile=ONLYKEY, keyfile=ONLYCERT) + # Mismatching key and cert + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + with self.assertRaisesRegex(ssl.SSLError, "key values mismatch"): + ctx.load_cert_chain(CAFILE_CACERT, ONLYKEY) + # Password protected key and cert + ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD) + ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD.encode()) + ctx.load_cert_chain(CERTFILE_PROTECTED, + password=bytearray(KEY_PASSWORD.encode())) + ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, KEY_PASSWORD) + ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, KEY_PASSWORD.encode()) + ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, + bytearray(KEY_PASSWORD.encode())) + with self.assertRaisesRegex(TypeError, "should be a string"): + ctx.load_cert_chain(CERTFILE_PROTECTED, password=True) + with self.assertRaises(ssl.SSLError): + ctx.load_cert_chain(CERTFILE_PROTECTED, password="badpass") + with self.assertRaisesRegex(ValueError, "cannot be longer"): + # openssl has a fixed limit on the password buffer. + # PEM_BUFSIZE is generally set to 1kb. + # Return a string larger than this. 
+ ctx.load_cert_chain(CERTFILE_PROTECTED, password=b'a' * 102400) + # Password callback + def getpass_unicode(): + return KEY_PASSWORD + def getpass_bytes(): + return KEY_PASSWORD.encode() + def getpass_bytearray(): + return bytearray(KEY_PASSWORD.encode()) + def getpass_badpass(): + return "badpass" + def getpass_huge(): + return b'a' * (1024 * 1024) + def getpass_bad_type(): + return 9 + def getpass_exception(): + raise Exception('getpass error') + class GetPassCallable: + def __call__(self): + return KEY_PASSWORD + def getpass(self): + return KEY_PASSWORD + ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_unicode) + ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bytes) + ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bytearray) + ctx.load_cert_chain(CERTFILE_PROTECTED, password=GetPassCallable()) + ctx.load_cert_chain(CERTFILE_PROTECTED, + password=GetPassCallable().getpass) + with self.assertRaises(ssl.SSLError): + ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_badpass) + with self.assertRaisesRegex(ValueError, "cannot be longer"): + ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_huge) + with self.assertRaisesRegex(TypeError, "must return a string"): + ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bad_type) + with self.assertRaisesRegex(Exception, "getpass error"): + ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_exception) + # Make sure the password function isn't called if it isn't needed + ctx.load_cert_chain(CERTFILE, password=getpass_exception) + + def test_load_verify_locations(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ctx.load_verify_locations(CERTFILE) + ctx.load_verify_locations(cafile=CERTFILE, capath=None) + ctx.load_verify_locations(BYTES_CERTFILE) + ctx.load_verify_locations(cafile=BYTES_CERTFILE, capath=None) + self.assertRaises(TypeError, ctx.load_verify_locations) + self.assertRaises(TypeError, ctx.load_verify_locations, None, None, None) + with self.assertRaises(OSError) as cm: + ctx.load_verify_locations(NONEXISTINGCERT) + self.assertEqual(cm.exception.errno, errno.ENOENT) + with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): + ctx.load_verify_locations(BADCERT) + ctx.load_verify_locations(CERTFILE, CAPATH) + ctx.load_verify_locations(CERTFILE, capath=BYTES_CAPATH) + + # Issue #10989: crash if the second argument type is invalid + self.assertRaises(TypeError, ctx.load_verify_locations, None, True) + + def test_load_verify_cadata(self): + # test cadata + with open(CAFILE_CACERT) as f: + cacert_pem = f.read() + cacert_der = ssl.PEM_cert_to_DER_cert(cacert_pem) + with open(CAFILE_NEURONIO) as f: + neuronio_pem = f.read() + neuronio_der = ssl.PEM_cert_to_DER_cert(neuronio_pem) + + # test PEM + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + self.assertEqual(ctx.cert_store_stats()["x509_ca"], 0) + ctx.load_verify_locations(cadata=cacert_pem) + self.assertEqual(ctx.cert_store_stats()["x509_ca"], 1) + ctx.load_verify_locations(cadata=neuronio_pem) + self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) + # cert already in hash table + ctx.load_verify_locations(cadata=neuronio_pem) + self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) + + # combined + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + combined = "\n".join((cacert_pem, neuronio_pem)) + ctx.load_verify_locations(cadata=combined) + self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) + + # with junk around the certs + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + combined = ["head", cacert_pem, "other", neuronio_pem, "again", + neuronio_pem, 
"tail"] + ctx.load_verify_locations(cadata="\n".join(combined)) + self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) + + # test DER + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ctx.load_verify_locations(cadata=cacert_der) + ctx.load_verify_locations(cadata=neuronio_der) + self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) + # cert already in hash table + ctx.load_verify_locations(cadata=cacert_der) + self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) + + # combined + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + combined = b"".join((cacert_der, neuronio_der)) + ctx.load_verify_locations(cadata=combined) + self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2) + + # error cases + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + self.assertRaises(TypeError, ctx.load_verify_locations, cadata=object) + + with self.assertRaisesRegex(ssl.SSLError, "no start line"): + ctx.load_verify_locations(cadata="broken") + with self.assertRaisesRegex(ssl.SSLError, "not enough data"): + ctx.load_verify_locations(cadata=b"broken") + + + def test_load_dh_params(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ctx.load_dh_params(DHFILE) + if os.name != 'nt': + ctx.load_dh_params(BYTES_DHFILE) + self.assertRaises(TypeError, ctx.load_dh_params) + self.assertRaises(TypeError, ctx.load_dh_params, None) + with self.assertRaises(FileNotFoundError) as cm: + ctx.load_dh_params(NONEXISTINGCERT) + self.assertEqual(cm.exception.errno, errno.ENOENT) + with self.assertRaises(ssl.SSLError) as cm: + ctx.load_dh_params(CERTFILE) + + @skip_if_broken_ubuntu_ssl + def test_session_stats(self): + for proto in PROTOCOLS: + ctx = ssl.SSLContext(proto) + self.assertEqual(ctx.session_stats(), { + 'number': 0, + 'connect': 0, + 'connect_good': 0, + 'connect_renegotiate': 0, + 'accept': 0, + 'accept_good': 0, + 'accept_renegotiate': 0, + 'hits': 0, + 'misses': 0, + 'timeouts': 0, + 'cache_full': 0, + }) + + def test_set_default_verify_paths(self): + # There's not much we can do to test that it acts as expected, + # so just check it doesn't crash or raise an exception. + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ctx.set_default_verify_paths() + + @unittest.skipUnless(ssl.HAS_ECDH, "ECDH disabled on this OpenSSL build") + def test_set_ecdh_curve(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ctx.set_ecdh_curve("prime256v1") + ctx.set_ecdh_curve(b"prime256v1") + self.assertRaises(TypeError, ctx.set_ecdh_curve) + self.assertRaises(TypeError, ctx.set_ecdh_curve, None) + self.assertRaises(ValueError, ctx.set_ecdh_curve, "foo") + self.assertRaises(ValueError, ctx.set_ecdh_curve, b"foo") + + @needs_sni + def test_sni_callback(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + + # set_servername_callback expects a callable, or None + self.assertRaises(TypeError, ctx.set_servername_callback) + self.assertRaises(TypeError, ctx.set_servername_callback, 4) + self.assertRaises(TypeError, ctx.set_servername_callback, "") + self.assertRaises(TypeError, ctx.set_servername_callback, ctx) + + def dummycallback(sock, servername, ctx): + pass + ctx.set_servername_callback(None) + ctx.set_servername_callback(dummycallback) + + @needs_sni + def test_sni_callback_refcycle(self): + # Reference cycles through the servername callback are detected + # and cleared. 
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + def dummycallback(sock, servername, ctx, cycle=ctx): + pass + ctx.set_servername_callback(dummycallback) + wr = weakref.ref(ctx) + del ctx, dummycallback + gc.collect() + self.assertIs(wr(), None) + + def test_cert_store_stats(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + self.assertEqual(ctx.cert_store_stats(), + {'x509_ca': 0, 'crl': 0, 'x509': 0}) + ctx.load_cert_chain(CERTFILE) + self.assertEqual(ctx.cert_store_stats(), + {'x509_ca': 0, 'crl': 0, 'x509': 0}) + ctx.load_verify_locations(CERTFILE) + self.assertEqual(ctx.cert_store_stats(), + {'x509_ca': 0, 'crl': 0, 'x509': 1}) + ctx.load_verify_locations(CAFILE_CACERT) + self.assertEqual(ctx.cert_store_stats(), + {'x509_ca': 1, 'crl': 0, 'x509': 2}) + + def test_get_ca_certs(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + self.assertEqual(ctx.get_ca_certs(), []) + # CERTFILE is not flagged as X509v3 Basic Constraints: CA:TRUE + ctx.load_verify_locations(CERTFILE) + self.assertEqual(ctx.get_ca_certs(), []) + # but CAFILE_CACERT is a CA cert + ctx.load_verify_locations(CAFILE_CACERT) + self.assertEqual(ctx.get_ca_certs(), + [{'issuer': ((('organizationName', 'Root CA'),), + (('organizationalUnitName', 'http://www.cacert.org'),), + (('commonName', 'CA Cert Signing Authority'),), + (('emailAddress', 'support at cacert.org'),)), + 'notAfter': asn1time('Mar 29 12:29:49 2033 GMT'), + 'notBefore': asn1time('Mar 30 12:29:49 2003 GMT'), + 'serialNumber': '00', + 'crlDistributionPoints': ('https://www.cacert.org/revoke.crl',), + 'subject': ((('organizationName', 'Root CA'),), + (('organizationalUnitName', 'http://www.cacert.org'),), + (('commonName', 'CA Cert Signing Authority'),), + (('emailAddress', 'support at cacert.org'),)), + 'version': 3}]) + + with open(CAFILE_CACERT) as f: + pem = f.read() + der = ssl.PEM_cert_to_DER_cert(pem) + self.assertEqual(ctx.get_ca_certs(True), [der]) + + def test_load_default_certs(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ctx.load_default_certs() + + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ctx.load_default_certs(ssl.Purpose.SERVER_AUTH) + ctx.load_default_certs() + + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ctx.load_default_certs(ssl.Purpose.CLIENT_AUTH) + + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + self.assertRaises(TypeError, ctx.load_default_certs, None) + self.assertRaises(TypeError, ctx.load_default_certs, 'SERVER_AUTH') + + @unittest.skipIf(sys.platform == "win32", "not-Windows specific") + @unittest.skipIf(IS_LIBRESSL, "LibreSSL doesn't support env vars") + def test_load_default_certs_env(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + with support.EnvironmentVarGuard() as env: + env["SSL_CERT_DIR"] = CAPATH + env["SSL_CERT_FILE"] = CERTFILE + ctx.load_default_certs() + self.assertEqual(ctx.cert_store_stats(), {"crl": 0, "x509": 1, "x509_ca": 0}) + + @unittest.skipUnless(sys.platform == "win32", "Windows specific") + def test_load_default_certs_env_windows(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ctx.load_default_certs() + stats = ctx.cert_store_stats() + + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + with support.EnvironmentVarGuard() as env: + env["SSL_CERT_DIR"] = CAPATH + env["SSL_CERT_FILE"] = CERTFILE + ctx.load_default_certs() + stats["x509"] += 1 + self.assertEqual(ctx.cert_store_stats(), stats) + + def _assert_context_options(self, ctx): + self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2) + if OP_NO_COMPRESSION != 0: + self.assertEqual(ctx.options & OP_NO_COMPRESSION, + OP_NO_COMPRESSION) + if 
OP_SINGLE_DH_USE != 0: + self.assertEqual(ctx.options & OP_SINGLE_DH_USE, + OP_SINGLE_DH_USE) + if OP_SINGLE_ECDH_USE != 0: + self.assertEqual(ctx.options & OP_SINGLE_ECDH_USE, + OP_SINGLE_ECDH_USE) + if OP_CIPHER_SERVER_PREFERENCE != 0: + self.assertEqual(ctx.options & OP_CIPHER_SERVER_PREFERENCE, + OP_CIPHER_SERVER_PREFERENCE) + + def test_create_default_context(self): + ctx = ssl.create_default_context() + + self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23) + self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) + self.assertTrue(ctx.check_hostname) + self._assert_context_options(ctx) + + + with open(SIGNING_CA) as f: + cadata = f.read() + ctx = ssl.create_default_context(cafile=SIGNING_CA, capath=CAPATH, + cadata=cadata) + self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23) + self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) + self._assert_context_options(ctx) + + ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23) + self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) + self._assert_context_options(ctx) + + def test__create_stdlib_context(self): + ctx = ssl._create_stdlib_context() + self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23) + self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) + self.assertFalse(ctx.check_hostname) + self._assert_context_options(ctx) + + ctx = ssl._create_stdlib_context(ssl.PROTOCOL_TLSv1) + self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLSv1) + self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) + self._assert_context_options(ctx) + + ctx = ssl._create_stdlib_context(ssl.PROTOCOL_TLSv1, + cert_reqs=ssl.CERT_REQUIRED, + check_hostname=True) + self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLSv1) + self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) + self.assertTrue(ctx.check_hostname) + self._assert_context_options(ctx) + + ctx = ssl._create_stdlib_context(purpose=ssl.Purpose.CLIENT_AUTH) + self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23) + self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) + self._assert_context_options(ctx) + + def test_check_hostname(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + self.assertFalse(ctx.check_hostname) + + # Requires CERT_REQUIRED or CERT_OPTIONAL + with self.assertRaises(ValueError): + ctx.check_hostname = True + ctx.verify_mode = ssl.CERT_REQUIRED + self.assertFalse(ctx.check_hostname) + ctx.check_hostname = True + self.assertTrue(ctx.check_hostname) + + ctx.verify_mode = ssl.CERT_OPTIONAL + ctx.check_hostname = True + self.assertTrue(ctx.check_hostname) + + # Cannot set CERT_NONE with check_hostname enabled + with self.assertRaises(ValueError): + ctx.verify_mode = ssl.CERT_NONE + ctx.check_hostname = False + self.assertFalse(ctx.check_hostname) + + def test_context_client_server(self): + # PROTOCOL_TLS_CLIENT has sane defaults + ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + self.assertTrue(ctx.check_hostname) + self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED) + + # PROTOCOL_TLS_SERVER has different but also sane defaults + ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + self.assertFalse(ctx.check_hostname) + self.assertEqual(ctx.verify_mode, ssl.CERT_NONE) + + +class SSLErrorTests(unittest.TestCase): + + def test_str(self): + # The str() of a SSLError doesn't include the errno + e = ssl.SSLError(1, "foo") + self.assertEqual(str(e), "foo") + self.assertEqual(e.errno, 1) + # Same for a subclass + e = ssl.SSLZeroReturnError(1, "foo") + self.assertEqual(str(e), "foo") + self.assertEqual(e.errno, 1) + + def test_lib_reason(self): + # Test the library and reason 
attributes + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + with self.assertRaises(ssl.SSLError) as cm: + ctx.load_dh_params(CERTFILE) + self.assertEqual(cm.exception.library, 'PEM') + self.assertEqual(cm.exception.reason, 'NO_START_LINE') + s = str(cm.exception) + self.assertTrue(s.startswith("[PEM: NO_START_LINE] no start line"), s) + + def test_subclass(self): + # Check that the appropriate SSLError subclass is raised + # (this only tests one of them) + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + with socket.socket() as s: + s.bind(("127.0.0.1", 0)) + s.listen() + c = socket.socket() + c.connect(s.getsockname()) + c.setblocking(False) + with ctx.wrap_socket(c, False, do_handshake_on_connect=False) as c: + with self.assertRaises(ssl.SSLWantReadError) as cm: + c.do_handshake() + s = str(cm.exception) + self.assertTrue(s.startswith("The operation did not complete (read)"), s) + # For compatibility + self.assertEqual(cm.exception.errno, ssl.SSL_ERROR_WANT_READ) + + +class MemoryBIOTests(unittest.TestCase): + + def test_read_write(self): + bio = ssl.MemoryBIO() + bio.write(b'foo') + self.assertEqual(bio.read(), b'foo') + self.assertEqual(bio.read(), b'') + bio.write(b'foo') + bio.write(b'bar') + self.assertEqual(bio.read(), b'foobar') + self.assertEqual(bio.read(), b'') + bio.write(b'baz') + self.assertEqual(bio.read(2), b'ba') + self.assertEqual(bio.read(1), b'z') + self.assertEqual(bio.read(1), b'') + + def test_eof(self): + bio = ssl.MemoryBIO() + self.assertFalse(bio.eof) + self.assertEqual(bio.read(), b'') + self.assertFalse(bio.eof) + bio.write(b'foo') + self.assertFalse(bio.eof) + bio.write_eof() + self.assertFalse(bio.eof) + self.assertEqual(bio.read(2), b'fo') + self.assertFalse(bio.eof) + self.assertEqual(bio.read(1), b'o') + self.assertTrue(bio.eof) + self.assertEqual(bio.read(), b'') + self.assertTrue(bio.eof) + + def test_pending(self): + bio = ssl.MemoryBIO() + self.assertEqual(bio.pending, 0) + bio.write(b'foo') + self.assertEqual(bio.pending, 3) + for i in range(3): + bio.read(1) + self.assertEqual(bio.pending, 3-i-1) + for i in range(3): + bio.write(b'x') + self.assertEqual(bio.pending, i+1) + bio.read() + self.assertEqual(bio.pending, 0) + + def test_buffer_types(self): + bio = ssl.MemoryBIO() + bio.write(b'foo') + self.assertEqual(bio.read(), b'foo') + bio.write(bytearray(b'bar')) + self.assertEqual(bio.read(), b'bar') + bio.write(memoryview(b'baz')) + self.assertEqual(bio.read(), b'baz') + + def test_error_types(self): + bio = ssl.MemoryBIO() + self.assertRaises(TypeError, bio.write, 'foo') + self.assertRaises(TypeError, bio.write, None) + self.assertRaises(TypeError, bio.write, True) + self.assertRaises(TypeError, bio.write, 1) + + + at unittest.skipUnless(_have_threads, "Needs threading module") +class SimpleBackgroundTests(unittest.TestCase): + + """Tests that connect to a simple server running in the background""" + + def setUp(self): + server = ThreadedEchoServer(SIGNED_CERTFILE) + self.server_addr = (HOST, server.port) + server.__enter__() + self.addCleanup(server.__exit__, None, None, None) + + def test_connect(self): + with test_wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_NONE) as s: + s.connect(self.server_addr) + self.assertEqual({}, s.getpeercert()) + self.assertFalse(s.server_side) + + # this should succeed because we specify the root cert + with test_wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_REQUIRED, + ca_certs=SIGNING_CA) as s: + s.connect(self.server_addr) + self.assertTrue(s.getpeercert()) + 
self.assertFalse(s.server_side) + + def test_connect_fail(self): + # This should fail because we have no verification certs. Connection + # failure crashes ThreadedEchoServer, so run this in an independent + # test method. + s = test_wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_REQUIRED) + self.addCleanup(s.close) + self.assertRaisesRegex(ssl.SSLError, "certificate verify failed", + s.connect, self.server_addr) + + def test_connect_ex(self): + # Issue #11326: check connect_ex() implementation + s = test_wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_REQUIRED, + ca_certs=SIGNING_CA) + self.addCleanup(s.close) + self.assertEqual(0, s.connect_ex(self.server_addr)) + self.assertTrue(s.getpeercert()) + + def test_non_blocking_connect_ex(self): + # Issue #11326: non-blocking connect_ex() should allow handshake + # to proceed after the socket gets ready. + s = test_wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_REQUIRED, + ca_certs=SIGNING_CA, + do_handshake_on_connect=False) + self.addCleanup(s.close) + s.setblocking(False) + rc = s.connect_ex(self.server_addr) + # EWOULDBLOCK under Windows, EINPROGRESS elsewhere + self.assertIn(rc, (0, errno.EINPROGRESS, errno.EWOULDBLOCK)) + # Wait for connect to finish + select.select([], [s], [], 5.0) + # Non-blocking handshake + while True: + try: + s.do_handshake() + break + except ssl.SSLWantReadError: + select.select([s], [], [], 5.0) + except ssl.SSLWantWriteError: + select.select([], [s], [], 5.0) + # SSL established + self.assertTrue(s.getpeercert()) + + def test_connect_with_context(self): + # Same as test_connect, but with a separately created context + ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + with ctx.wrap_socket(socket.socket(socket.AF_INET)) as s: + s.connect(self.server_addr) + self.assertEqual({}, s.getpeercert()) + # Same with a server hostname + with ctx.wrap_socket(socket.socket(socket.AF_INET), + server_hostname="dummy") as s: + s.connect(self.server_addr) + ctx.verify_mode = ssl.CERT_REQUIRED + # This should succeed because we specify the root cert + ctx.load_verify_locations(SIGNING_CA) + with ctx.wrap_socket(socket.socket(socket.AF_INET)) as s: + s.connect(self.server_addr) + cert = s.getpeercert() + self.assertTrue(cert) + + def test_connect_with_context_fail(self): + # This should fail because we have no verification certs. Connection + # failure crashes ThreadedEchoServer, so run this in an independent + # test method. + ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ctx.verify_mode = ssl.CERT_REQUIRED + s = ctx.wrap_socket(socket.socket(socket.AF_INET)) + self.addCleanup(s.close) + self.assertRaisesRegex(ssl.SSLError, "certificate verify failed", + s.connect, self.server_addr) + + def test_connect_capath(self): + # Verify server certificates using the `capath` argument + # NOTE: the subject hashing algorithm has been changed between + # OpenSSL 0.9.8n and 1.0.0, as a result the capath directory must + # contain both versions of each certificate (same content, different + # filename) for this test to be portable across OpenSSL releases. 
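For reference on the capath note above: OpenSSL resolves capath lookups through files named after the certificate's subject hash ("<hash>.0", normally generated with c_rehash or "openssl rehash"), which is why the test directory has to carry both hash flavours. A minimal client-side use of such a directory, with a placeholder path, would be:

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    ctx.verify_mode = ssl.CERT_REQUIRED
    # The directory must already contain the CA certificates under their
    # subject-hash names; the path here is only an example.
    ctx.load_verify_locations(capath="/etc/ssl/certs")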
+ ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ctx.verify_mode = ssl.CERT_REQUIRED + ctx.load_verify_locations(capath=CAPATH) + with ctx.wrap_socket(socket.socket(socket.AF_INET)) as s: + s.connect(self.server_addr) + cert = s.getpeercert() + self.assertTrue(cert) + # Same with a bytes `capath` argument + ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ctx.verify_mode = ssl.CERT_REQUIRED + ctx.load_verify_locations(capath=BYTES_CAPATH) + with ctx.wrap_socket(socket.socket(socket.AF_INET)) as s: + s.connect(self.server_addr) + cert = s.getpeercert() + self.assertTrue(cert) + + def test_connect_cadata(self): + with open(SIGNING_CA) as f: + pem = f.read() + der = ssl.PEM_cert_to_DER_cert(pem) + ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ctx.verify_mode = ssl.CERT_REQUIRED + ctx.load_verify_locations(cadata=pem) + with ctx.wrap_socket(socket.socket(socket.AF_INET)) as s: + s.connect(self.server_addr) + cert = s.getpeercert() + self.assertTrue(cert) + + # same with DER + ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ctx.verify_mode = ssl.CERT_REQUIRED + ctx.load_verify_locations(cadata=der) + with ctx.wrap_socket(socket.socket(socket.AF_INET)) as s: + s.connect(self.server_addr) + cert = s.getpeercert() + self.assertTrue(cert) + + @unittest.skipIf(os.name == "nt", "Can't use a socket as a file under Windows") + def test_makefile_close(self): + # Issue #5238: creating a file-like object with makefile() shouldn't + # delay closing the underlying "real socket" (here tested with its + # file descriptor, hence skipping the test under Windows). + ss = test_wrap_socket(socket.socket(socket.AF_INET)) + ss.connect(self.server_addr) + fd = ss.fileno() + f = ss.makefile() + f.close() + # The fd is still open + os.read(fd, 0) + # Closing the SSL socket should close the fd too + ss.close() + gc.collect() + with self.assertRaises(OSError) as e: + os.read(fd, 0) + self.assertEqual(e.exception.errno, errno.EBADF) + + def test_non_blocking_handshake(self): + s = socket.socket(socket.AF_INET) + s.connect(self.server_addr) + s.setblocking(False) + s = test_wrap_socket(s, + cert_reqs=ssl.CERT_NONE, + do_handshake_on_connect=False) + self.addCleanup(s.close) + count = 0 + while True: + try: + count += 1 + s.do_handshake() + break + except ssl.SSLWantReadError: + select.select([s], [], []) + except ssl.SSLWantWriteError: + select.select([], [s], []) + if support.verbose: + sys.stdout.write("\nNeeded %d calls to do_handshake() to establish session.\n" % count) + + def test_get_server_certificate(self): + _test_get_server_certificate(self, *self.server_addr, cert=SIGNING_CA) + + def test_get_server_certificate_fail(self): + # Connection failure crashes ThreadedEchoServer, so run this in an + # independent test method + _test_get_server_certificate_fail(self, *self.server_addr) + + def test_ciphers(self): + with test_wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_NONE, ciphers="ALL") as s: + s.connect(self.server_addr) + with test_wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_NONE, ciphers="DEFAULT") as s: + s.connect(self.server_addr) + # Error checking can happen at instantiation or when connecting + with self.assertRaisesRegex(ssl.SSLError, "No cipher can be selected"): + with socket.socket(socket.AF_INET) as sock: + s = test_wrap_socket(sock, + cert_reqs=ssl.CERT_NONE, ciphers="^$:,;?*'dorothyx") + s.connect(self.server_addr) + + def test_get_ca_certs_capath(self): + # capath certs are loaded on request + ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ctx.verify_mode = ssl.CERT_REQUIRED + 
ctx.load_verify_locations(capath=CAPATH) + self.assertEqual(ctx.get_ca_certs(), []) + with ctx.wrap_socket(socket.socket(socket.AF_INET)) as s: + s.connect(self.server_addr) + cert = s.getpeercert() + self.assertTrue(cert) + self.assertEqual(len(ctx.get_ca_certs()), 1) + + @needs_sni + @unittest.skipUnless(hasattr(ssl, "PROTOCOL_TLSv1_2"), "needs TLS 1.2") + def test_context_setget(self): + # Check that the context of a connected socket can be replaced. + ctx1 = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) + ctx2 = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + s = socket.socket(socket.AF_INET) + with ctx1.wrap_socket(s) as ss: + ss.connect(self.server_addr) + self.assertIs(ss.context, ctx1) + self.assertIs(ss._sslobj.context, ctx1) + ss.context = ctx2 + self.assertIs(ss.context, ctx2) + self.assertIs(ss._sslobj.context, ctx2) + + def ssl_io_loop(self, sock, incoming, outgoing, func, *args, **kwargs): + # A simple IO loop. Call func(*args) depending on the error we get + # (WANT_READ or WANT_WRITE) move data between the socket and the BIOs. + timeout = kwargs.get('timeout', 10) + count = 0 + while True: + errno = None + count += 1 + try: + ret = func(*args) + except ssl.SSLError as e: + if e.errno not in (ssl.SSL_ERROR_WANT_READ, + ssl.SSL_ERROR_WANT_WRITE): + raise + errno = e.errno + # Get any data from the outgoing BIO irrespective of any error, and + # send it to the socket. + buf = outgoing.read() + sock.sendall(buf) + # If there's no error, we're done. For WANT_READ, we need to get + # data from the socket and put it in the incoming BIO. + if errno is None: + break + elif errno == ssl.SSL_ERROR_WANT_READ: + buf = sock.recv(32768) + if buf: + incoming.write(buf) + else: + incoming.write_eof() + if support.verbose: + sys.stdout.write("Needed %d calls to complete %s().\n" + % (count, func.__name__)) + return ret + + def test_bio_handshake(self): + sock = socket.socket(socket.AF_INET) + self.addCleanup(sock.close) + sock.connect(self.server_addr) + incoming = ssl.MemoryBIO() + outgoing = ssl.MemoryBIO() + ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ctx.verify_mode = ssl.CERT_REQUIRED + ctx.load_verify_locations(SIGNING_CA) + ctx.check_hostname = True + sslobj = ctx.wrap_bio(incoming, outgoing, False, 'localhost') + self.assertIs(sslobj._sslobj.owner, sslobj) + self.assertIsNone(sslobj.cipher()) + # cypthon implementation detail + # self.assertIsNone(sslobj.version()) + self.assertIsNotNone(sslobj.shared_ciphers()) + self.assertRaises(ValueError, sslobj.getpeercert) + if 'tls-unique' in ssl.CHANNEL_BINDING_TYPES: + self.assertIsNone(sslobj.get_channel_binding('tls-unique')) + self.ssl_io_loop(sock, incoming, outgoing, sslobj.do_handshake) + self.assertTrue(sslobj.cipher()) + self.assertIsNotNone(sslobj.shared_ciphers()) + self.assertIsNotNone(sslobj.version()) + self.assertTrue(sslobj.getpeercert()) + if 'tls-unique' in ssl.CHANNEL_BINDING_TYPES: + self.assertTrue(sslobj.get_channel_binding('tls-unique')) + try: + self.ssl_io_loop(sock, incoming, outgoing, sslobj.unwrap) + except ssl.SSLSyscallError: + # If the server shuts down the TCP connection without sending a + # secure shutdown message, this is reported as SSL_ERROR_SYSCALL + pass + self.assertRaises(ssl.SSLError, sslobj.write, b'foo') + + def test_bio_read_write_data(self): + sock = socket.socket(socket.AF_INET) + self.addCleanup(sock.close) + sock.connect(self.server_addr) + incoming = ssl.MemoryBIO() + outgoing = ssl.MemoryBIO() + ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ctx.verify_mode = ssl.CERT_NONE + sslobj = ctx.wrap_bio(incoming, 
outgoing, False) + self.ssl_io_loop(sock, incoming, outgoing, sslobj.do_handshake) + req = b'FOO\n' + self.ssl_io_loop(sock, incoming, outgoing, sslobj.write, req) + buf = self.ssl_io_loop(sock, incoming, outgoing, sslobj.read, 1024) + self.assertEqual(buf, b'foo\n') + self.ssl_io_loop(sock, incoming, outgoing, sslobj.unwrap) + + +class NetworkedTests(unittest.TestCase): + + def test_timeout_connect_ex(self): + # Issue #12065: on a timeout, connect_ex() should return the original + # errno (mimicking the behaviour of non-SSL sockets). + with support.transient_internet(REMOTE_HOST): + s = test_wrap_socket(socket.socket(socket.AF_INET), + cert_reqs=ssl.CERT_REQUIRED, + do_handshake_on_connect=False) + self.addCleanup(s.close) + s.settimeout(0.0000001) + rc = s.connect_ex((REMOTE_HOST, 443)) + if rc == 0: + self.skipTest("REMOTE_HOST responded too quickly") + self.assertIn(rc, (errno.EAGAIN, errno.EWOULDBLOCK)) + + @unittest.skipUnless(support.IPV6_ENABLED, 'Needs IPv6') + def test_get_server_certificate_ipv6(self): + with support.transient_internet('ipv6.google.com'): + _test_get_server_certificate(self, 'ipv6.google.com', 443) + _test_get_server_certificate_fail(self, 'ipv6.google.com', 443) + + +def _test_get_server_certificate(test, host, port, cert=None): + pem = ssl.get_server_certificate((host, port)) + if not pem: + test.fail("No server certificate on %s:%s!" % (host, port)) + + pem = ssl.get_server_certificate((host, port), ca_certs=cert) + if not pem: + test.fail("No server certificate on %s:%s!" % (host, port)) + if support.verbose: + sys.stdout.write("\nVerified certificate for %s:%s is\n%s\n" % (host, port ,pem)) + +def _test_get_server_certificate_fail(test, host, port): + try: + pem = ssl.get_server_certificate((host, port), ca_certs=CERTFILE) + except ssl.SSLError as x: + #should fail + if support.verbose: + sys.stdout.write("%s\n" % x) + else: + test.fail("Got server certificate %s for %s:%s!" % (pem, host, port)) + + +if _have_threads: + from test.ssl_servers import make_https_server + + class ThreadedEchoServer(threading.Thread): + + class ConnectionHandler(threading.Thread): + + """A mildly complicated class, because we want it to work both + with and without the SSL wrapper around the socket connection, so + that we can test the STARTTLS functionality.""" + + def __init__(self, server, connsock, addr): + self.server = server + self.running = False + self.sock = connsock + self.addr = addr + self.sock.setblocking(1) + self.sslconn = None + threading.Thread.__init__(self) + self.daemon = True + + def wrap_conn(self): + try: + self.sslconn = self.server.context.wrap_socket( + self.sock, server_side=True) + self.server.selected_npn_protocols.append(self.sslconn.selected_npn_protocol()) + self.server.selected_alpn_protocols.append(self.sslconn.selected_alpn_protocol()) + except (ConnectionResetError, BrokenPipeError) as e: + # We treat ConnectionResetError as though it were an + # SSLError - OpenSSL on Ubuntu abruptly closes the + # connection when asked to use an unsupported protocol. + # + # BrokenPipeError is raised in TLS 1.3 mode, when OpenSSL + # tries to send session tickets after handshake. + # https://github.com/openssl/openssl/issues/6342 + self.server.conn_errors.append(str(e)) + if self.server.chatty: + handle_error( + "\n server: bad connection attempt from " + repr( + self.addr) + ":\n") + self.running = False + self.close() + return False + except (ssl.SSLError, OSError) as e: + # OSError may occur with wrong protocols, e.g. 
both + # sides use PROTOCOL_TLS_SERVER. + # + # XXX Various errors can have happened here, for example + # a mismatching protocol version, an invalid certificate, + # or a low-level bug. This should be made more discriminating. + # + # bpo-31323: Store the exception as string to prevent + # a reference leak: server -> conn_errors -> exception + # -> traceback -> self (ConnectionHandler) -> server + self.server.conn_errors.append(str(e)) + if self.server.chatty: + handle_error("\n server: bad connection attempt from " + repr(self.addr) + ":\n") + self.running = False + self.server.stop() + self.close() + return False + else: + self.server.shared_ciphers.append(self.sslconn.shared_ciphers()) + if self.server.context.verify_mode == ssl.CERT_REQUIRED: + cert = self.sslconn.getpeercert() + if support.verbose and self.server.chatty: + sys.stdout.write(" client cert is " + pprint.pformat(cert) + "\n") + cert_binary = self.sslconn.getpeercert(True) + if support.verbose and self.server.chatty: + sys.stdout.write(" cert binary is " + str(len(cert_binary)) + " bytes\n") + cipher = self.sslconn.cipher() + if support.verbose and self.server.chatty: + sys.stdout.write(" server: connection cipher is now " + str(cipher) + "\n") + sys.stdout.write(" server: selected protocol is now " + + str(self.sslconn.selected_npn_protocol()) + "\n") + return True + + def read(self): + if self.sslconn: + return self.sslconn.read() + else: + return self.sock.recv(1024) + + def write(self, bytes): + if self.sslconn: + return self.sslconn.write(bytes) + else: + return self.sock.send(bytes) + + def close(self): + if self.sslconn: + self.sslconn.close() + else: From pypy.commits at gmail.com Thu Aug 15 02:46:03 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 14 Aug 2019 23:46:03 -0700 (PDT) Subject: [pypy-commit] pypy default: add const qualifier for conditional compilation Message-ID: <5d54ffab.1c69fb81.4d05c.dc3f@mx.google.com> Author: Matti Picus Branch: Changeset: r97182:0e1858e6bbc7 Date: 2019-08-15 09:37 +0300 http://bitbucket.org/pypy/pypy/changeset/0e1858e6bbc7/ Log: add const qualifier for conditional compilation diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py @@ -621,9 +621,9 @@ #ifdef OPENSSL_NO_SSL3_METHOD static const long Cryptography_HAS_SSL3_METHOD = 0; -SSL_METHOD* (*SSLv3_method)(void) = NULL; -SSL_METHOD* (*SSLv3_client_method)(void) = NULL; -SSL_METHOD* (*SSLv3_server_method)(void) = NULL; +const SSL_METHOD* (*SSLv3_method)(void) = NULL; +const SSL_METHOD* (*SSLv3_client_method)(void) = NULL; +const SSL_METHOD* (*SSLv3_server_method)(void) = NULL; #else static const long Cryptography_HAS_SSL3_METHOD = 1; #endif From pypy.commits at gmail.com Thu Aug 15 06:23:46 2019 From: pypy.commits at gmail.com (mattip) Date: Thu, 15 Aug 2019 03:23:46 -0700 (PDT) Subject: [pypy-commit] buildbot default: do not symlink to empty files from failed builds Message-ID: <5d5532b2.1c69fb81.a79ec.1643@mx.google.com> Author: Matti Picus Branch: Changeset: r1091:00dca717ce23 Date: 2019-08-15 13:23 +0300 http://bitbucket.org/pypy/buildbot/changeset/00dca717ce23/ Log: do not symlink to empty files from failed builds diff --git a/bot2/pypybuildbot/builds.py b/bot2/pypybuildbot/builds.py --- a/bot2/pypybuildbot/builds.py +++ b/bot2/pypybuildbot/builds.py @@ -80,10 +80,11 @@ os.chmod(self.masterdest, 0644) except OSError: pass - try: - 
symlink_force(os.path.basename(self.masterdest), self.symlinkname) - except OSError: - pass + if os.stat(self.masterdest).st_size > 10: + try: + symlink_force(os.path.basename(self.masterdest), self.symlinkname) + except OSError: + pass class PyPyDownload(transfer.FileDownload): parms = transfer.FileDownload.parms + ['basename'] From pypy.commits at gmail.com Thu Aug 15 08:55:24 2019 From: pypy.commits at gmail.com (mattip) Date: Thu, 15 Aug 2019 05:55:24 -0700 (PDT) Subject: [pypy-commit] pypy default: add more missing things from cryptography, remove offending macro from build Message-ID: <5d55563c.1c69fb81.5bc75.62c0@mx.google.com> Author: Matti Picus Branch: Changeset: r97183:0cbabb9554cd Date: 2019-08-15 15:54 +0300 http://bitbucket.org/pypy/pypy/changeset/0cbabb9554cd/ Log: add more missing things from cryptography, remove offending macro from build diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/nid.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/nid.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/nid.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/nid.py @@ -23,6 +23,7 @@ static const int NID_ED448; static const int NID_poly1305; +static const int NID_X9_62_prime256v1; static const int NID_info_access; static const int NID_subject_alt_name; static const int NID_crl_distribution_points; diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py new file mode 100644 --- /dev/null +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py @@ -0,0 +1,84 @@ +# +# An extra bit of logic for the Win32-only functionality that is missing from the +# version from cryptography. +# + +import sys + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef ... *HCERTSTORE; +typedef ... *HCRYPTPROV_LEGACY; + +typedef struct { + DWORD dwCertEncodingType; + BYTE *pbCertEncoded; + DWORD cbCertEncoded; + ...; +} CERT_CONTEXT, *PCCERT_CONTEXT; + +typedef struct { + DWORD dwCertEncodingType; + BYTE *pbCrlEncoded; + DWORD cbCrlEncoded; + ...; +} CRL_CONTEXT, *PCCRL_CONTEXT; + +typedef struct { + DWORD cUsageIdentifier; + LPSTR *rgpszUsageIdentifier; + ...; +} CERT_ENHKEY_USAGE, *PCERT_ENHKEY_USAGE; +""" + +FUNCTIONS = """ +HCERTSTORE WINAPI CertOpenStore( + LPCSTR lpszStoreProvider, + DWORD dwMsgAndCertEncodingType, + HCRYPTPROV_LEGACY hCryptProv, + DWORD dwFlags, + const char *pvPara +); +PCCERT_CONTEXT WINAPI CertEnumCertificatesInStore( + HCERTSTORE hCertStore, + PCCERT_CONTEXT pPrevCertContext +); +BOOL WINAPI CertFreeCertificateContext( + PCCERT_CONTEXT pCertContext +); +BOOL WINAPI CertFreeCRLContext( + PCCRL_CONTEXT pCrlContext +); +BOOL WINAPI CertCloseStore( + HCERTSTORE hCertStore, + DWORD dwFlags +); +BOOL WINAPI CertGetEnhancedKeyUsage( + PCCERT_CONTEXT pCertContext, + DWORD dwFlags, + PCERT_ENHKEY_USAGE pUsage, + DWORD *pcbUsage +); +PCCRL_CONTEXT WINAPI CertEnumCRLsInStore( + HCERTSTORE hCertStore, + PCCRL_CONTEXT pPrevCrlContext +); +""" + +MACROS = """ +#define CERT_STORE_READONLY_FLAG ... +#define CERT_SYSTEM_STORE_LOCAL_MACHINE ... +#define CRYPT_E_NOT_FOUND ... +#define CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG ... +#define CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG ... +#define X509_ASN_ENCODING ... +#define PKCS_7_ASN_ENCODING ... 
+ +static const LPCSTR CERT_STORE_PROV_SYSTEM_A; +""" + +CUSTOMIZATIONS = """ +""" diff --git a/lib_pypy/_ssl_build.py b/lib_pypy/_ssl_build.py --- a/lib_pypy/_ssl_build.py +++ b/lib_pypy/_ssl_build.py @@ -53,7 +53,6 @@ "callbacks", ] + pypy_win32_extra, libraries=_get_openssl_libraries(sys.platform), - extra_compile_args=['-DOPENSSL_NO_SSL3_METHOD'], extra_link_args=extra_link_args(compiler_type()), ) From pypy.commits at gmail.com Thu Aug 15 12:35:46 2019 From: pypy.commits at gmail.com (mattip) Date: Thu, 15 Aug 2019 09:35:46 -0700 (PDT) Subject: [pypy-commit] pypy default: qualify memoryview.cast to python 3.3 and up Message-ID: <5d5589e2.1c69fb81.85d30.169a@mx.google.com> Author: Matti Picus Branch: Changeset: r97186:d087a60ceaaf Date: 2019-08-15 18:13 +0300 http://bitbucket.org/pypy/pypy/changeset/d087a60ceaaf/ Log: qualify memoryview.cast to python 3.3 and up diff --git a/extra_tests/ctypes_tests/test_cast.py b/extra_tests/ctypes_tests/test_cast.py --- a/extra_tests/ctypes_tests/test_cast.py +++ b/extra_tests/ctypes_tests/test_cast.py @@ -30,9 +30,11 @@ assert x.value is True def test_cast_array(): + import sys data = b'data' ubyte = c_ubyte * len(data) byteslike = ubyte.from_buffer_copy(data) m = memoryview(byteslike) - b = m.cast('B') - assert bytes(b) == data + if sys.version_info > (3, 3): + b = m.cast('B') + assert bytes(b) == data From pypy.commits at gmail.com Thu Aug 15 12:35:48 2019 From: pypy.commits at gmail.com (mattip) Date: Thu, 15 Aug 2019 09:35:48 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: merge default into py3.6 Message-ID: <5d5589e4.1c69fb81.ba772.7470@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97187:c5962a500215 Date: 2019-08-15 19:35 +0300 http://bitbucket.org/pypy/pypy/changeset/c5962a500215/ Log: merge default into py3.6 diff --git a/extra_tests/ctypes_tests/test_cast.py b/extra_tests/ctypes_tests/test_cast.py --- a/extra_tests/ctypes_tests/test_cast.py +++ b/extra_tests/ctypes_tests/test_cast.py @@ -30,9 +30,11 @@ assert x.value is True def test_cast_array(): + import sys data = b'data' ubyte = c_ubyte * len(data) byteslike = ubyte.from_buffer_copy(data) m = memoryview(byteslike) - b = m.cast('B') - assert bytes(b) == data + if sys.version_info > (3, 3): + b = m.cast('B') + assert bytes(b) == data diff --git a/lib-python/3.2/test/test_tools.py b/lib-python/3.2/test/test_tools.py new file mode 100644 --- /dev/null +++ b/lib-python/3.2/test/test_tools.py @@ -0,0 +1,433 @@ +"""Tests for scripts in the Tools directory. + +This file contains regression tests for some of the scripts found in the +Tools directory of a Python checkout or tarball, such as reindent.py. +""" + +import os +import sys +import imp +import unittest +import shutil +import subprocess +import sysconfig +import tempfile +import textwrap +from test import support +from test.script_helper import assert_python_ok, temp_dir + +if not sysconfig.is_python_build(): + # XXX some installers do contain the tools, should we detect that + # and run the tests in that case too? 
+ raise unittest.SkipTest('test irrelevant for an installed Python') + +basepath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), + 'Tools') +scriptsdir = os.path.join(basepath, 'scripts') + + +class ReindentTests(unittest.TestCase): + script = os.path.join(scriptsdir, 'reindent.py') + + def test_noargs(self): + assert_python_ok(self.script) + + def test_help(self): + rc, out, err = assert_python_ok(self.script, '-h') + self.assertEqual(out, b'') + self.assertGreater(err, b'') + + +class PindentTests(unittest.TestCase): + script = os.path.join(scriptsdir, 'pindent.py') + + def assertFileEqual(self, fn1, fn2): + with open(fn1) as f1, open(fn2) as f2: + self.assertEqual(f1.readlines(), f2.readlines()) + + def pindent(self, source, *args): + with subprocess.Popen( + (sys.executable, self.script) + args, + stdin=subprocess.PIPE, stdout=subprocess.PIPE, + universal_newlines=True) as proc: + out, err = proc.communicate(source) + self.assertIsNone(err) + return out + + def lstriplines(self, data): + return '\n'.join(line.lstrip() for line in data.splitlines()) + '\n' + + def test_selftest(self): + self.maxDiff = None + with temp_dir() as directory: + data_path = os.path.join(directory, '_test.py') + with open(self.script) as f: + closed = f.read() + with open(data_path, 'w') as f: + f.write(closed) + + rc, out, err = assert_python_ok(self.script, '-d', data_path) + self.assertEqual(out, b'') + self.assertEqual(err, b'') + backup = data_path + '~' + self.assertTrue(os.path.exists(backup)) + with open(backup) as f: + self.assertEqual(f.read(), closed) + with open(data_path) as f: + clean = f.read() + compile(clean, '_test.py', 'exec') + self.assertEqual(self.pindent(clean, '-c'), closed) + self.assertEqual(self.pindent(closed, '-d'), clean) + + rc, out, err = assert_python_ok(self.script, '-c', data_path) + self.assertEqual(out, b'') + self.assertEqual(err, b'') + with open(backup) as f: + self.assertEqual(f.read(), clean) + with open(data_path) as f: + self.assertEqual(f.read(), closed) + + broken = self.lstriplines(closed) + with open(data_path, 'w') as f: + f.write(broken) + rc, out, err = assert_python_ok(self.script, '-r', data_path) + self.assertEqual(out, b'') + self.assertEqual(err, b'') + with open(backup) as f: + self.assertEqual(f.read(), broken) + with open(data_path) as f: + indented = f.read() + compile(indented, '_test.py', 'exec') + self.assertEqual(self.pindent(broken, '-r'), indented) + + def pindent_test(self, clean, closed): + self.assertEqual(self.pindent(clean, '-c'), closed) + self.assertEqual(self.pindent(closed, '-d'), clean) + broken = self.lstriplines(closed) + self.assertEqual(self.pindent(broken, '-r', '-e', '-s', '4'), closed) + + def test_statements(self): + clean = textwrap.dedent("""\ + if a: + pass + + if a: + pass + else: + pass + + if a: + pass + elif: + pass + else: + pass + + while a: + break + + while a: + break + else: + pass + + for i in a: + break + + for i in a: + break + else: + pass + + try: + pass + finally: + pass + + try: + pass + except TypeError: + pass + except ValueError: + pass + else: + pass + + try: + pass + except TypeError: + pass + except ValueError: + pass + finally: + pass + + with a: + pass + + class A: + pass + + def f(): + pass + """) + + closed = textwrap.dedent("""\ + if a: + pass + # end if + + if a: + pass + else: + pass + # end if + + if a: + pass + elif: + pass + else: + pass + # end if + + while a: + break + # end while + + while a: + break + else: + pass + # end while + + for i in a: + break + # end 
for + + for i in a: + break + else: + pass + # end for + + try: + pass + finally: + pass + # end try + + try: + pass + except TypeError: + pass + except ValueError: + pass + else: + pass + # end try + + try: + pass + except TypeError: + pass + except ValueError: + pass + finally: + pass + # end try + + with a: + pass + # end with + + class A: + pass + # end class A + + def f(): + pass + # end def f + """) + self.pindent_test(clean, closed) + + def test_multilevel(self): + clean = textwrap.dedent("""\ + def foobar(a, b): + if a == b: + a = a+1 + elif a < b: + b = b-1 + if b > a: a = a-1 + else: + print 'oops!' + """) + closed = textwrap.dedent("""\ + def foobar(a, b): + if a == b: + a = a+1 + elif a < b: + b = b-1 + if b > a: a = a-1 + # end if + else: + print 'oops!' + # end if + # end def foobar + """) + self.pindent_test(clean, closed) + + def test_preserve_indents(self): + clean = textwrap.dedent("""\ + if a: + if b: + pass + """) + closed = textwrap.dedent("""\ + if a: + if b: + pass + # end if + # end if + """) + self.assertEqual(self.pindent(clean, '-c'), closed) + self.assertEqual(self.pindent(closed, '-d'), clean) + broken = self.lstriplines(closed) + self.assertEqual(self.pindent(broken, '-r', '-e', '-s', '9'), closed) + clean = textwrap.dedent("""\ + if a: + \tif b: + \t\tpass + """) + closed = textwrap.dedent("""\ + if a: + \tif b: + \t\tpass + \t# end if + # end if + """) + self.assertEqual(self.pindent(clean, '-c'), closed) + self.assertEqual(self.pindent(closed, '-d'), clean) + broken = self.lstriplines(closed) + self.assertEqual(self.pindent(broken, '-r'), closed) + + def test_escaped_newline(self): + clean = textwrap.dedent("""\ + class\\ + \\ + A: + def\ + \\ + f: + pass + """) + closed = textwrap.dedent("""\ + class\\ + \\ + A: + def\ + \\ + f: + pass + # end def f + # end class A + """) + self.assertEqual(self.pindent(clean, '-c'), closed) + self.assertEqual(self.pindent(closed, '-d'), clean) + + def test_empty_line(self): + clean = textwrap.dedent("""\ + if a: + + pass + """) + closed = textwrap.dedent("""\ + if a: + + pass + # end if + """) + self.pindent_test(clean, closed) + + def test_oneline(self): + clean = textwrap.dedent("""\ + if a: pass + """) + closed = textwrap.dedent("""\ + if a: pass + # end if + """) + self.pindent_test(clean, closed) + + +class TestSundryScripts(unittest.TestCase): + # At least make sure the rest don't have syntax errors. When tests are + # added for a script it should be added to the whitelist below. + + # scripts that have independent tests. 
+ whitelist = ['reindent.py'] + # scripts that can't be imported without running + blacklist = ['make_ctype.py'] + # scripts that use windows-only modules + windows_only = ['win_add2path.py'] + # blacklisted for other reasons + other = ['analyze_dxp.py'] + + skiplist = blacklist + whitelist + windows_only + other + + def setUp(self): + cm = support.DirsOnSysPath(scriptsdir) + cm.__enter__() + self.addCleanup(cm.__exit__) + + def test_sundry(self): + for fn in os.listdir(scriptsdir): + if fn.endswith('.py') and fn not in self.skiplist: + __import__(fn[:-3]) + + @unittest.skipIf(sys.platform != "win32", "Windows-only test") + def test_sundry_windows(self): + for fn in self.windows_only: + __import__(fn[:-3]) + + @unittest.skipIf(not support.threading, "test requires _thread module") + def test_analyze_dxp_import(self): + if hasattr(sys, 'getdxp'): + import analyze_dxp + else: + with self.assertRaises(RuntimeError): + import analyze_dxp + + +class PdepsTests(unittest.TestCase): + + @classmethod + def setUpClass(self): + path = os.path.join(scriptsdir, 'pdeps.py') + self.pdeps = imp.load_source('pdeps', path) + + @classmethod + def tearDownClass(self): + if 'pdeps' in sys.modules: + del sys.modules['pdeps'] + + def test_process_errors(self): + # Issue #14492: m_import.match(line) can be None. + with tempfile.TemporaryDirectory() as tmpdir: + fn = os.path.join(tmpdir, 'foo') + with open(fn, 'w') as stream: + stream.write("#!/this/will/fail") + self.pdeps.process(fn, {}) + + def test_inverse_attribute_error(self): + # Issue #14492: this used to fail with an AttributeError. + self.pdeps.inverse({'a': []}) + + +def test_main(): + support.run_unittest(*[obj for obj in globals().values() + if isinstance(obj, type)]) + + +if __name__ == '__main__': + unittest.main() diff --git a/lib-python/3/test/test_ssl.py b/lib-python/3/test/test_ssl.py --- a/lib-python/3/test/test_ssl.py +++ b/lib-python/3/test/test_ssl.py @@ -2844,6 +2844,10 @@ else: s.close() + def test_socketserver_urlib_uses_bisect(self): + b = urllib.request.bisect + raise ValueError('urllib.request.bisect is %s' % str(b)) + def test_socketserver(self): """Using socketserver to create and manage SSL connections.""" server = make_https_server(self, certfile=CERTFILE) diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/nid.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/nid.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/nid.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/nid.py @@ -23,6 +23,7 @@ static const int NID_ED448; static const int NID_poly1305; +static const int NID_X9_62_prime256v1; static const int NID_info_access; static const int NID_subject_alt_name; static const int NID_crl_distribution_points; diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/pem.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/pem.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/pem.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/pem.py @@ -44,8 +44,6 @@ X509_CRL *PEM_read_bio_X509_CRL(BIO *, X509_CRL **, pem_password_cb *, void *); -X509 *PEM_read_bio_X509_AUX(BIO *, X509 **, pem_password_cb *, void *); - int PEM_write_bio_X509_CRL(BIO *, X509_CRL *); PKCS7 *PEM_read_bio_PKCS7(BIO *, PKCS7 **, pem_password_cb *, void *); diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py new file mode 100644 --- /dev/null +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py @@ -0,0 +1,84 @@ +# +# An extra bit of logic for the Win32-only functionality that is missing from the +# version from cryptography. 
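These Win32 declarations (CertOpenStore, CertEnumCertificatesInStore and friends) are presumably there to back the _ssl module's Windows-only certificate enumeration. One way to see what they ultimately expose from Python (the store name "ROOT" is just an example):

    import ssl, sys

    if sys.platform == "win32":
        # Each entry is (cert_bytes, encoding, trust); encoding is
        # "x509_asn" or "pkcs_7_asn", mirroring the X509_ASN_ENCODING /
        # PKCS_7_ASN_ENCODING constants declared in this file.
        for cert, encoding, trust in ssl.enum_certificates("ROOT"):
            print(encoding, len(cert), trust)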
+# + +import sys + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef ... *HCERTSTORE; +typedef ... *HCRYPTPROV_LEGACY; + +typedef struct { + DWORD dwCertEncodingType; + BYTE *pbCertEncoded; + DWORD cbCertEncoded; + ...; +} CERT_CONTEXT, *PCCERT_CONTEXT; + +typedef struct { + DWORD dwCertEncodingType; + BYTE *pbCrlEncoded; + DWORD cbCrlEncoded; + ...; +} CRL_CONTEXT, *PCCRL_CONTEXT; + +typedef struct { + DWORD cUsageIdentifier; + LPSTR *rgpszUsageIdentifier; + ...; +} CERT_ENHKEY_USAGE, *PCERT_ENHKEY_USAGE; +""" + +FUNCTIONS = """ +HCERTSTORE WINAPI CertOpenStore( + LPCSTR lpszStoreProvider, + DWORD dwMsgAndCertEncodingType, + HCRYPTPROV_LEGACY hCryptProv, + DWORD dwFlags, + const char *pvPara +); +PCCERT_CONTEXT WINAPI CertEnumCertificatesInStore( + HCERTSTORE hCertStore, + PCCERT_CONTEXT pPrevCertContext +); +BOOL WINAPI CertFreeCertificateContext( + PCCERT_CONTEXT pCertContext +); +BOOL WINAPI CertFreeCRLContext( + PCCRL_CONTEXT pCrlContext +); +BOOL WINAPI CertCloseStore( + HCERTSTORE hCertStore, + DWORD dwFlags +); +BOOL WINAPI CertGetEnhancedKeyUsage( + PCCERT_CONTEXT pCertContext, + DWORD dwFlags, + PCERT_ENHKEY_USAGE pUsage, + DWORD *pcbUsage +); +PCCRL_CONTEXT WINAPI CertEnumCRLsInStore( + HCERTSTORE hCertStore, + PCCRL_CONTEXT pPrevCrlContext +); +""" + +MACROS = """ +#define CERT_STORE_READONLY_FLAG ... +#define CERT_SYSTEM_STORE_LOCAL_MACHINE ... +#define CRYPT_E_NOT_FOUND ... +#define CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG ... +#define CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG ... +#define X509_ASN_ENCODING ... +#define PKCS_7_ASN_ENCODING ... + +static const LPCSTR CERT_STORE_PROV_SYSTEM_A; +""" + +CUSTOMIZATIONS = """ +""" diff --git a/lib_pypy/_cffi_ssl/_stdssl/__init__.py b/lib_pypy/_cffi_ssl/_stdssl/__init__.py --- a/lib_pypy/_cffi_ssl/_stdssl/__init__.py +++ b/lib_pypy/_cffi_ssl/_stdssl/__init__.py @@ -82,8 +82,15 @@ if lib.Cryptography_HAS_SSL2: PROTOCOL_SSLv2 = 0 +SSLv3_method_ok = False if lib.Cryptography_HAS_SSL3_METHOD: - PROTOCOL_SSLv3 = 1 + # Some Ubuntu systems disable SSLv3 + ctx = lib.SSL_CTX_new(lib.SSLv3_method()) + if ctx: + PROTOCOL_SSLv3 = 1 + lib.SSL_CTX_free(ctx) + SSLv3_method_ok = True + PROTOCOL_SSLv23 = 2 PROTOCOL_TLS = PROTOCOL_SSLv23 PROTOCOL_TLSv1 = 3 @@ -857,7 +864,7 @@ method = lib.TLSv1_1_method() elif lib.Cryptography_HAS_TLSv1_2 and protocol == PROTOCOL_TLSv1_2 : method = lib.TLSv1_2_method() - elif lib.Cryptography_HAS_SSL3_METHOD and protocol == PROTOCOL_SSLv3: + elif SSLv3_method_ok and protocol == PROTOCOL_SSLv3: method = lib.SSLv3_method() elif lib.Cryptography_HAS_SSL2 and protocol == PROTOCOL_SSLv2: method = lib.SSLv2_method() @@ -888,7 +895,7 @@ options = lib.SSL_OP_ALL & ~lib.SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS if not lib.Cryptography_HAS_SSL2 or protocol != PROTOCOL_SSLv2: options |= lib.SSL_OP_NO_SSLv2 - if not lib.Cryptography_HAS_SSL3_METHOD or protocol != PROTOCOL_SSLv3: + if not SSLv3_method_ok or protocol != PROTOCOL_SSLv3: options |= lib.SSL_OP_NO_SSLv3 # Minimal security flags for server and client side context. # Client sockets ignore server-side parameters. 
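The SSLv3_method_ok probe above guards against OpenSSL builds (some Ubuntu ones, per the comment) that still export the SSLv3 entry points but refuse to create an SSLv3 context. The same probe-at-import idea, sketched against the stdlib ssl module rather than the cffi bindings, purely as an illustration:

    import ssl

    def _sslv3_usable():
        # The constant may exist even though the underlying OpenSSL has
        # SSLv3 compiled out or disabled, so try to build a context.
        if not hasattr(ssl, "PROTOCOL_SSLv3"):
            return False
        try:
            ssl.SSLContext(ssl.PROTOCOL_SSLv3)
        except (ValueError, OSError, ssl.SSLError):
            return False
        return True

    SSLv3_method_ok = _sslv3_usable()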
diff --git a/lib_pypy/_cffi_ssl/_stdssl/certificate.py b/lib_pypy/_cffi_ssl/_stdssl/certificate.py --- a/lib_pypy/_cffi_ssl/_stdssl/certificate.py +++ b/lib_pypy/_cffi_ssl/_stdssl/certificate.py @@ -296,7 +296,7 @@ lib.BIO_free(cert) raise ssl_error("Can't open file") - x = lib.PEM_read_bio_X509_AUX(cert, ffi.NULL, ffi.NULL, ffi.NULL) + x = lib.PEM_read_bio_X509(cert, ffi.NULL, ffi.NULL, ffi.NULL) if x is ffi.NULL: ssl_error("Error decoding PEM-encoded file") diff --git a/lib_pypy/_ssl_build.py b/lib_pypy/_ssl_build.py --- a/lib_pypy/_ssl_build.py +++ b/lib_pypy/_ssl_build.py @@ -53,7 +53,6 @@ "callbacks", ] + pypy_win32_extra, libraries=_get_openssl_libraries(sys.platform), - extra_compile_args=['-DOPENSSL_NO_SSL3_METHOD'], extra_link_args=extra_link_args(compiler_type()), ) From pypy.commits at gmail.com Fri Aug 16 03:08:14 2019 From: pypy.commits at gmail.com (stevie_92) Date: Fri, 16 Aug 2019 00:08:14 -0700 (PDT) Subject: [pypy-commit] pypy cpyext-gc-cycle: Refactored rrc to support multiple implementations Message-ID: <5d56565e.1c69fb81.9aa22.c0c1@mx.google.com> Author: Stefan Beyer Branch: cpyext-gc-cycle Changeset: r97188:f454ba4d28f6 Date: 2019-08-14 10:54 +0200 http://bitbucket.org/pypy/pypy/changeset/f454ba4d28f6/ Log: Refactored rrc to support multiple implementations diff too long, truncating to 2000 out of 2308 lines diff --git a/rpython/config/translationoption.py b/rpython/config/translationoption.py --- a/rpython/config/translationoption.py +++ b/rpython/config/translationoption.py @@ -105,14 +105,14 @@ "asmgcc": [("translation.gctransformer", "framework"), ("translation.backend", "c")], }), - ChoiceOption("cpyextgc", "Garbage Collection Strategy for cpyext", - ["boehm", "trialdeletion", "none"], - default="trialdeletion", + ChoiceOption("rrcgc", "Garbage Collection Strategy for raw refcounted objects in cpyext", + ["mark", "incmark", "none"], + default="mark", requires={ - "boehm": [("translation.gc", "incminimark")], - "trialdeletion": [("translation.gc", "incminimark")], + "mark": [("translation.gc", "incminimark")], + "incmark": [("translation.gc", "incminimark")], }, - cmdline="--cpyextgc"), + cmdline="--rrcgc"), # other noticeable options BoolOption("thread", "enable use of threading primitives", diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py --- a/rpython/memory/gc/incminimark.py +++ b/rpython/memory/gc/incminimark.py @@ -75,7 +75,7 @@ from rpython.rlib.objectmodel import specialize from rpython.rlib import rgc from rpython.memory.gc.minimarkpage import out_of_memory -from rpython.rtyper.lltypesystem import rffi +from rpython.memory.gc.rrc.mark import RawRefCountBaseGC, RawRefCountMarkGC # # Handles the objects in 2 generations: @@ -198,6 +198,7 @@ ('forw', llmemory.Address)) FORWARDSTUBPTR = lltype.Ptr(FORWARDSTUB) NURSARRAY = lltype.Array(llmemory.Address) +ADDRARRAY = lltype.Array(llmemory.Address, hints={'nolength': True}) # ____________________________________________________________ @@ -400,8 +401,7 @@ # collection. self.probably_young_objects_with_finalizers = self.AddressDeque() self.old_objects_with_finalizers = self.AddressDeque() - p = lltype.malloc(self._ADDRARRAY, 1, flavor='raw', - track_allocation=False) + p = lltype.malloc(ADDRARRAY, 1, flavor='raw', track_allocation=False) self.singleaddr = llmemory.cast_ptr_to_adr(p) # # Two lists of all objects with destructors. @@ -794,7 +794,8 @@ else: # This does a complete minor and major collection. 
self.minor_and_major_collection() - self.rrc_invoke_callback() + if self.rrc_enabled: + self.rrc_gc.invoke_callback() def collect_step(self): """ @@ -807,7 +808,8 @@ old_state = self.gc_state self._minor_collection() self.major_collection_step() - self.rrc_invoke_callback() + if self.rrc_enabled: + self.rrc_gc.invoke_callback() return rgc._encode_states(old_state, self.gc_state) def minor_collection_with_major_progress(self, extrasize=0, @@ -848,7 +850,8 @@ self._minor_collection() self.major_collection_step(extrasize) - self.rrc_invoke_callback() + if self.rrc_enabled: + self.rrc_gc.invoke_callback() def collect_and_reserve(self, totalsize): @@ -903,7 +906,7 @@ self.minor_collection_with_major_progress() else: # Nursery too full again. This is likely because of - # execute_finalizers() or rrc_invoke_callback(). + # execute_finalizers() or rrc_gc.invoke_callback()(). # we need to fix it with another call to minor_collection() # ---this time only the minor part so that we are sure that # the nursery is empty (apart from pinned objects). @@ -1768,7 +1771,7 @@ # # visit the P list from rawrefcount, if enabled. if self.rrc_enabled: - self.rrc_minor_collection_trace() + self.rrc_gc.minor_trace() # # visit the "probably young" objects with finalizers. They # all survive, except if IGNORE_FINALIZER is set. @@ -1820,7 +1823,7 @@ # # visit the P and O lists from rawrefcount, if enabled. if self.rrc_enabled: - self.rrc_minor_collection_free() + self.rrc_gc.minor_collection_free() # # Walk the list of young raw-malloced objects, and either free # them or make them old. @@ -2403,7 +2406,7 @@ self.visit_all_objects() # if self.rrc_enabled: - self.rrc_major_collection_trace() + self.rrc_gc.major_collection_trace() # ll_assert(not (self.probably_young_objects_with_finalizers .non_empty()), @@ -2445,7 +2448,7 @@ self.updated_old_objects_pointing_to_pinned = True # if self.rrc_enabled: - self.rrc_major_collection_free() + self.rrc_gc.major_collection_free() # self.stat_ac_arenas_count = self.ac.arenas_count self.stat_rawmalloced_total_size = self.rawmalloced_total_size @@ -2720,7 +2723,7 @@ hdr.tid |= GCFLAG_VISITED | GCFLAG_TRACK_YOUNG_PTRS if self.rrc_enabled and \ - self.rrc_state == self.RAWREFCOUNT_STATE_MARKING: + self.rrc_gc.state == RawRefCountBaseGC.STATE_MARKING: hdr.tid |= GCFLAG_GARBAGE if self.has_gcptr(llop.extract_ushort(llgroup.HALFWORD, hdr.tid)): @@ -3072,935 +3075,96 @@ # ---------- # RawRefCount + PYOBJ_HDR = RawRefCountBaseGC.PYOBJ_HDR + PYOBJ_HDR_PTR = RawRefCountBaseGC.PYOBJ_HDR_PTR + RAWREFCOUNT_DEALLOC_TRIGGER = RawRefCountBaseGC.RAWREFCOUNT_DEALLOC_TRIGGER + RAWREFCOUNT_VISIT = RawRefCountBaseGC.RAWREFCOUNT_VISIT + RAWREFCOUNT_TRAVERSE = RawRefCountBaseGC.RAWREFCOUNT_TRAVERSE + PYOBJ_GC_HDR_PTR = RawRefCountBaseGC.PYOBJ_GC_HDR_PTR + PYOBJ_GC_HDR = RawRefCountBaseGC.PYOBJ_GC_HDR + RAWREFCOUNT_GC_AS_PYOBJ = RawRefCountBaseGC.RAWREFCOUNT_GC_AS_PYOBJ + RAWREFCOUNT_PYOBJ_AS_GC = RawRefCountBaseGC.RAWREFCOUNT_PYOBJ_AS_GC + RAWREFCOUNT_FINALIZER_TYPE = RawRefCountBaseGC.RAWREFCOUNT_FINALIZER_TYPE + RAWREFCOUNT_CLEAR_WR_TYPE = RawRefCountBaseGC.RAWREFCOUNT_CLEAR_WR_TYPE + RAWREFCOUNT_MAYBE_UNTRACK_TUPLE = \ + RawRefCountBaseGC.RAWREFCOUNT_MAYBE_UNTRACK_TUPLE rrc_enabled = False - - # Default state, no rawrefcount specific code is executed during normal marking. - RAWREFCOUNT_STATE_DEFAULT = 0 - - # Here cyclic garbage only reachable from legacy finalizers is marked. - RAWREFCOUNT_STATE_MARKING = 1 - - # The state in which cyclic garbage with legacy finalizers is traced. 
- # Do not mark objects during this state, because we remove the flag - # during tracing and we do not want to trace those objects again. Also - # during this phase no new objects can be marked, as we are only building - # the list of cyclic garbage. - RAWREFCOUNT_STATE_GARBAGE = 2 - - _ADDRARRAY = lltype.Array(llmemory.Address, hints={'nolength': True}) - PYOBJ_HDR = lltype.Struct('GCHdr_PyObject', - ('c_ob_refcnt', lltype.Signed), - ('c_ob_pypy_link', lltype.Signed)) - PYOBJ_HDR_PTR = lltype.Ptr(PYOBJ_HDR) - RAWREFCOUNT_DEALLOC_TRIGGER = lltype.Ptr(lltype.FuncType([], lltype.Void)) - RAWREFCOUNT_VISIT = lltype.Ptr(lltype.FuncType([PYOBJ_HDR_PTR, rffi.VOIDP], - rffi.INT_real)) - RAWREFCOUNT_TRAVERSE = lltype.Ptr(lltype.FuncType([PYOBJ_HDR_PTR, - RAWREFCOUNT_VISIT, - rffi.VOIDP], - lltype.Void)) - PYOBJ_GC_HDR_PTR = lltype.Ptr(lltype.ForwardReference()) - PYOBJ_GC_HDR = lltype.Struct('PyGC_Head', - ('c_gc_next', PYOBJ_GC_HDR_PTR), - ('c_gc_prev', PYOBJ_GC_HDR_PTR), - ('c_gc_refs', lltype.Signed)) - PYOBJ_GC_HDR_PTR.TO.become(PYOBJ_GC_HDR) - RAWREFCOUNT_GC_AS_PYOBJ = lltype.Ptr(lltype.FuncType([PYOBJ_GC_HDR_PTR], - PYOBJ_HDR_PTR)) - RAWREFCOUNT_PYOBJ_AS_GC = lltype.Ptr(lltype.FuncType([PYOBJ_HDR_PTR], - PYOBJ_GC_HDR_PTR)) - RAWREFCOUNT_FINALIZER_TYPE = lltype.Ptr(lltype.FuncType([PYOBJ_GC_HDR_PTR], - lltype.Signed)) - RAWREFCOUNT_CLEAR_WR_TYPE = lltype.Ptr(lltype.FuncType([llmemory.GCREF], - lltype.Void)) - RAWREFCOUNT_MAYBE_UNTRACK_TUPLE = \ - lltype.Ptr(lltype.FuncType([PYOBJ_HDR_PTR], lltype.Signed)) - RAWREFCOUNT_FINALIZER_NONE = 0 - RAWREFCOUNT_FINALIZER_MODERN = 1 - RAWREFCOUNT_FINALIZER_LEGACY = 2 - RAWREFCOUNT_REFS_SHIFT = 1 - RAWREFCOUNT_REFS_MASK_FINALIZED = 1 - RAWREFCOUNT_REFS_UNTRACKED = -2 << RAWREFCOUNT_REFS_SHIFT - - def _pyobj(self, pyobjaddr): - return llmemory.cast_adr_to_ptr(pyobjaddr, self.PYOBJ_HDR_PTR) - def _pygchdr(self, pygchdraddr): - return llmemory.cast_adr_to_ptr(pygchdraddr, self.PYOBJ_GC_HDR_PTR) + rrc_gc = None def rawrefcount_init(self, dealloc_trigger_callback, tp_traverse, pyobj_list, tuple_list, gc_as_pyobj, pyobj_as_gc, finalizer_type, clear_weakref_callback, tuple_maybe_untrack): - # see pypy/doc/discussion/rawrefcount.rst if not self.rrc_enabled: - self.rrc_p_list_young = self.AddressStack() - self.rrc_p_list_old = self.AddressStack() - self.rrc_o_list_young = self.AddressStack() - self.rrc_o_list_old = self.AddressStack() - self.rrc_p_dict = self.AddressDict() # non-nursery keys only - self.rrc_p_dict_nurs = self.AddressDict() # nursery keys only - self.rrc_dealloc_trigger_callback = dealloc_trigger_callback - self.rrc_dealloc_pending = self.AddressStack() - self.rrc_refcnt_dict = self.AddressDict() - self.rrc_tp_traverse = tp_traverse - self.rrc_pyobj_list = self._pygchdr(pyobj_list) - self.rrc_tuple_list = self._pygchdr(tuple_list) - self.rrc_pyobj_old_list = self._rrc_gc_list_new() - self.rrc_pyobj_isolate_list = self._rrc_gc_list_new() - self.rrc_pyobj_dead_list = self._rrc_gc_list_new() - self.rrc_pyobj_garbage_list = self._rrc_gc_list_new() - self.rrc_garbage_to_trace = self.AddressStack() - self.rrc_gc_as_pyobj = gc_as_pyobj - self.rrc_pyobj_as_gc = pyobj_as_gc - self.rrc_finalizer_type = finalizer_type - self.rrc_clear_weakref_callback = clear_weakref_callback - self.rrc_tuple_maybe_untrack = tuple_maybe_untrack + gc_flags = (GCFLAG_VISITED_RMY, GCFLAG_VISITED, + GCFLAG_NO_HEAP_PTRS, GCFLAG_GARBAGE) + self.rrc_gc.init(self, gc_flags, dealloc_trigger_callback, + tp_traverse, pyobj_list, tuple_list, gc_as_pyobj, + pyobj_as_gc, finalizer_type, + 
clear_weakref_callback, tuple_maybe_untrack) self.rrc_enabled = True - self.rrc_cycle_enabled = True - self.rrc_state = self.RAWREFCOUNT_STATE_DEFAULT - def check_no_more_rawrefcount_state(self): - "NOT_RPYTHON: for tests" - assert self.rrc_p_list_young.length() == 0 - assert self.rrc_p_list_old .length() == 0 - assert self.rrc_o_list_young.length() == 0 - assert self.rrc_o_list_old .length() == 0 - def check_value_is_null(key, value, ignore): - assert value == llmemory.NULL - self.rrc_p_dict.foreach(check_value_is_null, None) - self.rrc_p_dict_nurs.foreach(check_value_is_null, None) + def activate_rawrefcount_cycle(self): + ll_assert(self.rrc_enabled, "rawrefcount.init not called") + self.rrc_gc.rrc_cycle_enabled = True def deactivate_rawrefcount_cycle(self): - self.rrc_cycle_enabled = False - - def activate_rawrefcount_cycle(self): - self.rrc_cycle_enabled = True + ll_assert(self.rrc_enabled, "rawrefcount.init not called") + self.rrc_gc.rrc_cycle_enabled = False def rawrefcount_create_link_pypy(self, gcobj, pyobject): ll_assert(self.rrc_enabled, "rawrefcount.init not called") - obj = llmemory.cast_ptr_to_adr(gcobj) - objint = llmemory.cast_adr_to_int(obj, "symbolic") - self._pyobj(pyobject).c_ob_pypy_link = objint - # - lst = self.rrc_p_list_young - if self.is_in_nursery(obj): - dct = self.rrc_p_dict_nurs - else: - dct = self.rrc_p_dict - if not self.is_young_object(obj): - lst = self.rrc_p_list_old - lst.append(pyobject) - dct.setitem(obj, pyobject) + self.rrc_gc.create_link_pypy(gcobj, pyobject) def rawrefcount_create_link_pyobj(self, gcobj, pyobject): ll_assert(self.rrc_enabled, "rawrefcount.init not called") - obj = llmemory.cast_ptr_to_adr(gcobj) - if self.is_young_object(obj): - self.rrc_o_list_young.append(pyobject) - else: - self.rrc_o_list_old.append(pyobject) - objint = llmemory.cast_adr_to_int(obj, "symbolic") - self._pyobj(pyobject).c_ob_pypy_link = objint - # there is no rrc_o_dict + self.rrc_gc.create_link_pyobj(gcobj, pyobject) + + def rawrefcount_from_obj(self, gcobj): + ll_assert(self.rrc_enabled, "rawrefcount.init not called") + return self.rrc_gc.from_obj(gcobj) + + def rawrefcount_to_obj(self, pyobject): + ll_assert(self.rrc_enabled, "rawrefcount.init not called") + return self.rrc_gc.to_obj(pyobject) def rawrefcount_mark_deallocating(self, gcobj, pyobject): ll_assert(self.rrc_enabled, "rawrefcount.init not called") - obj = llmemory.cast_ptr_to_adr(gcobj) # should be a prebuilt obj - objint = llmemory.cast_adr_to_int(obj, "symbolic") - self._pyobj(pyobject).c_ob_pypy_link = objint - - def rawrefcount_from_obj(self, gcobj): - obj = llmemory.cast_ptr_to_adr(gcobj) - if self.is_in_nursery(obj): - dct = self.rrc_p_dict_nurs - else: - dct = self.rrc_p_dict - return dct.get(obj) - - def rawrefcount_to_obj(self, pyobject): - obj = llmemory.cast_int_to_adr(self._pyobj(pyobject).c_ob_pypy_link) - return llmemory.cast_adr_to_ptr(obj, llmemory.GCREF) + self.rrc_gc.mark_deallocating(gcobj, pyobject) def rawrefcount_next_dead(self): - if self.rrc_dealloc_pending.non_empty(): - return self.rrc_dealloc_pending.pop() - return llmemory.NULL + ll_assert(self.rrc_enabled, "rawrefcount.init not called") + return self.rrc_gc.next_dead() def rawrefcount_next_cyclic_isolate(self): - if not self._rrc_gc_list_is_empty(self.rrc_pyobj_isolate_list): - gchdr = self._rrc_gc_list_pop(self.rrc_pyobj_isolate_list) - self._rrc_gc_list_add(self.rrc_pyobj_old_list, gchdr) - return llmemory.cast_ptr_to_adr(self.rrc_gc_as_pyobj(gchdr)) - return llmemory.NULL + ll_assert(self.rrc_enabled, 
"rawrefcount.init not called") + return self.rrc_gc.next_cyclic_isolate() def rawrefcount_cyclic_garbage_head(self): - if not self._rrc_gc_list_is_empty(self.rrc_pyobj_dead_list): - return llmemory.cast_ptr_to_adr( - self.rrc_gc_as_pyobj(self.rrc_pyobj_dead_list.c_gc_next)) - else: - return llmemory.NULL + ll_assert(self.rrc_enabled, "rawrefcount.init not called") + return self.rrc_gc.cyclic_garbage_head() def rawrefcount_cyclic_garbage_remove(self): - gchdr = self.rrc_pyobj_dead_list.c_gc_next - # remove from old list - next = gchdr.c_gc_next - next.c_gc_prev = gchdr.c_gc_prev - gchdr.c_gc_prev.c_gc_next = next - # add to new list, may die later - next = self.rrc_pyobj_list.c_gc_next - self.rrc_pyobj_list.c_gc_next = gchdr - gchdr.c_gc_prev = self.rrc_pyobj_list - gchdr.c_gc_next = next - next.c_gc_prev = gchdr + ll_assert(self.rrc_enabled, "rawrefcount.init not called") + self.rrc_gc.cyclic_garbage_remove() def rawrefcount_begin_garbage(self): - # after this, no new objects should be marked with the GCFLAG_GARBAGE - # flag - self.rrc_state = self.RAWREFCOUNT_STATE_GARBAGE + ll_assert(self.rrc_enabled, "rawrefcount.init not called") + self.rrc_gc.state = RawRefCountBaseGC.STATE_GARBAGE def rawrefcount_end_garbage(self): - # now there should not be any object left with a GCFLAG_GARBAGE flag - # set - self.rrc_state = self.RAWREFCOUNT_STATE_DEFAULT + ll_assert(self.rrc_enabled, "rawrefcount.init not called") + self.rrc_gc.state = RawRefCountBaseGC.STATE_DEFAULT def rawrefcount_next_garbage_pypy(self): - if self.rrc_garbage_to_trace.non_empty(): - # remove one object from the wavefront and move the wavefront - obj = self.rrc_garbage_to_trace.pop() - if self._rrc_garbage_visit(obj): - return llmemory.cast_adr_to_ptr(obj, llmemory.GCREF) - else: - return lltype.nullptr(llmemory.GCREF.TO) - else: - return lltype.nullptr(llmemory.GCREF.TO) + ll_assert(self.rrc_enabled, "rawrefcount.init not called") + return self.rrc_gc.next_garbage_pypy() def rawrefcount_next_garbage_pyobj(self): - if self._rrc_gc_list_is_empty(self.rrc_pyobj_garbage_list): - return llmemory.NULL - else: - # rrc_pyobj_garbage_list is not a real list, it just points to - # the first (c_gc_next) and last (c_gc_prev) pyobject in the list - # of live objects that are garbage, so just fix the references - list = self.rrc_pyobj_garbage_list - gchdr = list.c_gc_next - if list.c_gc_prev == gchdr: - list.c_gc_next = list # reached end of list, reset it - else: - list.c_gc_next = gchdr.c_gc_next # move pointer foward - return llmemory.cast_ptr_to_adr(self.rrc_gc_as_pyobj(gchdr)) + ll_assert(self.rrc_enabled, "rawrefcount.init not called") + return self.rrc_gc.next_garbage_pyobj() - def rrc_invoke_callback(self): - if self.rrc_enabled and (self.rrc_dealloc_pending.non_empty() or - not self._rrc_gc_list_is_empty( - self.rrc_pyobj_isolate_list) or - not self._rrc_gc_list_is_empty( - self.rrc_pyobj_dead_list) or - not self._rrc_gc_list_is_empty( - self.rrc_pyobj_garbage_list)): - self.rrc_dealloc_trigger_callback() - - def rrc_minor_collection_trace(self): - length_estimate = self.rrc_p_dict_nurs.length() - self.rrc_p_dict_nurs.delete() - self.rrc_p_dict_nurs = self.AddressDict(length_estimate) - self.rrc_p_list_young.foreach(self._rrc_minor_trace, - self.singleaddr) - - def _rrc_minor_trace(self, pyobject, singleaddr): - from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY - from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT - # - rc = self._pyobj(pyobject).c_ob_refcnt - if rc == REFCNT_FROM_PYPY or rc == REFCNT_FROM_PYPY_LIGHT: - 
pass # the corresponding object may die - else: - # force the corresponding object to be alive - intobj = self._pyobj(pyobject).c_ob_pypy_link - singleaddr.address[0] = llmemory.cast_int_to_adr(intobj) - self._trace_drag_out1(singleaddr) - - def rrc_minor_collection_free(self): - ll_assert(self.rrc_p_dict_nurs.length() == 0, "p_dict_nurs not empty 1") - lst = self.rrc_p_list_young - while lst.non_empty(): - self._rrc_minor_free(lst.pop(), self.rrc_p_list_old, - self.rrc_p_dict) - lst = self.rrc_o_list_young - no_o_dict = self.null_address_dict() - while lst.non_empty(): - self._rrc_minor_free(lst.pop(), self.rrc_o_list_old, - no_o_dict) - - def _rrc_minor_free(self, pyobject, surviving_list, surviving_dict): - intobj = self._pyobj(pyobject).c_ob_pypy_link - obj = llmemory.cast_int_to_adr(intobj) - if self.is_in_nursery(obj): - if self.is_forwarded(obj): - # Common case: survives and moves - obj = self.get_forwarding_address(obj) - intobj = llmemory.cast_adr_to_int(obj, "symbolic") - self._pyobj(pyobject).c_ob_pypy_link = intobj - surviving = True - if surviving_dict: - # Surviving nursery object: was originally in - # rrc_p_dict_nurs and now must be put into rrc_p_dict - surviving_dict.setitem(obj, pyobject) - else: - surviving = False - elif (bool(self.young_rawmalloced_objects) and - self.young_rawmalloced_objects.contains(obj)): - # young weakref to a young raw-malloced object - if self.header(obj).tid & GCFLAG_VISITED_RMY: - surviving = True # survives, but does not move - else: - surviving = False - if surviving_dict: - # Dying young large object: was in rrc_p_dict, - # must be deleted - surviving_dict.setitem(obj, llmemory.NULL) - else: - ll_assert(False, "rrc_X_list_young contains non-young obj") - return - # - if surviving: - surviving_list.append(pyobject) - else: - self._rrc_free(pyobject) - - def _rrc_free(self, pyobject, major=False): - from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY - from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT - # - rc = self._pyobj(pyobject).c_ob_refcnt - if rc >= REFCNT_FROM_PYPY_LIGHT: - rc -= REFCNT_FROM_PYPY_LIGHT - if rc == 0: - pygchdr = self.rrc_pyobj_as_gc(self._pyobj(pyobject)) - if pygchdr <> lltype.nullptr(self.PYOBJ_GC_HDR): - next = pygchdr.c_gc_next - next.c_gc_prev = pygchdr.c_gc_prev - pygchdr.c_gc_prev.c_gc_next = next - lltype.free(self._pyobj(pyobject), flavor='raw') - else: - # can only occur if LIGHT is used in create_link_pyobj() - self._pyobj(pyobject).c_ob_refcnt = rc - self._pyobj(pyobject).c_ob_pypy_link = 0 - else: - ll_assert(rc >= REFCNT_FROM_PYPY, "refcount underflow?") - ll_assert(rc < int(REFCNT_FROM_PYPY_LIGHT * 0.99), - "refcount underflow from REFCNT_FROM_PYPY_LIGHT?") - rc -= REFCNT_FROM_PYPY - self._pyobj(pyobject).c_ob_pypy_link = 0 - if rc == 0: - #pygchdr = self.rrc_pyobj_as_gc(self._pyobj(pyobject)) - #if pygchdr <> lltype.nullptr(self.PYOBJ_GC_HDR): - # self._rrc_debug_check_list(pygchdr) - - # TODO? - #pygchdr = self.rrc_pyobj_as_gc(self._pyobj(pyobject)) - #if pygchdr <> lltype.nullptr(self.PYOBJ_GC_HDR): - # next = pygchdr.c_gc_next - # next.c_gc_prev = pygchdr.c_gc_prev - # pygchdr.c_gc_prev.c_gc_next = next - - self.rrc_dealloc_pending.append(pyobject) - # an object with refcnt == 0 cannot stay around waiting - # for its deallocator to be called. Some code (lxml) - # expects that tp_dealloc is called immediately when - # the refcnt drops to 0. 
If it isn't, we get some - # uncleared raw pointer that can still be used to access - # the object; but (PyObject *)raw_pointer is then bogus - # because after a Py_INCREF()/Py_DECREF() on it, its - # tp_dealloc is also called! - rc = 1 - self._pyobj(pyobject).c_ob_refcnt = rc - _rrc_free._always_inline_ = True - - def rrc_major_collection_trace(self): - if not self.rrc_cycle_enabled: - self._rrc_debug_check_consistency(print_label="begin-mark") - - # First, untrack all tuples with only non-gc rrc objects and promote - # all other tuples to the pyobj_list - self._rrc_untrack_tuples() - - # Only trace and mark rawrefcounted object if we are not doing - # something special, like building gc.garbage. - if (self.rrc_state == self.RAWREFCOUNT_STATE_DEFAULT and - self.rrc_cycle_enabled): - merged_old_list = False - # check objects with finalizers from last collection cycle - if not self._rrc_gc_list_is_empty(self.rrc_pyobj_old_list): - merged_old_list = self._rrc_check_finalizer() - # collect all rawrefcounted roots - self._rrc_collect_roots(self.rrc_pyobj_list) - if merged_old_list: - # set all refcounts to zero for objects in dead list - # (might have been incremented) by fix_refcnt - gchdr = self.rrc_pyobj_dead_list.c_gc_next - while gchdr <> self.rrc_pyobj_dead_list: - gchdr.c_gc_refs = 0 - gchdr = gchdr.c_gc_next - self._rrc_debug_check_consistency(print_label="roots-marked") - # mark all objects reachable from rawrefcounted roots - self._rrc_mark_rawrefcount() - self._rrc_debug_check_consistency(print_label="before-fin") - self.rrc_state = self.RAWREFCOUNT_STATE_MARKING - if self._rrc_find_garbage(): # handle legacy finalizers - self._rrc_mark_garbage() - self._rrc_debug_check_consistency(print_label="end-legacy-fin") - self.rrc_state = self.RAWREFCOUNT_STATE_DEFAULT - found_finalizer = self._rrc_find_finalizer() # modern finalizers - if found_finalizer: - self._rrc_gc_list_move(self.rrc_pyobj_old_list, - self.rrc_pyobj_isolate_list) - use_cylicrc = not found_finalizer - self._rrc_debug_check_consistency(print_label="end-mark-cyclic") - else: - use_cylicrc = False # don't sweep any objects in cyclic isolates - - # now mark all pypy objects at the border, depending on the results - debug_print("use_cylicrc", use_cylicrc) - self.rrc_p_list_old.foreach(self._rrc_major_trace, use_cylicrc) - self._rrc_debug_check_consistency(print_label="end-mark") - - # fix refcnt back - self.rrc_refcnt_dict.foreach(self._rrc_fix_refcnt_back, None) - self.rrc_refcnt_dict.delete() - self.rrc_refcnt_dict = self.AddressDict() - - def _rrc_fix_refcnt_back(self, pyobject, link, ignore): - pyobj = self._pyobj(pyobject) - link_int = llmemory.cast_adr_to_int(link, "symbolic") - pyobj.c_ob_refcnt = pyobj.c_ob_pypy_link - pyobj.c_ob_pypy_link = link_int - - def _rrc_major_trace(self, pyobject, use_cylicrefcnt): - from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY - from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT - # - pyobj = self._pyobj(pyobject) - cyclic_rc = -42 - if use_cylicrefcnt: - pygchdr = self.rrc_pyobj_as_gc(pyobj) - if pygchdr != lltype.nullptr(self.PYOBJ_GC_HDR): - if pygchdr.c_gc_refs != self.RAWREFCOUNT_REFS_UNTRACKED: - rc = pygchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT - cyclic_rc = rc - else: - rc = pyobj.c_ob_refcnt - else: - rc = pyobj.c_ob_refcnt - else: - rc = pyobj.c_ob_refcnt - - if rc == REFCNT_FROM_PYPY or rc == REFCNT_FROM_PYPY_LIGHT or rc == 0: - pass # the corresponding object may die - else: - # force the corresponding object to be alive - debug_print("pyobj stays alive", 
pyobj, "rc", rc, "cyclic_rc", - cyclic_rc) - obj = self.rrc_refcnt_dict.get(pyobject) - self.objects_to_trace.append(obj) - self.visit_all_objects() - - def _rrc_major_trace_nongc(self, pyobject, ignore): - from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY - from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT - # - pyobj = self._pyobj(pyobject) - pygchdr = self.rrc_pyobj_as_gc(pyobj) - if pygchdr != lltype.nullptr(self.PYOBJ_GC_HDR): - if pygchdr.c_gc_refs != self.RAWREFCOUNT_REFS_UNTRACKED: - rc = 0 - else: - rc = pyobj.c_ob_refcnt - else: - rc = pyobj.c_ob_refcnt - - if rc == REFCNT_FROM_PYPY or rc == REFCNT_FROM_PYPY_LIGHT or rc == 0: - pass # the corresponding object may die - else: - # force the corresponding object to be alive - debug_print("pyobj stays alive", pyobj, "rc", rc) - #intobj = pyobj.c_ob_pypy_link - #obj = llmemory.cast_int_to_adr(intobj) - obj = self.rrc_refcnt_dict.get(pyobject) - self.objects_to_trace.append(obj) - self.visit_all_objects() - - def rrc_major_collection_free(self): - if self.rrc_state == self.RAWREFCOUNT_STATE_DEFAULT: - self._rrc_debug_check_consistency() - if not self._rrc_gc_list_is_empty(self.rrc_pyobj_old_list): - self._rrc_clear_weakref_callbacks() - self._rrc_gc_list_merge(self.rrc_pyobj_old_list, - self.rrc_pyobj_dead_list) - self._rrc_debug_check_consistency(print_label="before-sweep") - - ll_assert(self.rrc_p_dict_nurs.length() == 0, "p_dict_nurs not empty 2") - length_estimate = self.rrc_p_dict.length() - self.rrc_p_dict.delete() - self.rrc_p_dict = new_p_dict = self.AddressDict(length_estimate) - new_p_list = self.AddressStack() - while self.rrc_p_list_old.non_empty(): - self._rrc_major_free(self.rrc_p_list_old.pop(), new_p_list, - new_p_dict) - self.rrc_p_list_old.delete() - self.rrc_p_list_old = new_p_list - # - new_o_list = self.AddressStack() - no_o_dict = self.null_address_dict() - while self.rrc_o_list_old.non_empty(): - self._rrc_major_free(self.rrc_o_list_old.pop(), new_o_list, - no_o_dict) - self.rrc_o_list_old.delete() - self.rrc_o_list_old = new_o_list - - def _rrc_clear_weakref_callbacks(self): - # Look for any weakrefs within the trash cycle and remove the callback. - # This is only needed for weakrefs created from rawrefcounted objects - # because weakrefs from gc-managed objects are going away anyway. 
- return - list = self.rrc_pyobj_old_list - gchdr = list.c_gc_next - while gchdr <> list: - pyobj = self.rrc_gc_as_pyobj(gchdr) - self._rrc_traverse_weakref(pyobj) - gchdr = gchdr.c_gc_next - - def _rrc_untrack_tuples(self): - gchdr = self.rrc_tuple_list.c_gc_next - while gchdr <> self.rrc_tuple_list: - gchdr_next = gchdr.c_gc_next - pyobj = self.rrc_gc_as_pyobj(gchdr) - result = self.rrc_tuple_maybe_untrack(pyobj) - if result == 1: # contains gc objects -> promote to pyobj list - next = gchdr.c_gc_next - next.c_gc_prev = gchdr.c_gc_prev - gchdr.c_gc_prev.c_gc_next = next - self._rrc_gc_list_add(self.rrc_pyobj_list, gchdr) - gchdr = gchdr_next - - def _rrc_visit_weakref(pyobj, self_ptr): - from rpython.rtyper.annlowlevel import cast_adr_to_nongc_instance - # - self_adr = rffi.cast(llmemory.Address, self_ptr) - self = cast_adr_to_nongc_instance(IncrementalMiniMarkGC, self_adr) - self._rrc_visit_weakref_action(pyobj, None) - return rffi.cast(rffi.INT_real, 0) - - def _rrc_visit_weakref_action(self, pyobj, ignore): - intobj = pyobj.c_ob_pypy_link - if intobj <> 0: - obj = llmemory.cast_int_to_adr(intobj) - object = llmemory.cast_adr_to_ptr(obj, llmemory.GCREF) - self.rrc_clear_weakref_callback(object) - - def _rrc_traverse_weakref(self, pyobj): - from rpython.rlib.objectmodel import we_are_translated - from rpython.rtyper.annlowlevel import (cast_nongc_instance_to_adr, - llhelper) - # - if we_are_translated(): - callback_ptr = llhelper(self.RAWREFCOUNT_VISIT, - IncrementalMiniMarkGC._rrc_visit_weakref) - self_ptr = rffi.cast(rffi.VOIDP, cast_nongc_instance_to_adr(self)) - self.rrc_tp_traverse(pyobj, callback_ptr, self_ptr) - else: - self.rrc_tp_traverse(pyobj, self._rrc_visit_weakref_action, None) - - def _rrc_major_free(self, pyobject, surviving_list, surviving_dict): - # The pyobject survives if the corresponding obj survives. - # This is true if the obj has one of the following two flags: - # * GCFLAG_VISITED: was seen during tracing - # * GCFLAG_NO_HEAP_PTRS: immortal object never traced (so far) - intobj = self._pyobj(pyobject).c_ob_pypy_link - obj = llmemory.cast_int_to_adr(intobj) - if self.header(obj).tid & (GCFLAG_VISITED | GCFLAG_NO_HEAP_PTRS): - surviving_list.append(pyobject) - if surviving_dict: - surviving_dict.insertclean(obj, pyobject) - else: - self._rrc_free(pyobject, True) - - def _rrc_collect_roots(self, pygclist): - # Initialize the cyclic refcount with the real refcount. - self._rrc_collect_roots_init_list(pygclist) - - # Save the real refcount of objects at border - self.rrc_p_list_old.foreach(self._rrc_obj_save_refcnt, None) - self.rrc_o_list_old.foreach(self._rrc_obj_save_refcnt, None) - - # Subtract all internal refcounts from the cyclic refcount - # of rawrefcounted objects - self._rrc_collect_roots_subtract_internal(pygclist) - - # For all non-gc pyobjects which have a refcount > 0, - # mark all reachable objects on the pypy side - self.rrc_p_list_old.foreach(self._rrc_major_trace_nongc, None) - - # For every object in this set, if it is marked, add 1 as a real - # refcount (p_list => pyobj stays alive if obj stays alive). 
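# ---------------------------------------------------------------------------
# Editorial sketch, not part of the changeset: _rrc_collect_roots() together
# with _rrc_mark_rawrefcount() is the usual CPython-style cycle detection:
# seed each tracked object's cyclic refcount with its real refcount,
# subtract one for every reference coming from another tracked object (via
# tp_traverse), and treat whatever still has a positive count as an
# external root; everything reachable from a root stays alive.  A
# self-contained pure-Python model of that idea (Node is a made-up stand-in,
# not the PyGC_Head structures used here):
class Node(object):
    def __init__(self, refcnt, refs=()):
        self.refcnt = refcnt        # "real" reference count
        self.refs = list(refs)      # references tp_traverse would report
        self.gc_refs = 0

def find_unreachable(nodes):
    for n in nodes:                 # 1. copy the real refcount
        n.gc_refs = n.refcnt
    for n in nodes:                 # 2. subtract internal references
        for m in n.refs:
            m.gc_refs -= 1
    stack = [n for n in nodes if n.gc_refs > 0]
    reachable = set()
    while stack:                    # 3. propagate liveness from the roots
        n = stack.pop()
        if n not in reachable:
            reachable.add(n)
            stack.extend(n.refs)
    return [n for n in nodes if n not in reachable]
# ---------------------------------------------------------------------------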
- self.rrc_p_list_old.foreach(self._rrc_obj_fix_refcnt, None) - self.rrc_o_list_old.foreach(self._rrc_obj_fix_refcnt, None) - - # now all rawrefcounted roots or live border objects have a - # refcount > 0 - self._rrc_debug_check_consistency(print_label="rc-initialized") - - - def _rrc_collect_roots_init_list(self, pygclist): - from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY - from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT - pygchdr = pygclist.c_gc_next - while pygchdr <> pygclist: - refcnt = self.rrc_gc_as_pyobj(pygchdr).c_ob_refcnt - if refcnt >= REFCNT_FROM_PYPY_LIGHT: - refcnt -= REFCNT_FROM_PYPY_LIGHT - elif refcnt >= REFCNT_FROM_PYPY: - refcnt -= REFCNT_FROM_PYPY - self._rrc_pyobj_gc_refcnt_set(pygchdr, refcnt) - pygchdr = pygchdr.c_gc_next - - def _rrc_collect_roots_subtract_internal(self, pygclist): - pygchdr = pygclist.c_gc_next - while pygchdr <> pygclist: - pyobj = self.rrc_gc_as_pyobj(pygchdr) - self._rrc_traverse(pyobj, -1) - pygchdr = pygchdr.c_gc_next - - def _rrc_pyobj_gc_refcnt_set(self, pygchdr, refcnt): - pygchdr.c_gc_refs &= self.RAWREFCOUNT_REFS_MASK_FINALIZED - pygchdr.c_gc_refs |= refcnt << self.RAWREFCOUNT_REFS_SHIFT - - def _rrc_obj_save_refcnt(self, pyobject, ignore): - pyobj = self._pyobj(pyobject) - link = llmemory.cast_int_to_adr(pyobj.c_ob_pypy_link) - self.rrc_refcnt_dict.setitem(pyobject, link) - pyobj.c_ob_pypy_link = pyobj.c_ob_refcnt - - def _rrc_obj_fix_refcnt(self, pyobject, ignore): - pyobj = self._pyobj(pyobject) - #intobj = pyobj.c_ob_pypy_link - #obj = llmemory.cast_int_to_adr(intobj) - obj = self.rrc_refcnt_dict.get(pyobject) - gchdr = self.rrc_pyobj_as_gc(pyobj) - if gchdr <> lltype.nullptr(self.PYOBJ_GC_HDR): - rc = gchdr.c_gc_refs - refcnt = gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT - if rc == self.RAWREFCOUNT_REFS_UNTRACKED: - debug_print("gc obj not tracked", gchdr, ": obj", obj, - "cyclic-rc", rc) - else: - debug_print("gc obj tracked", gchdr, ": obj", obj, "real-rc", - refcnt, "gc-next", - gchdr.c_gc_next, "gc-prev", gchdr.c_gc_prev) - if self.header(obj).tid & (GCFLAG_VISITED | - GCFLAG_NO_HEAP_PTRS): - refcnt += 1 - self._rrc_pyobj_gc_refcnt_set(gchdr, refcnt) - - def _rrc_mark_rawrefcount(self): - if self._rrc_gc_list_is_empty(self.rrc_pyobj_list): - self._rrc_gc_list_init(self.rrc_pyobj_old_list) - else: - self._rrc_gc_list_move(self.rrc_pyobj_list, - self.rrc_pyobj_old_list) - # as long as new objects with cyclic a refcount > 0 or alive border - # objects are found, increment the refcount of all referenced objects - # of those newly found objects - found_alive = True - pyobj_old = self.rrc_pyobj_list - # - while found_alive: - found_alive = False - gchdr = self.rrc_pyobj_old_list.c_gc_next - while gchdr <> self.rrc_pyobj_old_list: - next_old = gchdr.c_gc_next - found_alive |= self._rrc_mark_rawrefcount_obj(gchdr, pyobj_old) - gchdr = next_old - # - # now all rawrefcounted objects, which are alive, have a cyclic - # refcount > 0 or are marked - - def _rrc_mark_rawrefcount_obj(self, gchdr, gchdr_move): - alive = (gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT) > 0 - pyobj = self.rrc_gc_as_pyobj(gchdr) - obj = llmemory.NULL - if pyobj.c_ob_pypy_link <> 0: - #intobj = pyobj.c_ob_pypy_link - #obj = llmemory.cast_int_to_adr(intobj) - pyobject = llmemory.cast_ptr_to_adr(pyobj) - obj = self.rrc_refcnt_dict.get(pyobject) - if not alive and self.header(obj).tid & ( - GCFLAG_VISITED | GCFLAG_NO_HEAP_PTRS): - # add fake refcount, to mark it as live - gchdr.c_gc_refs += 1 << self.RAWREFCOUNT_REFS_SHIFT - alive = True - if 
alive: - # remove from old list - next = gchdr.c_gc_next - next.c_gc_prev = gchdr.c_gc_prev - gchdr.c_gc_prev.c_gc_next = next - # add to new list (or not, if it is a tuple) - self._rrc_gc_list_add(gchdr_move, gchdr) - # increment refcounts - self._rrc_traverse(pyobj, 1) - # mark recursively, if it is a pypyobj - if pyobj.c_ob_pypy_link <> 0: - #intobj = pyobj.c_ob_pypy_link - #obj = llmemory.cast_int_to_adr(intobj) - self.objects_to_trace.append(obj) - self.visit_all_objects() - return alive - - def _rrc_find_garbage(self): - found_garbage = False - gchdr = self.rrc_pyobj_old_list.c_gc_next - while gchdr <> self.rrc_pyobj_old_list: - next_old = gchdr.c_gc_next - garbage = self.rrc_finalizer_type(gchdr) == \ - self.RAWREFCOUNT_FINALIZER_LEGACY - if garbage: - self._rrc_move_to_garbage(gchdr) - found_garbage = True - gchdr = next_old - return found_garbage - - def _rrc_mark_garbage(self): - found_garbage = True - # - while found_garbage: - found_garbage = False - gchdr = self.rrc_pyobj_old_list.c_gc_next - while gchdr <> self.rrc_pyobj_old_list: - next_old = gchdr.c_gc_next - alive = (gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT) > 0 - pyobj = self.rrc_gc_as_pyobj(gchdr) - if pyobj.c_ob_pypy_link <> 0: - #intobj = pyobj.c_ob_pypy_link - #obj = llmemory.cast_int_to_adr(intobj) - pyobject = llmemory.cast_ptr_to_adr(pyobj) - obj = self.rrc_refcnt_dict.get(pyobject) - if not alive and self.header(obj).tid & ( - GCFLAG_VISITED | GCFLAG_NO_HEAP_PTRS): - # add fake refcount, to mark it as live - gchdr.c_gc_refs += 1 << self.RAWREFCOUNT_REFS_SHIFT - alive = True - if alive: - self._rrc_move_to_garbage(gchdr) - found_garbage = True - gchdr = next_old - - def _rrc_move_to_garbage(self, gchdr): - pyobj = self.rrc_gc_as_pyobj(gchdr) - # remove from old list - next = gchdr.c_gc_next - next.c_gc_prev = gchdr.c_gc_prev - gchdr.c_gc_prev.c_gc_next = next - # add to beginning of pyobj_list - self._rrc_gc_list_add(self.rrc_pyobj_list, gchdr) - # set as new beginning (and optionally end) of - # pyobj_garbage_list (not a real list, just pointers to - # begin and end) - if self._rrc_gc_list_is_empty(self.rrc_pyobj_garbage_list): - self.rrc_pyobj_garbage_list.c_gc_prev = gchdr - self.rrc_pyobj_garbage_list.c_gc_next = gchdr - # mark referenced objects alive (so objects in the old list - # will be detected as garbage, as they should have a cyclic - # refcount of zero or an unmarked linked pypy object) - self._rrc_traverse(pyobj, 1) - if pyobj.c_ob_pypy_link <> 0: - #intobj = pyobj.c_ob_pypy_link - pyobject = llmemory.cast_ptr_to_adr(pyobj) - obj = self.rrc_refcnt_dict.get(pyobject) - #obj = llmemory.cast_int_to_adr(intobj) - self.rrc_garbage_to_trace.append(obj) - self.objects_to_trace.append(obj) - self.visit_all_objects() - - def _rrc_collect_obj(self, obj, ignored): - llop.debug_nonnull_pointer(lltype.Void, obj) - self.rrc_garbage_to_trace.append(obj) - _rrc_collect_obj._always_inline_ = True - - def _rrc_collect_ref_rec(self, root, ignored): - self._rrc_collect_obj(root.address[0], None) - - def _rrc_garbage_visit(self, obj): - # If GCFLAG_GARBAGE is set, remove the flag and trace the object - hdr = self.header(obj) - if not (hdr.tid & GCFLAG_GARBAGE): - return False - hdr.tid &= ~GCFLAG_GARBAGE - if self.has_gcptr(llop.extract_ushort(llgroup.HALFWORD, hdr.tid)): - self.trace(obj, self._rrc_collect_ref_rec, None) - return True - - def _rrc_check_finalizer(self): - # Check, if the cyclic isolate from the last collection cycle - # is reachable from outside, after the finalizers have been - # executed (and if 
all finalizers have been executed). - found_alive = self._rrc_gc_list_is_empty(self.rrc_pyobj_isolate_list) - if not found_alive: - found_alive = self._rrc_find_finalizer() - if not found_alive: - self._rrc_collect_roots(self.rrc_pyobj_old_list) - gchdr = self.rrc_pyobj_old_list.c_gc_next - while gchdr <> self.rrc_pyobj_old_list: - if (gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT) > 0: - found_alive = True - break - gchdr = gchdr.c_gc_next - if found_alive: - self._rrc_gc_list_merge(self.rrc_pyobj_old_list, - self.rrc_pyobj_list) - return False - else: - self._rrc_clear_weakref_callbacks() - self._rrc_gc_list_merge(self.rrc_pyobj_old_list, - self.rrc_pyobj_dead_list) - return True - - def _rrc_find_finalizer(self): - gchdr = self.rrc_pyobj_old_list.c_gc_next - while gchdr <> self.rrc_pyobj_old_list: - if self.rrc_finalizer_type(gchdr) == \ - self.RAWREFCOUNT_FINALIZER_MODERN: - return True - gchdr = gchdr.c_gc_next - return False - - def _rrc_visit(pyobj, self_ptr): - from rpython.rtyper.annlowlevel import cast_adr_to_nongc_instance - # - self_adr = rffi.cast(llmemory.Address, self_ptr) - self = cast_adr_to_nongc_instance(IncrementalMiniMarkGC, self_adr) - self._rrc_visit_action(pyobj, None) - return rffi.cast(rffi.INT_real, 0) - - def _rrc_visit_action(self, pyobj, ignore): - pygchdr = self.rrc_pyobj_as_gc(pyobj) - if pygchdr <> lltype.nullptr(self.PYOBJ_GC_HDR): - if pygchdr.c_gc_refs != self.RAWREFCOUNT_REFS_UNTRACKED: - pygchdr.c_gc_refs += self.rrc_refcnt_add << \ - self.RAWREFCOUNT_REFS_SHIFT - elif pyobj.c_ob_pypy_link != 0: - pyobj.c_ob_refcnt += self.rrc_refcnt_add - if self.rrc_refcnt_add > 0: - #intobj = pyobj.c_ob_pypy_link - #obj = llmemory.cast_int_to_adr(intobj) - pyobject = llmemory.cast_ptr_to_adr(pyobj) - obj = self.rrc_refcnt_dict.get(pyobject) - self.objects_to_trace.append(obj) - self.visit_all_objects() - - def _rrc_traverse(self, pyobj, refcnt_add): - from rpython.rlib.objectmodel import we_are_translated - from rpython.rtyper.annlowlevel import (cast_nongc_instance_to_adr, - llhelper) - # - self.rrc_refcnt_add = refcnt_add - if we_are_translated(): - callback_ptr = llhelper(self.RAWREFCOUNT_VISIT, - IncrementalMiniMarkGC._rrc_visit) - self_ptr = rffi.cast(rffi.VOIDP, cast_nongc_instance_to_adr(self)) - self.rrc_tp_traverse(pyobj, callback_ptr, self_ptr) - else: - self.rrc_tp_traverse(pyobj, self._rrc_visit_action, None) - - def _rrc_gc_list_new(self): - list = lltype.malloc(self.PYOBJ_GC_HDR, flavor='raw', immortal=True) - self._rrc_gc_list_init(list) - return list - - def _rrc_gc_list_init(self, pygclist): - pygclist.c_gc_next = pygclist - pygclist.c_gc_prev = pygclist - - def _rrc_gc_list_add(self, pygclist, gchdr): - next = pygclist.c_gc_next - pygclist.c_gc_next = gchdr - gchdr.c_gc_prev = pygclist - gchdr.c_gc_next = next - next.c_gc_prev = gchdr - - def _rrc_gc_list_pop(self, pygclist): - ret = pygclist.c_gc_next - pygclist.c_gc_next = ret.c_gc_next - ret.c_gc_next.c_gc_prev = pygclist - return ret - - def _rrc_gc_list_move(self, pygclist_source, pygclist_dest): - pygclist_dest.c_gc_next = pygclist_source.c_gc_next - pygclist_dest.c_gc_prev = pygclist_source.c_gc_prev - pygclist_dest.c_gc_next.c_gc_prev = pygclist_dest - pygclist_dest.c_gc_prev.c_gc_next = pygclist_dest - pygclist_source.c_gc_next = pygclist_source - pygclist_source.c_gc_prev = pygclist_source - - def _rrc_gc_list_merge(self, pygclist_source, pygclist_dest): - next = pygclist_dest.c_gc_next - next_old = pygclist_source.c_gc_next - prev_old = pygclist_source.c_gc_prev - 
pygclist_dest.c_gc_next = next_old - next_old.c_gc_prev = pygclist_dest - prev_old.c_gc_next = next - next.c_gc_prev = prev_old - pygclist_source.c_gc_next = pygclist_source - pygclist_source.c_gc_prev = pygclist_source - - def _rrc_gc_list_is_empty(self, pygclist): - return pygclist.c_gc_next == pygclist - - def _rrc_debug_check_consistency(self, print_label=None): - if self.DEBUG: - should_print = print_label is not None - if should_print: - debug_start("rrc-lists " + print_label) - self._rrc_debug_check_list(self.rrc_pyobj_list, - should_print, "rrc_pyobj_list") - self._rrc_debug_check_list(self.rrc_tuple_list, - should_print, "rrc_tuple_list") - self._rrc_debug_check_list(self.rrc_pyobj_old_list, - should_print, "rrc_pyobj_old_list") - self._rrc_debug_check_list(self.rrc_pyobj_dead_list, - should_print, "rrc_pyobj_dead_list") - self._rrc_debug_check_list(self.rrc_pyobj_isolate_list, - should_print, "rrc_pyobj_isolate_list") - # rrc_pyobj_garbage_list is not a real list, it just marks the - # first and the last object in rrc_pyobj_list, which are garbage - - if should_print: - debug_stop("rrc-lists " + print_label) - - def _rrc_debug_check_list(self, list, should_print, print_label): - if should_print: - debug_start(print_label) - gchdr = list.c_gc_next - prev = list - while gchdr <> list: - if should_print: - pyobj = self.rrc_gc_as_pyobj(gchdr) - intobj = pyobj.c_ob_pypy_link - debug_print("item", gchdr, ": pyobj", pyobj, - "cyclic refcnt", - gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT, - "refcnt", pyobj.c_ob_refcnt, - "link", intobj) - #if intobj: TODO fix - # obj = llmemory.cast_int_to_adr(intobj) - # marked = self.header(obj).tid & \ - # (GCFLAG_VISITED | GCFLAG_NO_HEAP_PTRS) - # debug_print(" linked obj", obj, ": marked", marked) - - ll_assert(gchdr.c_gc_next != lltype.nullptr(self.PYOBJ_GC_HDR), - "gc_next is null") - ll_assert(gchdr.c_gc_prev == prev, "gc_prev is inconsistent") - prev = gchdr - gchdr = gchdr.c_gc_next - ll_assert(list.c_gc_prev == prev, "gc_prev is inconsistent") - if should_print: - debug_stop(print_label) \ No newline at end of file + def check_no_more_rawrefcount_state(self): + ll_assert(self.rrc_enabled, "rawrefcount.init not called") + self.rrc_gc.check_no_more_rawrefcount_state() diff --git a/rpython/memory/gc/__init__.py b/rpython/memory/gc/rrc/__init__.py copy from rpython/memory/gc/__init__.py copy to rpython/memory/gc/rrc/__init__.py diff --git a/rpython/memory/gc/rrc/base.py b/rpython/memory/gc/rrc/base.py new file mode 100644 --- /dev/null +++ b/rpython/memory/gc/rrc/base.py @@ -0,0 +1,882 @@ +from rpython.rtyper.lltypesystem import lltype, llmemory, llgroup, rffi +from rpython.rtyper.lltypesystem.lloperation import llop +from rpython.rlib.debug import ll_assert, debug_print, debug_start, debug_stop + +def choose_rrc_gc_from_config(config): + if config.translation.rrcgc: + classes = {"mark": "mark.RawRefCountMarkGC", + "incmark": "incmark.RawRefCountIncMarkGC", + } + try: + modulename, classname = classes[config.translation.rrcgc].split( + '.') + except KeyError: + raise ValueError("unknown value for translation.rrcgc: %r" % ( + config.translation.rrcgc,)) + module = __import__("rpython.memory.gc.rrc." + modulename, + globals(), locals(), [classname]) + GCClass = getattr(module, classname) + return GCClass + else: + return None + +class RawRefCountBaseGC(object): + # Default state, no rawrefcount specific code is executed during normal marking. + STATE_DEFAULT = 0 + + # Here cyclic garbage only reachable from legacy finalizers is marked. 
+ STATE_MARKING = 1 + + # The state in which cyclic garbage with legacy finalizers is traced. + # Do not mark objects during this state, because we remove the flag + # during tracing and we do not want to trace those objects again. Also + # during this phase no new objects can be marked, as we are only building + # the list of cyclic garbage. + STATE_GARBAGE = 2 + + _ADDRARRAY = lltype.Array(llmemory.Address, hints={'nolength': True}) + PYOBJ_SNAPSHOT_OBJ = lltype.Struct('PyObject_Snapshot', + ('pyobj', llmemory.Address), + ('refcnt', lltype.Signed), + ('refcnt_internal', lltype.Signed), + ('refs_index', lltype.Signed), + ('refs_len', lltype.Signed)) + PYOBJ_SNAPSHOT = lltype.Array(PYOBJ_SNAPSHOT_OBJ, + hints={'nolength': True}) + PYOBJ_HDR = lltype.Struct('GCHdr_PyObject', + ('c_ob_refcnt', lltype.Signed), + ('c_ob_pypy_link', lltype.Signed)) + PYOBJ_HDR_PTR = lltype.Ptr(PYOBJ_HDR) + RAWREFCOUNT_DEALLOC_TRIGGER = lltype.Ptr(lltype.FuncType([], lltype.Void)) + RAWREFCOUNT_VISIT = lltype.Ptr(lltype.FuncType([PYOBJ_HDR_PTR, rffi.VOIDP], + rffi.INT_real)) + RAWREFCOUNT_TRAVERSE = lltype.Ptr(lltype.FuncType([PYOBJ_HDR_PTR, + RAWREFCOUNT_VISIT, + rffi.VOIDP], + lltype.Void)) + PYOBJ_GC_HDR_PTR = lltype.Ptr(lltype.ForwardReference()) + PYOBJ_GC_HDR = lltype.Struct('PyGC_Head', + ('c_gc_next', PYOBJ_GC_HDR_PTR), + ('c_gc_prev', PYOBJ_GC_HDR_PTR), + ('c_gc_refs', lltype.Signed)) + PYOBJ_GC_HDR_PTR.TO.become(PYOBJ_GC_HDR) + RAWREFCOUNT_GC_AS_PYOBJ = lltype.Ptr(lltype.FuncType([PYOBJ_GC_HDR_PTR], + PYOBJ_HDR_PTR)) + RAWREFCOUNT_PYOBJ_AS_GC = lltype.Ptr(lltype.FuncType([PYOBJ_HDR_PTR], + PYOBJ_GC_HDR_PTR)) + RAWREFCOUNT_FINALIZER_TYPE = lltype.Ptr(lltype.FuncType([PYOBJ_GC_HDR_PTR], + lltype.Signed)) + RAWREFCOUNT_CLEAR_WR_TYPE = lltype.Ptr(lltype.FuncType([llmemory.GCREF], + lltype.Void)) + RAWREFCOUNT_MAYBE_UNTRACK_TUPLE = \ + lltype.Ptr(lltype.FuncType([PYOBJ_HDR_PTR], lltype.Signed)) + RAWREFCOUNT_FINALIZER_NONE = 0 + RAWREFCOUNT_FINALIZER_MODERN = 1 + RAWREFCOUNT_FINALIZER_LEGACY = 2 + RAWREFCOUNT_REFS_SHIFT = 1 + RAWREFCOUNT_REFS_MASK_FINALIZED = 1 + RAWREFCOUNT_REFS_UNTRACKED = -2 << RAWREFCOUNT_REFS_SHIFT + + def _pyobj(self, pyobjaddr): + return llmemory.cast_adr_to_ptr(pyobjaddr, self.PYOBJ_HDR_PTR) + def _pygchdr(self, pygchdraddr): + return llmemory.cast_adr_to_ptr(pygchdraddr, self.PYOBJ_GC_HDR_PTR) + + def init(self, gc, gc_flags, dealloc_trigger_callback, tp_traverse, + pyobj_list, tuple_list, gc_as_pyobj, pyobj_as_gc, finalizer_type, + clear_weakref_callback, tuple_maybe_untrack): + # see pypy/doc/discussion/rawrefcount.rst + self.gc = gc + (self.GCFLAG_VISITED_RMY, self.GCFLAG_VISITED, + self.GCFLAG_NO_HEAP_PTRS, self.GCFLAG_GARBAGE) = gc_flags + self.p_list_young = self.gc.AddressStack() + self.p_list_old = self.gc.AddressStack() + self.o_list_young = self.gc.AddressStack() + self.o_list_old = self.gc.AddressStack() + self.p_dict = self.gc.AddressDict() # non-nursery keys only + self.p_dict_nurs = self.gc.AddressDict() # nursery keys only + self.dealloc_trigger_callback = dealloc_trigger_callback + self.dealloc_pending = self.gc.AddressStack() + self.refcnt_dict = self.gc.AddressDict() + self.tp_traverse = tp_traverse + self.pyobj_list = self._pygchdr(pyobj_list) + self.tuple_list = self._pygchdr(tuple_list) + self.pyobj_old_list = self._gc_list_new() + self.pyobj_isolate_list = self._gc_list_new() + self.pyobj_dead_list = self._gc_list_new() + self.pyobj_garbage_list = self._gc_list_new() + self.garbage_to_trace = self.gc.AddressStack() + self.gc_as_pyobj = gc_as_pyobj + 
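# ---------------------------------------------------------------------------
# Editorial sketch, not part of the changeset: c_gc_refs multiplexes two
# pieces of information.  The low bit (RAWREFCOUNT_REFS_MASK_FINALIZED) is a
# "finalized" flag, and the remaining bits hold the cyclic refcount shifted
# left by RAWREFCOUNT_REFS_SHIFT; RAWREFCOUNT_REFS_UNTRACKED is a negative
# sentinel that cannot collide with a valid shifted count.  Illustrative
# helpers in the spirit of _pyobj_gc_refcnt_set() defined further down
# (names are made up):
REFS_SHIFT = 1
REFS_MASK_FINALIZED = 1

def pack_gc_refs(refcnt, finalized):
    return (refcnt << REFS_SHIFT) | (REFS_MASK_FINALIZED if finalized else 0)

def unpack_refcnt(gc_refs):
    return gc_refs >> REFS_SHIFT

def is_finalized(gc_refs):
    return bool(gc_refs & REFS_MASK_FINALIZED)
# ---------------------------------------------------------------------------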
self.pyobj_as_gc = pyobj_as_gc + self.finalizer_type = finalizer_type + self.clear_weakref_callback = clear_weakref_callback + self.tuple_maybe_untrack = tuple_maybe_untrack + self.state = self.STATE_DEFAULT + self.cycle_enabled = True + + def create_link_pypy(self, gcobj, pyobject): + obj = llmemory.cast_ptr_to_adr(gcobj) + objint = llmemory.cast_adr_to_int(obj, "symbolic") + self._pyobj(pyobject).c_ob_pypy_link = objint + # + lst = self.p_list_young + if self.gc.is_in_nursery(obj): + dct = self.p_dict_nurs + else: + dct = self.p_dict + if not self.gc.is_young_object(obj): + lst = self.p_list_old + lst.append(pyobject) + dct.setitem(obj, pyobject) + + def create_link_pyobj(self, gcobj, pyobject): + obj = llmemory.cast_ptr_to_adr(gcobj) + if self.gc.is_young_object(obj): + self.o_list_young.append(pyobject) + else: + self.o_list_old.append(pyobject) + objint = llmemory.cast_adr_to_int(obj, "symbolic") + self._pyobj(pyobject).c_ob_pypy_link = objint + # there is no o_dict + + def from_obj(self, gcobj): + obj = llmemory.cast_ptr_to_adr(gcobj) + if self.gc.is_in_nursery(obj): + dct = self.p_dict_nurs + else: + dct = self.p_dict + return dct.get(obj) + + def to_obj(self, pyobject): + obj = llmemory.cast_int_to_adr(self._pyobj(pyobject).c_ob_pypy_link) + return llmemory.cast_adr_to_ptr(obj, llmemory.GCREF) + + def mark_deallocating(self, gcobj, pyobject): + obj = llmemory.cast_ptr_to_adr(gcobj) # should be a prebuilt obj + objint = llmemory.cast_adr_to_int(obj, "symbolic") + self._pyobj(pyobject).c_ob_pypy_link = objint + + def invoke_callback(self): + if (self.dealloc_pending.non_empty() or + not self._gc_list_is_empty(self.pyobj_isolate_list) or + not self._gc_list_is_empty(self.pyobj_dead_list) or + not self._gc_list_is_empty(self.pyobj_garbage_list)): + self.dealloc_trigger_callback() + + def next_dead(self): + if self.dealloc_pending.non_empty(): + return self.dealloc_pending.pop() + return llmemory.NULL + + def next_cyclic_isolate(self): + if not self._gc_list_is_empty(self.pyobj_isolate_list): + gchdr = self._gc_list_pop(self.pyobj_isolate_list) + self._gc_list_add(self.pyobj_old_list, gchdr) + return llmemory.cast_ptr_to_adr(self.gc_as_pyobj(gchdr)) + return llmemory.NULL + + def cyclic_garbage_head(self): + if not self._gc_list_is_empty(self.pyobj_dead_list): + return llmemory.cast_ptr_to_adr( + self.gc_as_pyobj(self.pyobj_dead_list.c_gc_next)) + else: + return llmemory.NULL + + def cyclic_garbage_remove(self): + gchdr = self.pyobj_dead_list.c_gc_next + # remove from old list + next = gchdr.c_gc_next + next.c_gc_prev = gchdr.c_gc_prev + gchdr.c_gc_prev.c_gc_next = next + # add to new list, may die later + next = self.pyobj_list.c_gc_next + self.pyobj_list.c_gc_next = gchdr + gchdr.c_gc_prev = self.pyobj_list + gchdr.c_gc_next = next + next.c_gc_prev = gchdr + + def next_garbage_pypy(self): + if self.garbage_to_trace.non_empty(): + # remove one object from the wavefront and move the wavefront + obj = self.garbage_to_trace.pop() + if self._garbage_visit(obj): + return llmemory.cast_adr_to_ptr(obj, llmemory.GCREF) + else: + return lltype.nullptr(llmemory.GCREF.TO) + else: + return lltype.nullptr(llmemory.GCREF.TO) + + def next_garbage_pyobj(self): + if self._gc_list_is_empty(self.pyobj_garbage_list): + return llmemory.NULL + else: + # pyobj_garbage_list is not a real list, it just points to + # the first (c_gc_next) and last (c_gc_prev) pyobject in the list + # of live objects that are garbage, so just fix the references + list = self.pyobj_garbage_list + gchdr = list.c_gc_next + if 
list.c_gc_prev == gchdr: + list.c_gc_next = list # reached end of list, reset it + else: + list.c_gc_next = gchdr.c_gc_next # move pointer foward + return llmemory.cast_ptr_to_adr(self.gc_as_pyobj(gchdr)) + + # --- Tracing --- + + def minor_trace(self): + length_estimate = self.p_dict_nurs.length() + self.p_dict_nurs.delete() + self.p_dict_nurs = self.gc.AddressDict(length_estimate) + self.p_list_young.foreach(self._minor_trace, self.gc.singleaddr) + + def _minor_trace(self, pyobject, singleaddr): + from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY + from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT + # + rc = self._pyobj(pyobject).c_ob_refcnt + if rc == REFCNT_FROM_PYPY or rc == REFCNT_FROM_PYPY_LIGHT: + pass # the corresponding object may die + else: + # force the corresponding object to be alive + intobj = self._pyobj(pyobject).c_ob_pypy_link + singleaddr.address[0] = llmemory.cast_int_to_adr(intobj) + self.gc._trace_drag_out1(singleaddr) + + def minor_collection_free(self): + ll_assert(self.p_dict_nurs.length() == 0, + "p_dict_nurs not empty 1") + lst = self.p_list_young + while lst.non_empty(): + self._minor_free(lst.pop(), self.p_list_old, self.p_dict) + lst = self.o_list_young + no_o_dict = self.gc.null_address_dict() + while lst.non_empty(): + self._minor_free(lst.pop(), self.o_list_old, no_o_dict) + + def _minor_free(self, pyobject, surviving_list, surviving_dict): + intobj = self._pyobj(pyobject).c_ob_pypy_link + obj = llmemory.cast_int_to_adr(intobj) + if self.gc.is_in_nursery(obj): + if self.gc.is_forwarded(obj): + # Common case: survives and moves + obj = self.gc.get_forwarding_address(obj) + intobj = llmemory.cast_adr_to_int(obj, "symbolic") + self._pyobj(pyobject).c_ob_pypy_link = intobj + surviving = True + if surviving_dict: + # Surviving nursery object: was originally in + # p_dict_nurs and now must be put into p_dict + surviving_dict.setitem(obj, pyobject) + else: + surviving = False + elif (bool(self.gc.young_rawmalloced_objects) and + self.gc.young_rawmalloced_objects.contains(obj)): + # young weakref to a young raw-malloced object + if self.gc.header(obj).tid & self.GCFLAG_VISITED_RMY: + surviving = True # survives, but does not move + else: + surviving = False + if surviving_dict: + # Dying young large object: was in p_dict, + # must be deleted + surviving_dict.setitem(obj, llmemory.NULL) + else: + ll_assert(False, "X_list_young contains non-young obj") + return + # + if surviving: + surviving_list.append(pyobject) + else: + self._free(pyobject) + + def _free(self, pyobject, major=False): + from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY + from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT + # + rc = self._pyobj(pyobject).c_ob_refcnt + if rc >= REFCNT_FROM_PYPY_LIGHT: + rc -= REFCNT_FROM_PYPY_LIGHT + if rc == 0: + pygchdr = self.pyobj_as_gc(self._pyobj(pyobject)) + if pygchdr <> lltype.nullptr(self.PYOBJ_GC_HDR): + next = pygchdr.c_gc_next + next.c_gc_prev = pygchdr.c_gc_prev + pygchdr.c_gc_prev.c_gc_next = next + lltype.free(self._pyobj(pyobject), flavor='raw') + else: + # can only occur if LIGHT is used in create_link_pyobj() + self._pyobj(pyobject).c_ob_refcnt = rc + self._pyobj(pyobject).c_ob_pypy_link = 0 + else: + ll_assert(rc >= REFCNT_FROM_PYPY, "refcount underflow?") + ll_assert(rc < int(REFCNT_FROM_PYPY_LIGHT * 0.99), + "refcount underflow from REFCNT_FROM_PYPY_LIGHT?") + rc -= REFCNT_FROM_PYPY + self._pyobj(pyobject).c_ob_pypy_link = 0 + if rc == 0: + self.dealloc_pending.append(pyobject) + # an object with refcnt == 0 cannot 
stay around waiting + # for its deallocator to be called. Some code (lxml) + # expects that tp_dealloc is called immediately when + # the refcnt drops to 0. If it isn't, we get some + # uncleared raw pointer that can still be used to access + # the object; but (PyObject *)raw_pointer is then bogus + # because after a Py_INCREF()/Py_DECREF() on it, its + # tp_dealloc is also called! + rc = 1 + self._pyobj(pyobject).c_ob_refcnt = rc + _free._always_inline_ = True + + def major_collection_trace(self): + if not self.cycle_enabled: + self._debug_check_consistency(print_label="begin-mark") + + # First, untrack all tuples with only non-gc rrc objects and promote + # all other tuples to the pyobj_list + self._untrack_tuples() + + # Only trace and mark rawrefcounted object if we are not doing + # something special, like building gc.garbage. + if (self.state == self.STATE_DEFAULT and self.cycle_enabled): + merged_old_list = False + # check objects with finalizers from last collection cycle + if not self._gc_list_is_empty(self.pyobj_old_list): + merged_old_list = self._check_finalizer() + # collect all rawrefcounted roots + self._collect_roots(self.pyobj_list) # TODO: from snapshot + if merged_old_list: + # set all refcounts to zero for objects in dead list + # (might have been incremented) by fix_refcnt + gchdr = self.pyobj_dead_list.c_gc_next + while gchdr <> self.pyobj_dead_list: + gchdr.c_gc_refs = 0 + gchdr = gchdr.c_gc_next + self._debug_check_consistency(print_label="roots-marked") + # mark all objects reachable from rawrefcounted roots + self._mark_rawrefcount() # TODO: from snapshot + self._debug_check_consistency(print_label="before-fin") + self.state = self.STATE_MARKING + if self._find_garbage(): # handle legacy finalizers # TODO: from snapshot + self._mark_garbage() # TODO: from snapshot + self._debug_check_consistency(print_label="end-legacy-fin") + self.state = self.STATE_DEFAULT + found_finalizer = self._find_finalizer() # modern finalizers # TODO: from snapshot + if found_finalizer: + self._gc_list_move(self.pyobj_old_list, + self.pyobj_isolate_list) + use_cylicrc = not found_finalizer + self._debug_check_consistency(print_label="end-mark-cyclic") + else: + use_cylicrc = False # don't sweep any objects in cyclic isolates + + # now mark all pypy objects at the border, depending on the results + debug_print("use_cylicrc", use_cylicrc) + self.p_list_old.foreach(self._major_trace, use_cylicrc) + self._debug_check_consistency(print_label="end-mark") + + # fix refcnt back + self.refcnt_dict.foreach(self._fix_refcnt_back, None) # TODO: from snapshot? 
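# ---------------------------------------------------------------------------
# Editorial sketch, not part of the changeset: during the marking phase the
# c_ob_pypy_link field is temporarily reused to hold the saved refcount
# (_obj_save_refcnt), while refcnt_dict remembers the original link so that
# _fix_refcnt_back(), invoked by the foreach just above, can restore both
# fields.  The same save/restore idea with a made-up stand-in class:
class FakePyObj(object):
    def __init__(self, refcnt, link):
        self.ob_refcnt = refcnt
        self.ob_pypy_link = link

def save_refcnt(pyobj, saved_links):          # like _obj_save_refcnt()
    saved_links[pyobj] = pyobj.ob_pypy_link
    pyobj.ob_pypy_link = pyobj.ob_refcnt

def fix_refcnt_back(pyobj, saved_links):      # like _fix_refcnt_back()
    pyobj.ob_refcnt = pyobj.ob_pypy_link
    pyobj.ob_pypy_link = saved_links[pyobj]
# ---------------------------------------------------------------------------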
+ self.refcnt_dict.delete() + self.refcnt_dict = self.gc.AddressDict() + + def _fix_refcnt_back(self, pyobject, link, ignore): + pyobj = self._pyobj(pyobject) + link_int = llmemory.cast_adr_to_int(link, "symbolic") + pyobj.c_ob_refcnt = pyobj.c_ob_pypy_link + pyobj.c_ob_pypy_link = link_int + + def _major_trace(self, pyobject, use_cylicrefcnt): + from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY + from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT + # + pyobj = self._pyobj(pyobject) + cyclic_rc = -42 + if use_cylicrefcnt: + pygchdr = self.pyobj_as_gc(pyobj) + if pygchdr != lltype.nullptr(self.PYOBJ_GC_HDR): + if pygchdr.c_gc_refs != self.RAWREFCOUNT_REFS_UNTRACKED: + rc = pygchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT + cyclic_rc = rc + else: + rc = pyobj.c_ob_refcnt + else: + rc = pyobj.c_ob_refcnt + else: + rc = pyobj.c_ob_refcnt + + if rc == REFCNT_FROM_PYPY or rc == REFCNT_FROM_PYPY_LIGHT or rc == 0: + pass # the corresponding object may die + else: + # force the corresponding object to be alive + debug_print("pyobj stays alive", pyobj, "rc", rc, "cyclic_rc", + cyclic_rc) + obj = self.refcnt_dict.get(pyobject) + self.gc.objects_to_trace.append(obj) + self.gc.visit_all_objects() + + def _major_trace_nongc(self, pyobject, ignore): + from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY + from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT + # + pyobj = self._pyobj(pyobject) + pygchdr = self.pyobj_as_gc(pyobj) + if pygchdr != lltype.nullptr(self.PYOBJ_GC_HDR): + if pygchdr.c_gc_refs != self.RAWREFCOUNT_REFS_UNTRACKED: + rc = 0 + else: + rc = pyobj.c_ob_refcnt + else: + rc = pyobj.c_ob_refcnt + + if rc == REFCNT_FROM_PYPY or rc == REFCNT_FROM_PYPY_LIGHT or rc == 0: + pass # the corresponding object may die + else: + # force the corresponding object to be alive + debug_print("pyobj stays alive", pyobj, "rc", rc) + #intobj = pyobj.c_ob_pypy_link + #obj = llmemory.cast_int_to_adr(intobj) + obj = self.refcnt_dict.get(pyobject) + self.gc.objects_to_trace.append(obj) + self.gc.visit_all_objects() + + def major_collection_free(self): + if self.state == self.STATE_DEFAULT: + self._debug_check_consistency() + if not self._gc_list_is_empty(self.pyobj_old_list): + self._gc_list_merge(self.pyobj_old_list, self.pyobj_dead_list) + self._debug_check_consistency(print_label="before-sweep") + + ll_assert(self.p_dict_nurs.length() == 0, "p_dict_nurs not empty 2") + length_estimate = self.p_dict.length() + self.p_dict.delete() + self.p_dict = new_p_dict = self.gc.AddressDict(length_estimate) + new_p_list = self.gc.AddressStack() + while self.p_list_old.non_empty(): + self._major_free(self.p_list_old.pop(), new_p_list, + new_p_dict) + self.p_list_old.delete() + self.p_list_old = new_p_list + # + new_o_list = self.gc.AddressStack() + no_o_dict = self.gc.null_address_dict() + while self.o_list_old.non_empty(): + self._major_free(self.o_list_old.pop(), new_o_list, no_o_dict) + self.o_list_old.delete() + self.o_list_old = new_o_list + + def _major_free(self, pyobject, surviving_list, surviving_dict): + # The pyobject survives if the corresponding obj survives. 
+ # This is true if the obj has one of the following two flags: + # * GCFLAG_VISITED: was seen during tracing + # * GCFLAG_NO_HEAP_PTRS: immortal object never traced (so far) + intobj = self._pyobj(pyobject).c_ob_pypy_link + obj = llmemory.cast_int_to_adr(intobj) + if self.gc.header(obj).tid & \ + (self.GCFLAG_VISITED | self.GCFLAG_NO_HEAP_PTRS): + surviving_list.append(pyobject) + if surviving_dict: + surviving_dict.insertclean(obj, pyobject) + else: + self._free(pyobject, True) + + + def _untrack_tuples(self): + gchdr = self.tuple_list.c_gc_next + while gchdr <> self.tuple_list: + gchdr_next = gchdr.c_gc_next + pyobj = self.gc_as_pyobj(gchdr) + result = self.tuple_maybe_untrack(pyobj) + if result == 1: # contains gc objects -> promote to pyobj list + next = gchdr.c_gc_next + next.c_gc_prev = gchdr.c_gc_prev + gchdr.c_gc_prev.c_gc_next = next + self._gc_list_add(self.pyobj_list, gchdr) + gchdr = gchdr_next + + def _collect_roots(self, pygclist): + # Initialize the cyclic refcount with the real refcount. + self._collect_roots_init_list(pygclist) + + # Save the real refcount of objects at border + self.p_list_old.foreach(self._obj_save_refcnt, None) + self.o_list_old.foreach(self._obj_save_refcnt, None) + + # Subtract all internal refcounts from the cyclic refcount + # of rawrefcounted objects + self._collect_roots_subtract_internal(pygclist) + + # For all non-gc pyobjects which have a refcount > 0, + # mark all reachable objects on the pypy side + self.p_list_old.foreach(self._major_trace_nongc, None) + + # For every object in this set, if it is marked, add 1 as a real + # refcount (p_list => pyobj stays alive if obj stays alive). + self.p_list_old.foreach(self._obj_fix_refcnt, None) + self.o_list_old.foreach(self._obj_fix_refcnt, None) + + # now all rawrefcounted roots or live border objects have a + # refcount > 0 + self._debug_check_consistency(print_label="rc-initialized") + + def _collect_roots_init_list(self, pygclist): + from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY + from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT + pygchdr = pygclist.c_gc_next + while pygchdr <> pygclist: + refcnt = self.gc_as_pyobj(pygchdr).c_ob_refcnt + if refcnt >= REFCNT_FROM_PYPY_LIGHT: + refcnt -= REFCNT_FROM_PYPY_LIGHT + elif refcnt >= REFCNT_FROM_PYPY: + refcnt -= REFCNT_FROM_PYPY + self._pyobj_gc_refcnt_set(pygchdr, refcnt) + pygchdr = pygchdr.c_gc_next + + def _collect_roots_subtract_internal(self, pygclist): + pygchdr = pygclist.c_gc_next + while pygchdr <> pygclist: + pyobj = self.gc_as_pyobj(pygchdr) + self._traverse(pyobj, -1) + pygchdr = pygchdr.c_gc_next + + def _pyobj_gc_refcnt_set(self, pygchdr, refcnt): + pygchdr.c_gc_refs &= self.RAWREFCOUNT_REFS_MASK_FINALIZED + pygchdr.c_gc_refs |= refcnt << self.RAWREFCOUNT_REFS_SHIFT + + def _obj_save_refcnt(self, pyobject, ignore): + pyobj = self._pyobj(pyobject) + link = llmemory.cast_int_to_adr(pyobj.c_ob_pypy_link) + self.refcnt_dict.setitem(pyobject, link) + pyobj.c_ob_pypy_link = pyobj.c_ob_refcnt + + def _obj_fix_refcnt(self, pyobject, ignore): + pyobj = self._pyobj(pyobject) + #intobj = pyobj.c_ob_pypy_link + #obj = llmemory.cast_int_to_adr(intobj) + obj = self.refcnt_dict.get(pyobject) + gchdr = self.pyobj_as_gc(pyobj) + if gchdr <> lltype.nullptr(self.PYOBJ_GC_HDR): + rc = gchdr.c_gc_refs + refcnt = gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT + if rc == self.RAWREFCOUNT_REFS_UNTRACKED: + debug_print("gc obj not tracked", gchdr, ": obj", obj, + "cyclic-rc", rc) + else: + debug_print("gc obj tracked", gchdr, ": obj", obj, 
"real-rc", + refcnt, "gc-next", + gchdr.c_gc_next, "gc-prev", gchdr.c_gc_prev) + if self.gc.header(obj).tid & (self.GCFLAG_VISITED | + self.GCFLAG_NO_HEAP_PTRS): + refcnt += 1 + self._pyobj_gc_refcnt_set(gchdr, refcnt) + + def _mark_rawrefcount(self): + if self._gc_list_is_empty(self.pyobj_list): + self._gc_list_init(self.pyobj_old_list) + else: + self._gc_list_move(self.pyobj_list, self.pyobj_old_list) + # as long as new objects with cyclic a refcount > 0 or alive border + # objects are found, increment the refcount of all referenced objects + # of those newly found objects + found_alive = True + pyobj_old = self.pyobj_list + # + while found_alive: # TODO: working set to improve performance? + found_alive = False + gchdr = self.pyobj_old_list.c_gc_next + while gchdr <> self.pyobj_old_list: + next_old = gchdr.c_gc_next + found_alive |= self._mark_rawrefcount_obj(gchdr, pyobj_old) + gchdr = next_old + # + # now all rawrefcounted objects, which are alive, have a cyclic + # refcount > 0 or are marked + + def _mark_rawrefcount_obj(self, gchdr, gchdr_move): + alive = (gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT) > 0 + pyobj = self.gc_as_pyobj(gchdr) + obj = llmemory.NULL + if pyobj.c_ob_pypy_link <> 0: + #intobj = pyobj.c_ob_pypy_link + #obj = llmemory.cast_int_to_adr(intobj) + pyobject = llmemory.cast_ptr_to_adr(pyobj) + obj = self.refcnt_dict.get(pyobject) + if not alive and self.gc.header(obj).tid & ( + self.GCFLAG_VISITED | self.GCFLAG_NO_HEAP_PTRS): + # add fake refcount, to mark it as live + gchdr.c_gc_refs += 1 << self.RAWREFCOUNT_REFS_SHIFT + alive = True + if alive: + # remove from old list + next = gchdr.c_gc_next + next.c_gc_prev = gchdr.c_gc_prev + gchdr.c_gc_prev.c_gc_next = next + # add to new list (or not, if it is a tuple) + self._gc_list_add(gchdr_move, gchdr) + # increment refcounts + self._traverse(pyobj, 1) + # mark recursively, if it is a pypyobj + if pyobj.c_ob_pypy_link <> 0: + #intobj = pyobj.c_ob_pypy_link + #obj = llmemory.cast_int_to_adr(intobj) + self.gc.objects_to_trace.append(obj) + self.gc.visit_all_objects() + return alive + + def _find_garbage(self): + found_garbage = False + gchdr = self.pyobj_old_list.c_gc_next + while gchdr <> self.pyobj_old_list: + next_old = gchdr.c_gc_next + garbage = self.finalizer_type(gchdr) == \ + self.RAWREFCOUNT_FINALIZER_LEGACY + if garbage: + self._move_to_garbage(gchdr) + found_garbage = True + gchdr = next_old + return found_garbage + + def _mark_garbage(self): + found_garbage = True + # + while found_garbage: + found_garbage = False + gchdr = self.pyobj_old_list.c_gc_next + while gchdr <> self.pyobj_old_list: + next_old = gchdr.c_gc_next + alive = (gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT) > 0 + pyobj = self.gc_as_pyobj(gchdr) + if pyobj.c_ob_pypy_link <> 0: + #intobj = pyobj.c_ob_pypy_link + #obj = llmemory.cast_int_to_adr(intobj) + pyobject = llmemory.cast_ptr_to_adr(pyobj) + obj = self.refcnt_dict.get(pyobject) + if not alive and self.gc.header(obj).tid & ( + self.GCFLAG_VISITED | self.GCFLAG_NO_HEAP_PTRS): + # add fake refcount, to mark it as live + gchdr.c_gc_refs += 1 << self.RAWREFCOUNT_REFS_SHIFT + alive = True + if alive: + self._move_to_garbage(gchdr) + found_garbage = True + gchdr = next_old + + def _move_to_garbage(self, gchdr): + pyobj = self.gc_as_pyobj(gchdr) + # remove from old list + next = gchdr.c_gc_next + next.c_gc_prev = gchdr.c_gc_prev + gchdr.c_gc_prev.c_gc_next = next + # add to beginning of pyobj_list + self._gc_list_add(self.pyobj_list, gchdr) + # set as new beginning (and optionally end) of + 
# pyobj_garbage_list (not a real list, just pointers to + # begin and end) + if self._gc_list_is_empty(self.pyobj_garbage_list): + self.pyobj_garbage_list.c_gc_prev = gchdr + self.pyobj_garbage_list.c_gc_next = gchdr + # mark referenced objects alive (so objects in the old list + # will be detected as garbage, as they should have a cyclic + # refcount of zero or an unmarked linked pypy object) + self._traverse(pyobj, 1) + if pyobj.c_ob_pypy_link <> 0: + #intobj = pyobj.c_ob_pypy_link + pyobject = llmemory.cast_ptr_to_adr(pyobj) + obj = self.refcnt_dict.get(pyobject) + #obj = llmemory.cast_int_to_adr(intobj) + self.garbage_to_trace.append(obj) + self.gc.objects_to_trace.append(obj) + self.gc.visit_all_objects() + + def _collect_obj(self, obj, ignored): + llop.debug_nonnull_pointer(lltype.Void, obj) + self.garbage_to_trace.append(obj) + _collect_obj._always_inline_ = True + + def _collect_ref_rec(self, root, ignored): + self._collect_obj(root.address[0], None) + + def _garbage_visit(self, obj): + # If GCFLAG_GARBAGE is set, remove the flag and trace the object + hdr = self.gc.header(obj) + if not (hdr.tid & self.GCFLAG_GARBAGE): + return False + hdr.tid &= ~self.GCFLAG_GARBAGE + if self.gc.has_gcptr(llop.extract_ushort(llgroup.HALFWORD, hdr.tid)): + self.gc.trace(obj, self._collect_ref_rec, None) + return True + + def _check_finalizer(self): + # Check, if the cyclic isolate from the last collection cycle + # is reachable from outside, after the finalizers have been + # executed (and if all finalizers have been executed). + found_alive = self._gc_list_is_empty(self.pyobj_isolate_list) + if not found_alive: + found_alive = self._find_finalizer() + if not found_alive: + self._collect_roots(self.pyobj_old_list) + gchdr = self.pyobj_old_list.c_gc_next + while gchdr <> self.pyobj_old_list: + if (gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT) > 0: + found_alive = True + break + gchdr = gchdr.c_gc_next + if found_alive: + self._gc_list_merge(self.pyobj_old_list, self.pyobj_list) + return False + else: + self._gc_list_merge(self.pyobj_old_list, self.pyobj_dead_list) + return True + + def _find_finalizer(self): + gchdr = self.pyobj_old_list.c_gc_next + while gchdr <> self.pyobj_old_list: + if self.finalizer_type(gchdr) == \ + self.RAWREFCOUNT_FINALIZER_MODERN: + return True + gchdr = gchdr.c_gc_next + return False + + def _visit(pyobj, self_ptr): + from rpython.rtyper.annlowlevel import cast_adr_to_nongc_instance + # + self_adr = rffi.cast(llmemory.Address, self_ptr) + self = cast_adr_to_nongc_instance(RawRefCountBaseGC, self_adr) + self._visit_action(pyobj, None) + return rffi.cast(rffi.INT_real, 0) + + def _visit_action(self, pyobj, ignore): + pygchdr = self.pyobj_as_gc(pyobj) + if pygchdr <> lltype.nullptr(self.PYOBJ_GC_HDR): + if pygchdr.c_gc_refs != self.RAWREFCOUNT_REFS_UNTRACKED: + pygchdr.c_gc_refs += self.refcnt_add << \ + self.RAWREFCOUNT_REFS_SHIFT + elif pyobj.c_ob_pypy_link != 0: + pyobj.c_ob_refcnt += self.refcnt_add + if self.refcnt_add > 0: + #intobj = pyobj.c_ob_pypy_link + #obj = llmemory.cast_int_to_adr(intobj) + pyobject = llmemory.cast_ptr_to_adr(pyobj) + obj = self.refcnt_dict.get(pyobject) + self.gc.objects_to_trace.append(obj) + self.gc.visit_all_objects() + + def _traverse(self, pyobj, refcnt_add): + from rpython.rlib.objectmodel import we_are_translated + from rpython.rtyper.annlowlevel import (cast_nongc_instance_to_adr, + llhelper) + # + self.refcnt_add = refcnt_add + if we_are_translated(): + callback_ptr = llhelper(self.RAWREFCOUNT_VISIT, + RawRefCountBaseGC._visit) + 
self_ptr = rffi.cast(rffi.VOIDP, cast_nongc_instance_to_adr(self)) + self.tp_traverse(pyobj, callback_ptr, self_ptr) + else: + self.tp_traverse(pyobj, self._visit_action, None) + + # --- Helpers --- + + def _gc_list_new(self): + list = lltype.malloc(self.PYOBJ_GC_HDR, flavor='raw', immortal=True) + self._gc_list_init(list) + return list + + def _gc_list_init(self, pygclist): + pygclist.c_gc_next = pygclist + pygclist.c_gc_prev = pygclist + + def _gc_list_add(self, pygclist, gchdr): + next = pygclist.c_gc_next + pygclist.c_gc_next = gchdr + gchdr.c_gc_prev = pygclist + gchdr.c_gc_next = next + next.c_gc_prev = gchdr + + def _gc_list_pop(self, pygclist): + ret = pygclist.c_gc_next + pygclist.c_gc_next = ret.c_gc_next + ret.c_gc_next.c_gc_prev = pygclist + return ret + + def _gc_list_move(self, pygclist_source, pygclist_dest): + pygclist_dest.c_gc_next = pygclist_source.c_gc_next + pygclist_dest.c_gc_prev = pygclist_source.c_gc_prev + pygclist_dest.c_gc_next.c_gc_prev = pygclist_dest + pygclist_dest.c_gc_prev.c_gc_next = pygclist_dest + pygclist_source.c_gc_next = pygclist_source + pygclist_source.c_gc_prev = pygclist_source + + def _gc_list_merge(self, pygclist_source, pygclist_dest): + next = pygclist_dest.c_gc_next + next_old = pygclist_source.c_gc_next + prev_old = pygclist_source.c_gc_prev + pygclist_dest.c_gc_next = next_old + next_old.c_gc_prev = pygclist_dest + prev_old.c_gc_next = next + next.c_gc_prev = prev_old + pygclist_source.c_gc_next = pygclist_source + pygclist_source.c_gc_prev = pygclist_source + + def _gc_list_is_empty(self, pygclist): + return pygclist.c_gc_next == pygclist + + # --- Tests / Debugging --- + + def check_no_more_rawrefcount_state(self): + "NOT_RPYTHON: for tests" + assert self.p_list_young.length() == 0 + assert self.p_list_old .length() == 0 + assert self.o_list_young.length() == 0 + assert self.o_list_old .length() == 0 + def check_value_is_null(key, value, ignore): + assert value == llmemory.NULL + self.p_dict.foreach(check_value_is_null, None) + self.p_dict_nurs.foreach(check_value_is_null, None) + + def _debug_check_consistency(self, print_label=None): + if self.gc.DEBUG: + should_print = print_label is not None + if should_print: + debug_start("rrc-lists " + print_label) + self._debug_check_list(self.pyobj_list, should_print, "pyobj_list") + self._debug_check_list(self.tuple_list, should_print, "tuple_list") + self._debug_check_list(self.pyobj_old_list, should_print, + "pyobj_old_list") + self._debug_check_list(self.pyobj_dead_list, should_print, + "pyobj_dead_list") + self._debug_check_list(self.pyobj_isolate_list, should_print, + "pyobj_isolate_list") + # pyobj_garbage_list is not a real list, it just marks the + # first and the last object in pyobj_list, which are garbage + + if should_print: + debug_stop("rrc-lists " + print_label) + + def _debug_check_list(self, list, should_print, print_label): + if should_print: + debug_start(print_label) + gchdr = list.c_gc_next + prev = list + while gchdr <> list: + if should_print: + pyobj = self.gc_as_pyobj(gchdr) From pypy.commits at gmail.com Fri Aug 16 03:08:16 2019 From: pypy.commits at gmail.com (stevie_92) Date: Fri, 16 Aug 2019 00:08:16 -0700 (PDT) Subject: [pypy-commit] pypy cpyext-gc-cycle: WIP: adapted incremental rrc to use snapshot (finalizers still missing) Message-ID: <5d565660.1c69fb81.ce5b9.95c5@mx.google.com> Author: Stefan Beyer Branch: cpyext-gc-cycle Changeset: r97189:615c66be0a6a Date: 2019-08-14 17:43 +0200 http://bitbucket.org/pypy/pypy/changeset/615c66be0a6a/ Log: WIP: adapted 
incremental rrc to use snapshot (finalizers still missing) diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py --- a/rpython/memory/gc/incminimark.py +++ b/rpython/memory/gc/incminimark.py @@ -2392,22 +2392,28 @@ self.more_objects_to_trace = swap self.visit_all_objects() + rrc_finished = False + if (not self.objects_to_trace.non_empty() and + not self.more_objects_to_trace.non_empty()): + # + # 'prebuilt_root_objects' might have grown since + # we scanned it in collect_roots() (rare case). Rescan. + self.collect_nonstack_roots() + self.visit_all_objects() + # + # If enabled, do a major collection step for rrc objects. + if self.rrc_enabled: + while not rrc_finished: # TODO: remove this line to do incremental collection + rrc_finished = self.rrc_gc.major_collection_trace_step() + else: + rrc_finished = True + # XXX A simplifying assumption that should be checked, # finalizers/weak references are rare and short which means that # they do not need a separate state and do not need to be # made incremental. # For now, the same applies to rawrefcount'ed objects. - if (not self.objects_to_trace.non_empty() and - not self.more_objects_to_trace.non_empty()): - # - # First, 'prebuilt_root_objects' might have grown since - # we scanned it in collect_roots() (rare case). Rescan. - self.collect_nonstack_roots() - self.visit_all_objects() - # - if self.rrc_enabled: - self.rrc_gc.major_collection_trace() - # + if rrc_finished: ll_assert(not (self.probably_young_objects_with_finalizers .non_empty()), "probably_young_objects_with_finalizers should be empty") @@ -2723,7 +2729,7 @@ hdr.tid |= GCFLAG_VISITED | GCFLAG_TRACK_YOUNG_PTRS if self.rrc_enabled and \ - self.rrc_gc.state == RawRefCountBaseGC.STATE_MARKING: + self.rrc_gc.state == RawRefCountBaseGC.STATE_GARBAGE_MARKING: hdr.tid |= GCFLAG_GARBAGE if self.has_gcptr(llop.extract_ushort(llgroup.HALFWORD, hdr.tid)): @@ -3155,7 +3161,7 @@ def rawrefcount_end_garbage(self): ll_assert(self.rrc_enabled, "rawrefcount.init not called") - self.rrc_gc.state = RawRefCountBaseGC.STATE_DEFAULT + self.rrc_gc.state = RawRefCountBaseGC.STATE_MARKING def rawrefcount_next_garbage_pypy(self): ll_assert(self.rrc_enabled, "rawrefcount.init not called") diff --git a/rpython/memory/gc/rrc/base.py b/rpython/memory/gc/rrc/base.py --- a/rpython/memory/gc/rrc/base.py +++ b/rpython/memory/gc/rrc/base.py @@ -21,26 +21,31 @@ return None class RawRefCountBaseGC(object): - # Default state, no rawrefcount specific code is executed during normal marking. + # Default state. STATE_DEFAULT = 0 + # Marking state. + STATE_MARKING = 1 + # Here cyclic garbage only reachable from legacy finalizers is marked. - STATE_MARKING = 1 + STATE_GARBAGE_MARKING = 2 # The state in which cyclic garbage with legacy finalizers is traced. # Do not mark objects during this state, because we remove the flag # during tracing and we do not want to trace those objects again. Also # during this phase no new objects can be marked, as we are only building # the list of cyclic garbage. 
- STATE_GARBAGE = 2 + STATE_GARBAGE = 3 _ADDRARRAY = lltype.Array(llmemory.Address, hints={'nolength': True}) PYOBJ_SNAPSHOT_OBJ = lltype.Struct('PyObject_Snapshot', ('pyobj', llmemory.Address), ('refcnt', lltype.Signed), - ('refcnt_internal', lltype.Signed), + ('refcnt_external', lltype.Signed), ('refs_index', lltype.Signed), - ('refs_len', lltype.Signed)) + ('refs_len', lltype.Signed), + ('pypy_link', lltype.Signed)) + PYOBJ_SNAPSHOT_OBJ_PTR = lltype.Ptr(PYOBJ_SNAPSHOT_OBJ) PYOBJ_SNAPSHOT = lltype.Array(PYOBJ_SNAPSHOT_OBJ, hints={'nolength': True}) PYOBJ_HDR = lltype.Struct('GCHdr_PyObject', @@ -327,57 +332,8 @@ self._pyobj(pyobject).c_ob_refcnt = rc _free._always_inline_ = True - def major_collection_trace(self): - if not self.cycle_enabled: - self._debug_check_consistency(print_label="begin-mark") - - # First, untrack all tuples with only non-gc rrc objects and promote - # all other tuples to the pyobj_list - self._untrack_tuples() - - # Only trace and mark rawrefcounted object if we are not doing - # something special, like building gc.garbage. - if (self.state == self.STATE_DEFAULT and self.cycle_enabled): - merged_old_list = False - # check objects with finalizers from last collection cycle - if not self._gc_list_is_empty(self.pyobj_old_list): - merged_old_list = self._check_finalizer() - # collect all rawrefcounted roots - self._collect_roots(self.pyobj_list) # TODO: from snapshot - if merged_old_list: - # set all refcounts to zero for objects in dead list - # (might have been incremented) by fix_refcnt - gchdr = self.pyobj_dead_list.c_gc_next - while gchdr <> self.pyobj_dead_list: - gchdr.c_gc_refs = 0 - gchdr = gchdr.c_gc_next - self._debug_check_consistency(print_label="roots-marked") - # mark all objects reachable from rawrefcounted roots - self._mark_rawrefcount() # TODO: from snapshot - self._debug_check_consistency(print_label="before-fin") - self.state = self.STATE_MARKING - if self._find_garbage(): # handle legacy finalizers # TODO: from snapshot - self._mark_garbage() # TODO: from snapshot - self._debug_check_consistency(print_label="end-legacy-fin") - self.state = self.STATE_DEFAULT - found_finalizer = self._find_finalizer() # modern finalizers # TODO: from snapshot - if found_finalizer: - self._gc_list_move(self.pyobj_old_list, - self.pyobj_isolate_list) - use_cylicrc = not found_finalizer - self._debug_check_consistency(print_label="end-mark-cyclic") - else: - use_cylicrc = False # don't sweep any objects in cyclic isolates - - # now mark all pypy objects at the border, depending on the results - debug_print("use_cylicrc", use_cylicrc) - self.p_list_old.foreach(self._major_trace, use_cylicrc) - self._debug_check_consistency(print_label="end-mark") - - # fix refcnt back - self.refcnt_dict.foreach(self._fix_refcnt_back, None) # TODO: from snapshot? 
- self.refcnt_dict.delete() - self.refcnt_dict = self.gc.AddressDict() + def major_collection_trace_step(self): + return True def _fix_refcnt_back(self, pyobject, link, ignore): pyobj = self._pyobj(pyobject) @@ -385,9 +341,10 @@ pyobj.c_ob_refcnt = pyobj.c_ob_pypy_link pyobj.c_ob_pypy_link = link_int - def _major_trace(self, pyobject, use_cylicrefcnt): + def _major_trace(self, pyobject, flags): from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT + (use_cylicrefcnt, use_dict) = flags # pyobj = self._pyobj(pyobject) cyclic_rc = -42 @@ -410,11 +367,15 @@ # force the corresponding object to be alive debug_print("pyobj stays alive", pyobj, "rc", rc, "cyclic_rc", cyclic_rc) - obj = self.refcnt_dict.get(pyobject) + if use_dict: + obj = self.refcnt_dict.get(pyobject) + else: + intobj = pyobj.c_ob_pypy_link + obj = llmemory.cast_int_to_adr(intobj) self.gc.objects_to_trace.append(obj) self.gc.visit_all_objects() - def _major_trace_nongc(self, pyobject, ignore): + def _major_trace_nongc(self, pyobject, use_dict): from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT # @@ -433,9 +394,11 @@ else: # force the corresponding object to be alive debug_print("pyobj stays alive", pyobj, "rc", rc) - #intobj = pyobj.c_ob_pypy_link - #obj = llmemory.cast_int_to_adr(intobj) - obj = self.refcnt_dict.get(pyobject) + if use_dict: + obj = self.refcnt_dict.get(pyobject) + else: + intobj = pyobj.c_ob_pypy_link + obj = llmemory.cast_int_to_adr(intobj) self.gc.objects_to_trace.append(obj) self.gc.visit_all_objects() @@ -507,7 +470,7 @@ # For all non-gc pyobjects which have a refcount > 0, # mark all reachable objects on the pypy side - self.p_list_old.foreach(self._major_trace_nongc, None) + self.p_list_old.foreach(self._major_trace_nongc, True) # For every object in this set, if it is marked, add 1 as a real # refcount (p_list => pyobj stays alive if obj stays alive). 
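The incremental collector that this changeset starts to fill in works on a snapshot of the rawrefcounted world: every tracked PyObject is copied into a PYOBJ_SNAPSHOT_OBJ together with its refcount (minus the REFCNT_FROM_PYPY bias) and its outgoing references, every reference that originates inside the snapshot is subtracted, and only objects that keep a positive external count -- or that are reachable from such an object or from a live PyPy object -- are considered alive. A simplified, pure-Python sketch of that subtract-and-mark idea (the names here are illustrative only, not the RPython structures from the diff):

    class SnapObj(object):
        def __init__(self, refcnt):
            self.refcnt_external = refcnt   # starts out as the real refcount
            self.refs = []                  # snapshot of outgoing references

    def find_cyclic_garbage(snapshot):
        # 1) subtract internal references: afterwards refcnt_external only
        #    counts references coming from outside the snapshot
        for obj in snapshot:
            for ref in obj.refs:
                ref.refcnt_external -= 1
        # 2) mark everything reachable from an externally referenced object
        alive = set()
        stack = [obj for obj in snapshot if obj.refcnt_external > 0]
        while stack:
            obj = stack.pop()
            if obj not in alive:
                alive.add(obj)
                stack.extend(obj.refs)
        # 3) whatever is left is unreachable from outside: cyclic garbage
        return [obj for obj in snapshot if obj not in alive]

    # two objects that only reference each other are detected as a dead cycle
    a = SnapObj(1); b = SnapObj(1)
    a.refs.append(b); b.refs.append(a)
    assert find_cyclic_garbage([a, b]) == [a, b]

Doing the marking on a private copy like this is what should eventually let the work be split across several major_collection_trace_step() calls, since the real refcounts may keep changing in between.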
diff --git a/rpython/memory/gc/rrc/incmark.py b/rpython/memory/gc/rrc/incmark.py --- a/rpython/memory/gc/rrc/incmark.py +++ b/rpython/memory/gc/rrc/incmark.py @@ -1,9 +1,157 @@ from rpython.rtyper.lltypesystem import lltype, llmemory from rpython.rtyper.lltypesystem import rffi from rpython.memory.gc.rrc.base import RawRefCountBaseGC +from rpython.rlib.debug import ll_assert, debug_print, debug_start, debug_stop class RawRefCountIncMarkGC(RawRefCountBaseGC): + def major_collection_trace_step(self): + if not self.cycle_enabled or self.state == self.STATE_GARBAGE: + self._debug_check_consistency(print_label="begin-mark") + self.p_list_old.foreach(self._major_trace, (False, False)) + self._debug_check_consistency(print_label="end-mark") + return True + + elif self.state == self.STATE_DEFAULT: + # First, untrack all tuples with only non-gc rrc objects and promote + # all other tuples to the pyobj_list + self._untrack_tuples() + + merged_old_list = False + # check objects with finalizers from last collection cycle + if not self._gc_list_is_empty(self.pyobj_old_list): + merged_old_list = self._check_finalizer() + + # For all non-gc pyobjects which have a refcount > 0, + # mark all reachable objects on the pypy side + self.p_list_old.foreach(self._major_trace_nongc, False) + + # Now take a snapshot + self._take_snapshot(self.pyobj_list) + + # collect all rawrefcounted roots + self._collect_roots(self.pyobj_list) + + if merged_old_list: + # set all refcounts to zero for objects in dead list + # (might have been incremented) by fix_refcnt + gchdr = self.pyobj_dead_list.c_gc_next + while gchdr <> self.pyobj_dead_list: + if (gchdr.c_gc_refs > 0 and gchdr.c_gc_refs != + self.RAWREFCOUNT_REFS_UNTRACKED): + pyobj = self.snapshot_objs[gchdr.c_gc_refs - 1] + pyobj.refcnt_external = 0 + gchdr = gchdr.c_gc_next + + self._debug_check_consistency(print_label="roots-marked") + self.state = self.STATE_MARKING + return False + + elif self.state == self.STATE_MARKING: + # mark all objects reachable from rawrefcounted roots + self._mark_rawrefcount() + + self._debug_check_consistency(print_label="before-fin") + self.state = self.STATE_GARBAGE_MARKING + return False + + elif self.state == self.STATE_GARBAGE_MARKING: + #if self._find_garbage(): # handle legacy finalizers # TODO: from snapshot + # self._mark_garbage() # TODO: from snapshot + # self._debug_check_consistency(print_label="end-legacy-fin") + self.state = self.STATE_DEFAULT + + # We are finished with marking, now finish things up + #found_finalizer = self._find_finalizer() # modern finalizers # TODO: from snapshot + #if found_finalizer: + # self._gc_list_move(self.pyobj_old_list, + # self.pyobj_isolate_list) + #use_cylicrc = not found_finalizer + use_cylicrc = True + + # now move all dead objs still in pyob_list to garbage + # dead -> pyobj_old_list + # live -> set cyclic refcount to > 0 + pygchdr = self.pyobj_list.c_gc_next + while pygchdr <> self.pyobj_list: + next_old = pygchdr.c_gc_next + snapobj = self.snapshot_objs[pygchdr.c_gc_refs - 1] + pygchdr.c_gc_refs = snapobj.refcnt_external + if snapobj.refcnt_external == 0: + # remove from old list + next = pygchdr.c_gc_next + next.c_gc_prev = pygchdr.c_gc_prev + pygchdr.c_gc_prev.c_gc_next = next + # add to new list (or not, if it is a tuple) + self._gc_list_add(self.pyobj_old_list, pygchdr) + pygchdr = next_old + + # now mark all pypy objects at the border, depending on the results + self._debug_check_consistency(print_label="end-mark-cyclic") + debug_print("use_cylicrc", use_cylicrc) + 
self.p_list_old.foreach(self._major_trace, (use_cylicrc, False)) + self._debug_check_consistency(print_label="end-mark") + self._discard_snapshot() + return True + + def _collect_roots(self, pygclist): + # Subtract all internal refcounts from the cyclic refcount + # of rawrefcounted objects + for i in range(0, self.total_objs): + obj = self.snapshot_objs[i] + for j in range(0, obj.refs_len): + addr = self.snapshot_refs[obj.refs_index + j] + obj_ref = llmemory.cast_adr_to_ptr(addr, + self.PYOBJ_SNAPSHOT_OBJ_PTR) + obj_ref.refcnt_external -= 1 + + # now all rawrefcounted roots or live border objects have a + # refcount > 0 + + def _mark_rawrefcount(self): + self._gc_list_init(self.pyobj_old_list) + # as long as new objects with cyclic a refcount > 0 or alive border + # objects are found, increment the refcount of all referenced objects + # of those newly found objects + found_alive = True + # + while found_alive: # TODO: working set to improve performance? + found_alive = False + for i in range(0, self.total_objs): + obj = self.snapshot_objs[i] + found_alive |= self._mark_rawrefcount_obj(obj) + # + # now all rawrefcounted objects, which are alive, have a cyclic + # refcount > 0 or are marked + + def _mark_rawrefcount_obj(self, snapobj): + if snapobj.refcnt == 0: # hack + return False + + alive = snapobj.refcnt_external > 0 + if snapobj.pypy_link <> 0: + intobj = snapobj.pypy_link + obj = llmemory.cast_int_to_adr(intobj) + if not alive and self.gc.header(obj).tid & ( + self.GCFLAG_VISITED | self.GCFLAG_NO_HEAP_PTRS): + alive = True + snapobj.refcnt_external += 1 + if alive: + # increment refcounts + for j in range(0, snapobj.refs_len): + addr = self.snapshot_refs[snapobj.refs_index + j] + obj_ref = llmemory.cast_adr_to_ptr(addr, + self.PYOBJ_SNAPSHOT_OBJ_PTR) + obj_ref.refcnt_external += 1 + # mark recursively, if it is a pypyobj + if snapobj.pypy_link <> 0: + self.gc.objects_to_trace.append(obj) + self.gc.visit_all_objects() + + # remove from old list, TODO: hack -> working set might be better + snapobj.refcnt = 0 + return alive + def _take_snapshot(self, pygclist): from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT @@ -26,6 +174,8 @@ self.snapshot_objs = lltype.malloc(self.PYOBJ_SNAPSHOT, total_objs, flavor='raw', track_allocation=False) + self.total_objs = total_objs + objs_index = 0 refs_index = 0 pygchdr = pygclist.c_gc_next @@ -36,17 +186,29 @@ refcnt -= REFCNT_FROM_PYPY_LIGHT elif refcnt >= REFCNT_FROM_PYPY: refcnt -= REFCNT_FROM_PYPY + if pyobj.c_ob_pypy_link != 0: + addr = llmemory.cast_int_to_adr(pyobj.c_ob_pypy_link) + if self.gc.header(addr).tid & (self.GCFLAG_VISITED | + self.GCFLAG_NO_HEAP_PTRS): + refcnt += 1 + pygchdr.c_gc_refs = objs_index + 1 obj = self.snapshot_objs[objs_index] obj.pyobj = llmemory.cast_ptr_to_adr(pyobj) - obj.refcnt = refcnt - obj.refcnt_internal = 0 + obj.refcnt = 1 + obj.refcnt_external = refcnt obj.refs_index = refs_index obj.refs_len = 0 + obj.pypy_link = pyobj.c_ob_pypy_link self.snapshot_curr = obj self._take_snapshot_traverse(pyobj) objs_index += 1 refs_index += obj.refs_len pygchdr = pygchdr.c_gc_next + for i in range(0, refs_index): + addr = self.snapshot_refs[i] + pyobj = llmemory.cast_adr_to_ptr(addr, self.PYOBJ_GC_HDR_PTR) + obj = self.snapshot_objs[pyobj.c_gc_refs - 1] + self.snapshot_refs[i] = llmemory.cast_ptr_to_adr(obj) def _take_snapshot_visit(pyobj, self_ptr): from rpython.rtyper.annlowlevel import cast_adr_to_nongc_instance @@ -62,7 +224,7 @@ pygchdr.c_gc_refs != 
self.RAWREFCOUNT_REFS_UNTRACKED: curr = self.snapshot_curr index = curr.refs_index + curr.refs_len - self.snapshot_refs[index] = llmemory.cast_ptr_to_adr(pyobj) + self.snapshot_refs[index] = llmemory.cast_ptr_to_adr(pygchdr) curr.refs_len += 1 def _take_snapshot_traverse(self, pyobj): diff --git a/rpython/memory/gc/rrc/mark.py b/rpython/memory/gc/rrc/mark.py --- a/rpython/memory/gc/rrc/mark.py +++ b/rpython/memory/gc/rrc/mark.py @@ -1,4 +1,62 @@ from rpython.memory.gc.rrc.base import RawRefCountBaseGC +from rpython.rlib.debug import ll_assert, debug_print, debug_start, debug_stop class RawRefCountMarkGC(RawRefCountBaseGC): - pass \ No newline at end of file + + def major_collection_trace_step(self): + if not self.cycle_enabled: + self._debug_check_consistency(print_label="begin-mark") + + if self.state == self.STATE_DEFAULT: + self.state = self.STATE_MARKING + + # First, untrack all tuples with only non-gc rrc objects and promote + # all other tuples to the pyobj_list + self._untrack_tuples() + + # Only trace and mark rawrefcounted object if we are not doing + # something special, like building gc.garbage. + if (self.state == self.STATE_MARKING and self.cycle_enabled): + merged_old_list = False + # check objects with finalizers from last collection cycle + if not self._gc_list_is_empty(self.pyobj_old_list): + merged_old_list = self._check_finalizer() + # collect all rawrefcounted roots + self._collect_roots(self.pyobj_list) + if merged_old_list: + # set all refcounts to zero for objects in dead list + # (might have been incremented) by fix_refcnt + gchdr = self.pyobj_dead_list.c_gc_next + while gchdr <> self.pyobj_dead_list: + gchdr.c_gc_refs = 0 + gchdr = gchdr.c_gc_next + self._debug_check_consistency(print_label="roots-marked") + # mark all objects reachable from rawrefcounted roots + self._mark_rawrefcount() + self._debug_check_consistency(print_label="before-fin") + self.state = self.STATE_GARBAGE_MARKING + if self._find_garbage(): # handle legacy finalizers + self._mark_garbage() + self._debug_check_consistency(print_label="end-legacy-fin") + self.state = self.STATE_MARKING + found_finalizer = self._find_finalizer() + if found_finalizer: + self._gc_list_move(self.pyobj_old_list, + self.pyobj_isolate_list) + use_cylicrc = not found_finalizer + self._debug_check_consistency(print_label="end-mark-cyclic") + else: + use_cylicrc = False # don't sweep any objects in cyclic isolates + + # now mark all pypy objects at the border, depending on the results + debug_print("use_cylicrc", use_cylicrc) + self.p_list_old.foreach(self._major_trace, (use_cylicrc, True)) + self._debug_check_consistency(print_label="end-mark") + + # fix refcnt back + self.refcnt_dict.foreach(self._fix_refcnt_back, None) + self.refcnt_dict.delete() + self.refcnt_dict = self.gc.AddressDict() + + self.state = self.STATE_DEFAULT + return True diff --git a/rpython/memory/gc/test/test_rawrefcount.py b/rpython/memory/gc/test/test_rawrefcount.py --- a/rpython/memory/gc/test/test_rawrefcount.py +++ b/rpython/memory/gc/test/test_rawrefcount.py @@ -3,6 +3,7 @@ from rpython.memory.gc.incminimark import IncrementalMiniMarkGC as IncMiniMark from rpython.memory.gc.rrc.base import RawRefCountBaseGC from rpython.memory.gc.rrc.mark import RawRefCountMarkGC +from rpython.memory.gc.rrc.incmark import RawRefCountIncMarkGC from rpython.memory.gc.test.test_direct import BaseDirectGCTest from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY, REFCNT_FROM_PYPY_LIGHT @@ -25,7 +26,8 @@ class TestRawRefCount(BaseDirectGCTest): GCClass = IncMiniMark 
- RRCGCClass = RawRefCountMarkGC + RRCGCClass = RawRefCountIncMarkGC + #RRCGCClass = RawRefCountMarkGC def setup_method(self, method): BaseDirectGCTest.setup_method(self, method) From pypy.commits at gmail.com Fri Aug 16 03:08:18 2019 From: pypy.commits at gmail.com (stevie_92) Date: Fri, 16 Aug 2019 00:08:18 -0700 (PDT) Subject: [pypy-commit] pypy cpyext-gc-cycle: WIP: adapted incremental rrc to use snapshot (legacy finalizers missing) Message-ID: <5d565662.1c69fb81.a2cd7.973d@mx.google.com> Author: Stefan Beyer Branch: cpyext-gc-cycle Changeset: r97190:7972e94ec0ac Date: 2019-08-15 13:44 +0200 http://bitbucket.org/pypy/pypy/changeset/7972e94ec0ac/ Log: WIP: adapted incremental rrc to use snapshot (legacy finalizers missing) diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py --- a/rpython/memory/gc/incminimark.py +++ b/rpython/memory/gc/incminimark.py @@ -3161,7 +3161,7 @@ def rawrefcount_end_garbage(self): ll_assert(self.rrc_enabled, "rawrefcount.init not called") - self.rrc_gc.state = RawRefCountBaseGC.STATE_MARKING + self.rrc_gc.state = RawRefCountBaseGC.STATE_DEFAULT def rawrefcount_next_garbage_pypy(self): ll_assert(self.rrc_enabled, "rawrefcount.init not called") diff --git a/rpython/memory/gc/rrc/incmark.py b/rpython/memory/gc/rrc/incmark.py --- a/rpython/memory/gc/rrc/incmark.py +++ b/rpython/memory/gc/rrc/incmark.py @@ -61,31 +61,33 @@ # self._debug_check_consistency(print_label="end-legacy-fin") self.state = self.STATE_DEFAULT - # We are finished with marking, now finish things up - #found_finalizer = self._find_finalizer() # modern finalizers # TODO: from snapshot - #if found_finalizer: - # self._gc_list_move(self.pyobj_old_list, - # self.pyobj_isolate_list) - #use_cylicrc = not found_finalizer - use_cylicrc = True - # now move all dead objs still in pyob_list to garbage # dead -> pyobj_old_list # live -> set cyclic refcount to > 0 pygchdr = self.pyobj_list.c_gc_next while pygchdr <> self.pyobj_list: next_old = pygchdr.c_gc_next - snapobj = self.snapshot_objs[pygchdr.c_gc_refs - 1] - pygchdr.c_gc_refs = snapobj.refcnt_external - if snapobj.refcnt_external == 0: - # remove from old list - next = pygchdr.c_gc_next - next.c_gc_prev = pygchdr.c_gc_prev - pygchdr.c_gc_prev.c_gc_next = next - # add to new list (or not, if it is a tuple) - self._gc_list_add(self.pyobj_old_list, pygchdr) + if pygchdr.c_gc_refs > 0: + snapobj = self.snapshot_objs[pygchdr.c_gc_refs - 1] + pygchdr.c_gc_refs = snapobj.refcnt_external + if snapobj.refcnt_external == 0: + # remove from old list + next = pygchdr.c_gc_next + next.c_gc_prev = pygchdr.c_gc_prev + pygchdr.c_gc_prev.c_gc_next = next + # add to new list (or not, if it is a tuple) + self._gc_list_add(self.pyobj_old_list, pygchdr) + else: + pygchdr.c_gc_refs = 1 # new object, keep alive pygchdr = next_old + # We are finished with marking, now finish things up + found_finalizer = self._find_finalizer() # modern finalizers + if found_finalizer: + self._gc_list_move(self.pyobj_old_list, + self.pyobj_isolate_list) + use_cylicrc = not found_finalizer + # now mark all pypy objects at the border, depending on the results self._debug_check_consistency(print_label="end-mark-cyclic") debug_print("use_cylicrc", use_cylicrc) From pypy.commits at gmail.com Fri Aug 16 03:08:19 2019 From: pypy.commits at gmail.com (stevie_92) Date: Fri, 16 Aug 2019 00:08:19 -0700 (PDT) Subject: [pypy-commit] pypy cpyext-gc-cycle: Adapted incremental rrc to use snapshot Message-ID: <5d565663.1c69fb81.d9929.1a44@mx.google.com> Author: Stefan Beyer 
Branch: cpyext-gc-cycle Changeset: r97191:13bf4458e03a Date: 2019-08-15 14:28 +0200 http://bitbucket.org/pypy/pypy/changeset/13bf4458e03a/ Log: Adapted incremental rrc to use snapshot diff --git a/rpython/memory/gc/rrc/base.py b/rpython/memory/gc/rrc/base.py --- a/rpython/memory/gc/rrc/base.py +++ b/rpython/memory/gc/rrc/base.py @@ -40,7 +40,7 @@ _ADDRARRAY = lltype.Array(llmemory.Address, hints={'nolength': True}) PYOBJ_SNAPSHOT_OBJ = lltype.Struct('PyObject_Snapshot', ('pyobj', llmemory.Address), - ('refcnt', lltype.Signed), + ('status', lltype.Signed), ('refcnt_external', lltype.Signed), ('refs_index', lltype.Signed), ('refs_len', lltype.Signed), @@ -442,7 +442,6 @@ else: self._free(pyobject, True) - def _untrack_tuples(self): gchdr = self.tuple_list.c_gc_next while gchdr <> self.tuple_list: @@ -456,136 +455,7 @@ self._gc_list_add(self.pyobj_list, gchdr) gchdr = gchdr_next - def _collect_roots(self, pygclist): - # Initialize the cyclic refcount with the real refcount. - self._collect_roots_init_list(pygclist) - - # Save the real refcount of objects at border - self.p_list_old.foreach(self._obj_save_refcnt, None) - self.o_list_old.foreach(self._obj_save_refcnt, None) - - # Subtract all internal refcounts from the cyclic refcount - # of rawrefcounted objects - self._collect_roots_subtract_internal(pygclist) - - # For all non-gc pyobjects which have a refcount > 0, - # mark all reachable objects on the pypy side - self.p_list_old.foreach(self._major_trace_nongc, True) - - # For every object in this set, if it is marked, add 1 as a real - # refcount (p_list => pyobj stays alive if obj stays alive). - self.p_list_old.foreach(self._obj_fix_refcnt, None) - self.o_list_old.foreach(self._obj_fix_refcnt, None) - - # now all rawrefcounted roots or live border objects have a - # refcount > 0 - self._debug_check_consistency(print_label="rc-initialized") - - def _collect_roots_init_list(self, pygclist): - from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY - from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT - pygchdr = pygclist.c_gc_next - while pygchdr <> pygclist: - refcnt = self.gc_as_pyobj(pygchdr).c_ob_refcnt - if refcnt >= REFCNT_FROM_PYPY_LIGHT: - refcnt -= REFCNT_FROM_PYPY_LIGHT - elif refcnt >= REFCNT_FROM_PYPY: - refcnt -= REFCNT_FROM_PYPY - self._pyobj_gc_refcnt_set(pygchdr, refcnt) - pygchdr = pygchdr.c_gc_next - - def _collect_roots_subtract_internal(self, pygclist): - pygchdr = pygclist.c_gc_next - while pygchdr <> pygclist: - pyobj = self.gc_as_pyobj(pygchdr) - self._traverse(pyobj, -1) - pygchdr = pygchdr.c_gc_next - - def _pyobj_gc_refcnt_set(self, pygchdr, refcnt): - pygchdr.c_gc_refs &= self.RAWREFCOUNT_REFS_MASK_FINALIZED - pygchdr.c_gc_refs |= refcnt << self.RAWREFCOUNT_REFS_SHIFT - - def _obj_save_refcnt(self, pyobject, ignore): - pyobj = self._pyobj(pyobject) - link = llmemory.cast_int_to_adr(pyobj.c_ob_pypy_link) - self.refcnt_dict.setitem(pyobject, link) - pyobj.c_ob_pypy_link = pyobj.c_ob_refcnt - - def _obj_fix_refcnt(self, pyobject, ignore): - pyobj = self._pyobj(pyobject) - #intobj = pyobj.c_ob_pypy_link - #obj = llmemory.cast_int_to_adr(intobj) - obj = self.refcnt_dict.get(pyobject) - gchdr = self.pyobj_as_gc(pyobj) - if gchdr <> lltype.nullptr(self.PYOBJ_GC_HDR): - rc = gchdr.c_gc_refs - refcnt = gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT - if rc == self.RAWREFCOUNT_REFS_UNTRACKED: - debug_print("gc obj not tracked", gchdr, ": obj", obj, - "cyclic-rc", rc) - else: - debug_print("gc obj tracked", gchdr, ": obj", obj, "real-rc", - refcnt, "gc-next", - 
gchdr.c_gc_next, "gc-prev", gchdr.c_gc_prev) - if self.gc.header(obj).tid & (self.GCFLAG_VISITED | - self.GCFLAG_NO_HEAP_PTRS): - refcnt += 1 - self._pyobj_gc_refcnt_set(gchdr, refcnt) - - def _mark_rawrefcount(self): - if self._gc_list_is_empty(self.pyobj_list): - self._gc_list_init(self.pyobj_old_list) - else: - self._gc_list_move(self.pyobj_list, self.pyobj_old_list) - # as long as new objects with cyclic a refcount > 0 or alive border - # objects are found, increment the refcount of all referenced objects - # of those newly found objects - found_alive = True - pyobj_old = self.pyobj_list - # - while found_alive: # TODO: working set to improve performance? - found_alive = False - gchdr = self.pyobj_old_list.c_gc_next - while gchdr <> self.pyobj_old_list: - next_old = gchdr.c_gc_next - found_alive |= self._mark_rawrefcount_obj(gchdr, pyobj_old) - gchdr = next_old - # - # now all rawrefcounted objects, which are alive, have a cyclic - # refcount > 0 or are marked - - def _mark_rawrefcount_obj(self, gchdr, gchdr_move): - alive = (gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT) > 0 - pyobj = self.gc_as_pyobj(gchdr) - obj = llmemory.NULL - if pyobj.c_ob_pypy_link <> 0: - #intobj = pyobj.c_ob_pypy_link - #obj = llmemory.cast_int_to_adr(intobj) - pyobject = llmemory.cast_ptr_to_adr(pyobj) - obj = self.refcnt_dict.get(pyobject) - if not alive and self.gc.header(obj).tid & ( - self.GCFLAG_VISITED | self.GCFLAG_NO_HEAP_PTRS): - # add fake refcount, to mark it as live - gchdr.c_gc_refs += 1 << self.RAWREFCOUNT_REFS_SHIFT - alive = True - if alive: - # remove from old list - next = gchdr.c_gc_next - next.c_gc_prev = gchdr.c_gc_prev - gchdr.c_gc_prev.c_gc_next = next - # add to new list (or not, if it is a tuple) - self._gc_list_add(gchdr_move, gchdr) - # increment refcounts - self._traverse(pyobj, 1) - # mark recursively, if it is a pypyobj - if pyobj.c_ob_pypy_link <> 0: - #intobj = pyobj.c_ob_pypy_link - #obj = llmemory.cast_int_to_adr(intobj) - self.gc.objects_to_trace.append(obj) - self.gc.visit_all_objects() - return alive - - def _find_garbage(self): + def _find_garbage(self, use_dict): found_garbage = False gchdr = self.pyobj_old_list.c_gc_next while gchdr <> self.pyobj_old_list: @@ -593,12 +463,12 @@ garbage = self.finalizer_type(gchdr) == \ self.RAWREFCOUNT_FINALIZER_LEGACY if garbage: - self._move_to_garbage(gchdr) + self._move_to_garbage(gchdr, use_dict) found_garbage = True gchdr = next_old return found_garbage - def _mark_garbage(self): + def _mark_garbage(self, use_dict): found_garbage = True # while found_garbage: @@ -609,21 +479,23 @@ alive = (gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT) > 0 pyobj = self.gc_as_pyobj(gchdr) if pyobj.c_ob_pypy_link <> 0: - #intobj = pyobj.c_ob_pypy_link - #obj = llmemory.cast_int_to_adr(intobj) - pyobject = llmemory.cast_ptr_to_adr(pyobj) - obj = self.refcnt_dict.get(pyobject) + if use_dict: + pyobject = llmemory.cast_ptr_to_adr(pyobj) + obj = self.refcnt_dict.get(pyobject) + else: + intobj = pyobj.c_ob_pypy_link + obj = llmemory.cast_int_to_adr(intobj) if not alive and self.gc.header(obj).tid & ( self.GCFLAG_VISITED | self.GCFLAG_NO_HEAP_PTRS): # add fake refcount, to mark it as live gchdr.c_gc_refs += 1 << self.RAWREFCOUNT_REFS_SHIFT alive = True if alive: - self._move_to_garbage(gchdr) + self._move_to_garbage(gchdr, use_dict) found_garbage = True gchdr = next_old - def _move_to_garbage(self, gchdr): + def _move_to_garbage(self, gchdr, use_dict): pyobj = self.gc_as_pyobj(gchdr) # remove from old list next = gchdr.c_gc_next @@ -642,10 +514,12 @@ # 
refcount of zero or an unmarked linked pypy object) self._traverse(pyobj, 1) if pyobj.c_ob_pypy_link <> 0: - #intobj = pyobj.c_ob_pypy_link - pyobject = llmemory.cast_ptr_to_adr(pyobj) - obj = self.refcnt_dict.get(pyobject) - #obj = llmemory.cast_int_to_adr(intobj) + if use_dict: + pyobject = llmemory.cast_ptr_to_adr(pyobj) + obj = self.refcnt_dict.get(pyobject) + else: + intobj = pyobj.c_ob_pypy_link + obj = llmemory.cast_int_to_adr(intobj) self.garbage_to_trace.append(obj) self.gc.objects_to_trace.append(obj) self.gc.visit_all_objects() diff --git a/rpython/memory/gc/rrc/incmark.py b/rpython/memory/gc/rrc/incmark.py --- a/rpython/memory/gc/rrc/incmark.py +++ b/rpython/memory/gc/rrc/incmark.py @@ -55,12 +55,6 @@ self.state = self.STATE_GARBAGE_MARKING return False - elif self.state == self.STATE_GARBAGE_MARKING: - #if self._find_garbage(): # handle legacy finalizers # TODO: from snapshot - # self._mark_garbage() # TODO: from snapshot - # self._debug_check_consistency(print_label="end-legacy-fin") - self.state = self.STATE_DEFAULT - # now move all dead objs still in pyob_list to garbage # dead -> pyobj_old_list # live -> set cyclic refcount to > 0 @@ -81,6 +75,11 @@ pygchdr.c_gc_refs = 1 # new object, keep alive pygchdr = next_old + if self._find_garbage(False): # handle legacy finalizers + self._mark_garbage(False) + self._debug_check_consistency(print_label="end-legacy-fin") + self.state = self.STATE_DEFAULT + # We are finished with marking, now finish things up found_finalizer = self._find_finalizer() # modern finalizers if found_finalizer: @@ -127,7 +126,7 @@ # refcount > 0 or are marked def _mark_rawrefcount_obj(self, snapobj): - if snapobj.refcnt == 0: # hack + if snapobj.status == 0: return False alive = snapobj.refcnt_external > 0 @@ -149,9 +148,8 @@ if snapobj.pypy_link <> 0: self.gc.objects_to_trace.append(obj) self.gc.visit_all_objects() - - # remove from old list, TODO: hack -> working set might be better - snapobj.refcnt = 0 + # mark as processed + snapobj.status = 0 return alive def _take_snapshot(self, pygclist): @@ -196,7 +194,7 @@ pygchdr.c_gc_refs = objs_index + 1 obj = self.snapshot_objs[objs_index] obj.pyobj = llmemory.cast_ptr_to_adr(pyobj) - obj.refcnt = 1 + obj.status = 1 obj.refcnt_external = refcnt obj.refs_index = refs_index obj.refs_len = 0 diff --git a/rpython/memory/gc/rrc/mark.py b/rpython/memory/gc/rrc/mark.py --- a/rpython/memory/gc/rrc/mark.py +++ b/rpython/memory/gc/rrc/mark.py @@ -1,4 +1,5 @@ from rpython.memory.gc.rrc.base import RawRefCountBaseGC +from rpython.rtyper.lltypesystem import lltype, llmemory, llgroup, rffi from rpython.rlib.debug import ll_assert, debug_print, debug_start, debug_stop class RawRefCountMarkGC(RawRefCountBaseGC): @@ -35,8 +36,8 @@ self._mark_rawrefcount() self._debug_check_consistency(print_label="before-fin") self.state = self.STATE_GARBAGE_MARKING - if self._find_garbage(): # handle legacy finalizers - self._mark_garbage() + if self._find_garbage(True): # handle legacy finalizers + self._mark_garbage(True) self._debug_check_consistency(print_label="end-legacy-fin") self.state = self.STATE_MARKING found_finalizer = self._find_finalizer() @@ -60,3 +61,132 @@ self.state = self.STATE_DEFAULT return True + + def _collect_roots(self, pygclist): + # Initialize the cyclic refcount with the real refcount. 
+ self._collect_roots_init_list(pygclist) + + # Save the real refcount of objects at border + self.p_list_old.foreach(self._obj_save_refcnt, None) + self.o_list_old.foreach(self._obj_save_refcnt, None) + + # Subtract all internal refcounts from the cyclic refcount + # of rawrefcounted objects + self._collect_roots_subtract_internal(pygclist) + + # For all non-gc pyobjects which have a refcount > 0, + # mark all reachable objects on the pypy side + self.p_list_old.foreach(self._major_trace_nongc, True) + + # For every object in this set, if it is marked, add 1 as a real + # refcount (p_list => pyobj stays alive if obj stays alive). + self.p_list_old.foreach(self._obj_fix_refcnt, None) + self.o_list_old.foreach(self._obj_fix_refcnt, None) + + # now all rawrefcounted roots or live border objects have a + # refcount > 0 + self._debug_check_consistency(print_label="rc-initialized") + + def _collect_roots_init_list(self, pygclist): + from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY + from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT + pygchdr = pygclist.c_gc_next + while pygchdr <> pygclist: + refcnt = self.gc_as_pyobj(pygchdr).c_ob_refcnt + if refcnt >= REFCNT_FROM_PYPY_LIGHT: + refcnt -= REFCNT_FROM_PYPY_LIGHT + elif refcnt >= REFCNT_FROM_PYPY: + refcnt -= REFCNT_FROM_PYPY + self._pyobj_gc_refcnt_set(pygchdr, refcnt) + pygchdr = pygchdr.c_gc_next + + def _collect_roots_subtract_internal(self, pygclist): + pygchdr = pygclist.c_gc_next + while pygchdr <> pygclist: + pyobj = self.gc_as_pyobj(pygchdr) + self._traverse(pyobj, -1) + pygchdr = pygchdr.c_gc_next + + def _pyobj_gc_refcnt_set(self, pygchdr, refcnt): + pygchdr.c_gc_refs &= self.RAWREFCOUNT_REFS_MASK_FINALIZED + pygchdr.c_gc_refs |= refcnt << self.RAWREFCOUNT_REFS_SHIFT + + def _obj_save_refcnt(self, pyobject, ignore): + pyobj = self._pyobj(pyobject) + link = llmemory.cast_int_to_adr(pyobj.c_ob_pypy_link) + self.refcnt_dict.setitem(pyobject, link) + pyobj.c_ob_pypy_link = pyobj.c_ob_refcnt + + def _obj_fix_refcnt(self, pyobject, ignore): + pyobj = self._pyobj(pyobject) + #intobj = pyobj.c_ob_pypy_link + #obj = llmemory.cast_int_to_adr(intobj) + obj = self.refcnt_dict.get(pyobject) + gchdr = self.pyobj_as_gc(pyobj) + if gchdr <> lltype.nullptr(self.PYOBJ_GC_HDR): + rc = gchdr.c_gc_refs + refcnt = gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT + if rc == self.RAWREFCOUNT_REFS_UNTRACKED: + debug_print("gc obj not tracked", gchdr, ": obj", obj, + "cyclic-rc", rc) + else: + debug_print("gc obj tracked", gchdr, ": obj", obj, "real-rc", + refcnt, "gc-next", + gchdr.c_gc_next, "gc-prev", gchdr.c_gc_prev) + if self.gc.header(obj).tid & (self.GCFLAG_VISITED | + self.GCFLAG_NO_HEAP_PTRS): + refcnt += 1 + self._pyobj_gc_refcnt_set(gchdr, refcnt) + + def _mark_rawrefcount(self): + if self._gc_list_is_empty(self.pyobj_list): + self._gc_list_init(self.pyobj_old_list) + else: + self._gc_list_move(self.pyobj_list, self.pyobj_old_list) + # as long as new objects with cyclic a refcount > 0 or alive border + # objects are found, increment the refcount of all referenced objects + # of those newly found objects + found_alive = True + pyobj_old = self.pyobj_list + # + while found_alive: # TODO: working set to improve performance? 
+ found_alive = False + gchdr = self.pyobj_old_list.c_gc_next + while gchdr <> self.pyobj_old_list: + next_old = gchdr.c_gc_next + found_alive |= self._mark_rawrefcount_obj(gchdr, pyobj_old) + gchdr = next_old + # + # now all rawrefcounted objects, which are alive, have a cyclic + # refcount > 0 or are marked + + def _mark_rawrefcount_obj(self, gchdr, gchdr_move): + alive = (gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT) > 0 + pyobj = self.gc_as_pyobj(gchdr) + obj = llmemory.NULL + if pyobj.c_ob_pypy_link <> 0: + #intobj = pyobj.c_ob_pypy_link + #obj = llmemory.cast_int_to_adr(intobj) + pyobject = llmemory.cast_ptr_to_adr(pyobj) + obj = self.refcnt_dict.get(pyobject) + if not alive and self.gc.header(obj).tid & ( + self.GCFLAG_VISITED | self.GCFLAG_NO_HEAP_PTRS): + # add fake refcount, to mark it as live + gchdr.c_gc_refs += 1 << self.RAWREFCOUNT_REFS_SHIFT + alive = True + if alive: + # remove from old list + next = gchdr.c_gc_next + next.c_gc_prev = gchdr.c_gc_prev + gchdr.c_gc_prev.c_gc_next = next + # add to new list (or not, if it is a tuple) + self._gc_list_add(gchdr_move, gchdr) + # increment refcounts + self._traverse(pyobj, 1) + # mark recursively, if it is a pypyobj + if pyobj.c_ob_pypy_link <> 0: + #intobj = pyobj.c_ob_pypy_link + #obj = llmemory.cast_int_to_adr(intobj) + self.gc.objects_to_trace.append(obj) + self.gc.visit_all_objects() + return alive From pypy.commits at gmail.com Fri Aug 16 03:08:21 2019 From: pypy.commits at gmail.com (stevie_92) Date: Fri, 16 Aug 2019 00:08:21 -0700 (PDT) Subject: [pypy-commit] pypy cpyext-gc-cycle: Fixed compilation issues with rrc incmark Message-ID: <5d565665.1c69fb81.ef366.0d51@mx.google.com> Author: Stefan Beyer Branch: cpyext-gc-cycle Changeset: r97192:51bfe92174e1 Date: 2019-08-16 09:07 +0200 http://bitbucket.org/pypy/pypy/changeset/51bfe92174e1/ Log: Fixed compilation issues with rrc incmark diff --git a/rpython/memory/gc/rrc/incmark.py b/rpython/memory/gc/rrc/incmark.py --- a/rpython/memory/gc/rrc/incmark.py +++ b/rpython/memory/gc/rrc/incmark.py @@ -146,6 +146,8 @@ obj_ref.refcnt_external += 1 # mark recursively, if it is a pypyobj if snapobj.pypy_link <> 0: + intobj = snapobj.pypy_link + obj = llmemory.cast_int_to_adr(intobj) self.gc.objects_to_trace.append(obj) self.gc.visit_all_objects() # mark as processed @@ -215,7 +217,7 @@ # self_adr = rffi.cast(llmemory.Address, self_ptr) self = cast_adr_to_nongc_instance(RawRefCountIncMarkGC, self_adr) - self._rrc_visit_snapshot_action(pyobj, None) + self._take_snapshot_visit_action(pyobj, None) return rffi.cast(rffi.INT_real, 0) def _take_snapshot_visit_action(self, pyobj, ignore): From pypy.commits at gmail.com Fri Aug 16 05:18:00 2019 From: pypy.commits at gmail.com (mattip) Date: Fri, 16 Aug 2019 02:18:00 -0700 (PDT) Subject: [pypy-commit] pypy default: add more missing API functions that caused HMAC block_size error and test failure Message-ID: <5d5674c8.1c69fb81.fa359.5988@mx.google.com> Author: Matti Picus Branch: Changeset: r97193:9af25f100cea Date: 2019-08-16 12:10 +0300 http://bitbucket.org/pypy/pypy/changeset/9af25f100cea/ Log: add more missing API functions that caused HMAC block_size error and test failure diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/evp.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/evp.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/evp.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/evp.py @@ -90,7 +90,6 @@ int EVP_DigestVerifyInit(EVP_MD_CTX *, EVP_PKEY_CTX **, const EVP_MD *, ENGINE *, EVP_PKEY *); - int PKCS5_PBKDF2_HMAC_SHA1(const char *, int, const 
unsigned char *, int, int, int, unsigned char *); @@ -147,6 +146,8 @@ EC_KEY *EVP_PKEY_get1_EC_KEY(EVP_PKEY *); int EVP_PKEY_set1_EC_KEY(EVP_PKEY *, EC_KEY *); +int EVP_MD_CTX_block_size(const EVP_MD_CTX *); +int EVP_CIPHER_CTX_block_size(const EVP_CIPHER_CTX *); int EVP_CIPHER_CTX_ctrl(EVP_CIPHER_CTX *, int, int, void *); int PKCS5_PBKDF2_HMAC(const char *, int, const unsigned char *, int, int, From pypy.commits at gmail.com Fri Aug 16 05:18:01 2019 From: pypy.commits at gmail.com (mattip) Date: Fri, 16 Aug 2019 02:18:01 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: remove debug cruft Message-ID: <5d5674c9.1c69fb81.d5ed7.2459@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97194:7b2e8730917d Date: 2019-08-16 12:11 +0300 http://bitbucket.org/pypy/pypy/changeset/7b2e8730917d/ Log: remove debug cruft diff --git a/lib-python/3/test/test_ssl.py b/lib-python/3/test/test_ssl.py --- a/lib-python/3/test/test_ssl.py +++ b/lib-python/3/test/test_ssl.py @@ -2844,10 +2844,6 @@ else: s.close() - def test_socketserver_urlib_uses_bisect(self): - b = urllib.request.bisect - raise ValueError('urllib.request.bisect is %s' % str(b)) - def test_socketserver(self): """Using socketserver to create and manage SSL connections.""" server = make_https_server(self, certfile=CERTFILE) From pypy.commits at gmail.com Fri Aug 16 05:18:03 2019 From: pypy.commits at gmail.com (mattip) Date: Fri, 16 Aug 2019 02:18:03 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: merge default into branch Message-ID: <5d5674cb.1c69fb81.26ebf.b51c@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97195:06475a814c91 Date: 2019-08-16 12:17 +0300 http://bitbucket.org/pypy/pypy/changeset/06475a814c91/ Log: merge default into branch diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/evp.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/evp.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/evp.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/evp.py @@ -90,7 +90,6 @@ int EVP_DigestVerifyInit(EVP_MD_CTX *, EVP_PKEY_CTX **, const EVP_MD *, ENGINE *, EVP_PKEY *); - int PKCS5_PBKDF2_HMAC_SHA1(const char *, int, const unsigned char *, int, int, int, unsigned char *); @@ -147,6 +146,8 @@ EC_KEY *EVP_PKEY_get1_EC_KEY(EVP_PKEY *); int EVP_PKEY_set1_EC_KEY(EVP_PKEY *, EC_KEY *); +int EVP_MD_CTX_block_size(const EVP_MD_CTX *); +int EVP_CIPHER_CTX_block_size(const EVP_CIPHER_CTX *); int EVP_CIPHER_CTX_ctrl(EVP_CIPHER_CTX *, int, int, void *); int PKCS5_PBKDF2_HMAC(const char *, int, const unsigned char *, int, int, From pypy.commits at gmail.com Fri Aug 16 05:37:27 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 16 Aug 2019 02:37:27 -0700 (PDT) Subject: [pypy-commit] pypy default: Un-fix the broken attempt at emulating bug-to-bug compatibility, and instead Message-ID: <5d567957.1c69fb81.4d576.27fe@mx.google.com> Author: Armin Rigo Branch: Changeset: r97196:f48b6c5fe1ca Date: 2019-08-16 11:36 +0200 http://bitbucket.org/pypy/pypy/changeset/f48b6c5fe1ca/ Log: Un-fix the broken attempt at emulating bug-to-bug compatibility, and instead just give the "correct" result in all cases. diff --git a/extra_tests/test_json.py b/extra_tests/test_json.py --- a/extra_tests/test_json.py +++ b/extra_tests/test_json.py @@ -52,6 +52,10 @@ == '{"3": 4, "5": 6}' def test_boolean_as_dict_key(): - # it's this way in CPython 2.x. In 3.x it was fixed - assert json.dumps({True: 5}) == '{"True": 5}' # != '{"true": 5}' - assert json.dumps({False: 5}) == '{"False": 5}' + # In CPython 2.x, dumps({True:...}) gives {"True":...}. 
It should be + # "true" instead; it's a bug as far as I can tell. In 3.x it was fixed. + # BUT! if we call dumps() with sort_keys=True, then CPython (any version) + # gives "true" instead of "True". Surprize! + # I don't want to understand why, let's just not attempt to reproduce that. + assert json.dumps({True: 5}) == '{"true": 5}' + assert json.dumps({False: 5}) == '{"false": 5}' diff --git a/lib-python/2.7/json/encoder.py b/lib-python/2.7/json/encoder.py --- a/lib-python/2.7/json/encoder.py +++ b/lib-python/2.7/json/encoder.py @@ -301,9 +301,9 @@ elif isinstance(key, float): key = self.__floatstr(key) elif key is True: - key = 'True' # XXX != 'true', bug-to-bug compatibility + key = 'true' elif key is False: - key = 'False' # XXX != 'false', bug-to-bug compatibility + key = 'false' elif key is None: key = 'null' elif isinstance(key, (int, long)): From pypy.commits at gmail.com Fri Aug 16 06:02:00 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 16 Aug 2019 03:02:00 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: issue #3055 Message-ID: <5d567f18.1c69fb81.f67e3.eaa2@mx.google.com> Author: Armin Rigo Branch: py3.6 Changeset: r97197:72fdf69a0e6d Date: 2019-08-16 12:01 +0200 http://bitbucket.org/pypy/pypy/changeset/72fdf69a0e6d/ Log: issue #3055 fix if we see a PyTypeObject with tp_doc=="" diff --git a/pypy/module/cpyext/test/test_typeobject.py b/pypy/module/cpyext/test/test_typeobject.py --- a/pypy/module/cpyext/test/test_typeobject.py +++ b/pypy/module/cpyext/test/test_typeobject.py @@ -1624,6 +1624,28 @@ assert module.get_number() == 4043 raises(AttributeError, "del foo.bar") + def test_tp_doc_issue3055(self): + module = self.import_extension('foo', [ + ("new_obj", "METH_NOARGS", + ''' + PyObject *obj; + obj = PyObject_New(PyObject, &Foo_Type); + return obj; + ''' + )], prologue=''' + static PyTypeObject Foo_Type = { + PyVarObject_HEAD_INIT(NULL, 0) + "foo.foo", + sizeof(PyObject), + }; + ''', more_init = ''' + Foo_Type.tp_flags = Py_TPFLAGS_DEFAULT; + Foo_Type.tp_doc = ""; + if (PyType_Ready(&Foo_Type) < 0) INITERROR; + ''') + obj = module.new_obj() + assert type(obj).__doc__ is None + class AppTestHashable(AppTestCpythonExtensionBase): def test_unhashable(self): diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -381,7 +381,7 @@ dict_w[method_name] = w_obj if pto.c_tp_doc: raw_doc = rffi.charp2str(cts.cast('char*', pto.c_tp_doc)) - dict_w['__doc__'] = space.newtext(extract_doc(raw_doc, name)) + dict_w['__doc__'] = space.newtext_or_none(extract_doc(raw_doc, name)) if pto.c_tp_new: add_tp_new_wrapper(space, dict_w, pto) From pypy.commits at gmail.com Fri Aug 16 07:07:12 2019 From: pypy.commits at gmail.com (mattip) Date: Fri, 16 Aug 2019 04:07:12 -0700 (PDT) Subject: [pypy-commit] buildbot default: only allow a single aarch64 build at a time Message-ID: <5d568e60.1c69fb81.5b6a1.41a1@mx.google.com> Author: Matti Picus Branch: Changeset: r1092:d6d16988b8b9 Date: 2019-08-16 14:06 +0300 http://bitbucket.org/pypy/buildbot/changeset/d6d16988b8b9/ Log: only allow a single aarch64 build at a time diff --git a/bot2/pypybuildbot/builds.py b/bot2/pypybuildbot/builds.py --- a/bot2/pypybuildbot/builds.py +++ b/bot2/pypybuildbot/builds.py @@ -30,7 +30,7 @@ #SpeedOldLock = locks.MasterLock('speed_old_lock', maxCount=2) # bencher4 has 8 cores, 32 GB RAM Bencher4Lock = locks.MasterLock('bencher4_lock', maxCount=4) -AARCH64Lock = locks.MasterLock('aarch64_lock', maxCount=2) +AARCH64Lock = 
locks.MasterLock('aarch64_lock', maxCount=1) # The cross translation machine can accomodate 2 jobs at the same time ARMCrossLock = locks.MasterLock('arm_cpu', maxCount=2) From pypy.commits at gmail.com Fri Aug 16 07:35:06 2019 From: pypy.commits at gmail.com (mattip) Date: Fri, 16 Aug 2019 04:35:06 -0700 (PDT) Subject: [pypy-commit] pypy default: force linking against openssl 1.0.2 on win32. CPython changed this for v3.7 Message-ID: <5d5694ea.1c69fb81.8d877.9afa@mx.google.com> Author: Matti Picus Branch: Changeset: r97198:7a38750c11b1 Date: 2019-08-16 14:25 +0300 http://bitbucket.org/pypy/pypy/changeset/7a38750c11b1/ Log: force linking against openssl 1.0.2 on win32. CPython changed this for v3.7 diff --git a/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py b/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py --- a/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py @@ -20,7 +20,7 @@ windows_link_legacy_openssl = os.environ.get( "CRYPTOGRAPHY_WINDOWS_LINK_LEGACY_OPENSSL", None ) - if windows_link_legacy_openssl is None: + if 0 and windows_link_legacy_openssl is None: # Link against the 1.1.0 names libs = ["libssl", "libcrypto"] else: From pypy.commits at gmail.com Sat Aug 17 02:36:13 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 16 Aug 2019 23:36:13 -0700 (PDT) Subject: [pypy-commit] cffi default: Give a RuntimeError when we try to call a null function pointer, similar to how Message-ID: <5d57a05d.1c69fb81.5bc75.9165@mx.google.com> Author: Armin Rigo Branch: Changeset: r3285:74f57a76ed53 Date: 2019-08-17 08:35 +0200 http://bitbucket.org/cffi/cffi/changeset/74f57a76ed53/ Log: Give a RuntimeError when we try to call a null function pointer, similar to how we get a RuntimeError when trying to read or write through a null pointer. 
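Seen from application code, the change only matters when a cdata function pointer happens to be NULL: calling it typically crashed the process before, and now raises a Python-level exception, matching what already happens when reading or writing through a NULL pointer cdata. A minimal illustration (hypothetical snippet; it assumes a cffi build that already contains this changeset):

    import cffi

    ffi = cffi.FFI()
    fptr = ffi.cast("int(*)(int, int)", 0)   # a NULL function pointer
    try:
        fptr(40, 2)        # used to segfault; now raises RuntimeError
    except RuntimeError as e:
        print(e)           # the message names the cdata type of fptr

The same guard is mirrored on the PyPy side in ctypefunc.py (see the pypy changesets further down), so the CPython C extension module and PyPy's built-in _cffi_backend behave alike.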
diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c --- a/c/_cffi_backend.c +++ b/c/_cffi_backend.c @@ -2997,6 +2997,12 @@ cd->c_type->ct_name); return NULL; } + if (cd->c_data == NULL) { + PyErr_Format(PyExc_RuntimeError, + "cannot call null pointer pointer from cdata '%s'", + cd->c_type->ct_name); + return NULL; + } if (kwds != NULL && PyDict_Size(kwds) != 0) { PyErr_SetString(PyExc_TypeError, "a cdata function cannot be called with keyword arguments"); diff --git a/c/test_c.py b/c/test_c.py --- a/c/test_c.py +++ b/c/test_c.py @@ -4438,3 +4438,10 @@ float(cast(BBool, 42)) with pytest.raises(TypeError): complex(cast(BBool, 42)) + +def test_cannot_call_null_function_pointer(): + BInt = new_primitive_type("int") + BFunc = new_function_type((BInt, BInt), BInt, False) + f = cast(BFunc, 0) + with pytest.raises(RuntimeError): + f(40, 2) From pypy.commits at gmail.com Sat Aug 17 02:42:39 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 16 Aug 2019 23:42:39 -0700 (PDT) Subject: [pypy-commit] pypy default: update to cffi/74f57a76ed53 Message-ID: <5d57a1df.1c69fb81.3e31b.d0c6@mx.google.com> Author: Armin Rigo Branch: Changeset: r97199:0dadf2ae75d9 Date: 2019-08-17 08:42 +0200 http://bitbucket.org/pypy/pypy/changeset/0dadf2ae75d9/ Log: update to cffi/74f57a76ed53 diff --git a/extra_tests/cffi_tests/cffi1/test_recompiler.py b/extra_tests/cffi_tests/cffi1/test_recompiler.py --- a/extra_tests/cffi_tests/cffi1/test_recompiler.py +++ b/extra_tests/cffi_tests/cffi1/test_recompiler.py @@ -2414,6 +2414,18 @@ assert ffi.sizeof(a[0]) == ffi.sizeof("unsigned") assert ffi.sizeof(b[0]) == ffi.sizeof(a[0]) +def test_struct_with_func_with_struct_pointer_arg(): + ffi = FFI() + ffi.cdef("""struct BinaryTree { + int (* CompareKey)(struct BinaryTree *tree); + };""") + lib = verify(ffi, "test_struct_with_func_with_struct_pointer_arg", """ + struct BinaryTree { + int (* CompareKey)(struct BinaryTree *tree); + }; + """) + ffi.new("struct BinaryTree *") + def test_struct_with_func_with_struct_arg(): ffi = FFI() ffi.cdef("""struct BinaryTree { diff --git a/pypy/module/_cffi_backend/ctypefunc.py b/pypy/module/_cffi_backend/ctypefunc.py --- a/pypy/module/_cffi_backend/ctypefunc.py +++ b/pypy/module/_cffi_backend/ctypefunc.py @@ -128,6 +128,10 @@ return W_CTypePtrBase._fget(self, attrchar) def call(self, funcaddr, args_w): + if not funcaddr: + raise oefmt(self.space.w_RuntimeError, + "cannot call null function pointer from cdata '%s'", + self.name) if self.cif_descr: # regular case: this function does not take '...' 
arguments self = jit.promote(self) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -4425,3 +4425,10 @@ float(cast(BBool, 42)) with pytest.raises(TypeError): complex(cast(BBool, 42)) + +def test_cannot_call_null_function_pointer(): + BInt = new_primitive_type("int") + BFunc = new_function_type((BInt, BInt), BInt, False) + f = cast(BFunc, 0) + with pytest.raises(RuntimeError): + f(40, 2) From pypy.commits at gmail.com Sat Aug 17 02:43:52 2019 From: pypy.commits at gmail.com (arigo) Date: Fri, 16 Aug 2019 23:43:52 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: hg merge default Message-ID: <5d57a228.1c69fb81.ed27a.7190@mx.google.com> Author: Armin Rigo Branch: py3.6 Changeset: r97200:9122fab2808f Date: 2019-08-17 08:43 +0200 http://bitbucket.org/pypy/pypy/changeset/9122fab2808f/ Log: hg merge default diff --git a/extra_tests/cffi_tests/cffi1/test_recompiler.py b/extra_tests/cffi_tests/cffi1/test_recompiler.py --- a/extra_tests/cffi_tests/cffi1/test_recompiler.py +++ b/extra_tests/cffi_tests/cffi1/test_recompiler.py @@ -2414,6 +2414,18 @@ assert ffi.sizeof(a[0]) == ffi.sizeof("unsigned") assert ffi.sizeof(b[0]) == ffi.sizeof(a[0]) +def test_struct_with_func_with_struct_pointer_arg(): + ffi = FFI() + ffi.cdef("""struct BinaryTree { + int (* CompareKey)(struct BinaryTree *tree); + };""") + lib = verify(ffi, "test_struct_with_func_with_struct_pointer_arg", """ + struct BinaryTree { + int (* CompareKey)(struct BinaryTree *tree); + }; + """) + ffi.new("struct BinaryTree *") + def test_struct_with_func_with_struct_arg(): ffi = FFI() ffi.cdef("""struct BinaryTree { diff --git a/extra_tests/test_json.py b/extra_tests/test_json.py --- a/extra_tests/test_json.py +++ b/extra_tests/test_json.py @@ -48,5 +48,10 @@ == '{"3": 4, "5": 6}' def test_boolean_as_dict_key(): + # In CPython 2.x, dumps({True:...}) gives {"True":...}. It should be + # "true" instead; it's a bug as far as I can tell. In 3.x it was fixed. + # BUT! if we call dumps() with sort_keys=True, then CPython (any version) + # gives "true" instead of "True". Surprize! + # I don't want to understand why, let's just not attempt to reproduce that. assert json.dumps({True: 5}) == '{"true": 5}' assert json.dumps({False: 5}) == '{"false": 5}' diff --git a/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py b/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py --- a/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py @@ -20,7 +20,7 @@ windows_link_legacy_openssl = os.environ.get( "CRYPTOGRAPHY_WINDOWS_LINK_LEGACY_OPENSSL", None ) - if windows_link_legacy_openssl is None: + if 0 and windows_link_legacy_openssl is None: # Link against the 1.1.0 names libs = ["libssl", "libcrypto"] else: diff --git a/pypy/module/_cffi_backend/ctypefunc.py b/pypy/module/_cffi_backend/ctypefunc.py --- a/pypy/module/_cffi_backend/ctypefunc.py +++ b/pypy/module/_cffi_backend/ctypefunc.py @@ -128,6 +128,10 @@ return W_CTypePtrBase._fget(self, attrchar) def call(self, funcaddr, args_w): + if not funcaddr: + raise oefmt(self.space.w_RuntimeError, + "cannot call null function pointer from cdata '%s'", + self.name) if self.cif_descr: # regular case: this function does not take '...' 
arguments self = jit.promote(self) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -4425,3 +4425,10 @@ float(cast(BBool, 42)) with pytest.raises(TypeError): complex(cast(BBool, 42)) + +def test_cannot_call_null_function_pointer(): + BInt = new_primitive_type("int") + BFunc = new_function_type((BInt, BInt), BInt, False) + f = cast(BFunc, 0) + with pytest.raises(RuntimeError): + f(40, 2) From pypy.commits at gmail.com Sat Aug 17 04:21:51 2019 From: pypy.commits at gmail.com (arigo) Date: Sat, 17 Aug 2019 01:21:51 -0700 (PDT) Subject: [pypy-commit] pypy default: Copy the logic from bindings.py that only calls Message-ID: <5d57b91f.1c69fb81.d9929.5204@mx.google.com> Author: Armin Rigo Branch: Changeset: r97201:aa592738d692 Date: 2019-08-17 10:21 +0200 http://bitbucket.org/pypy/pypy/changeset/aa592738d692/ Log: Copy the logic from bindings.py that only calls Cryptography_setup_ssl_threads() in some cases. In other cases the function might be NULL. diff --git a/lib_pypy/_cffi_ssl/_stdssl/__init__.py b/lib_pypy/_cffi_ssl/_stdssl/__init__.py --- a/lib_pypy/_cffi_ssl/_stdssl/__init__.py +++ b/lib_pypy/_cffi_ssl/_stdssl/__init__.py @@ -120,7 +120,9 @@ # init open ssl lib.SSL_load_error_strings() lib.SSL_library_init() -lib.Cryptography_setup_ssl_threads() +if (lib.Cryptography_HAS_LOCKING_CALLBACKS and + lib.CRYPTO_get_locking_callback() == ffi.NULL): + lib.Cryptography_setup_ssl_threads() lib.OpenSSL_add_all_algorithms() def check_signals(): From pypy.commits at gmail.com Sat Aug 17 04:22:21 2019 From: pypy.commits at gmail.com (arigo) Date: Sat, 17 Aug 2019 01:22:21 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: hg merge default Message-ID: <5d57b93d.1c69fb81.bea40.abb6@mx.google.com> Author: Armin Rigo Branch: py3.6 Changeset: r97202:5b574c103316 Date: 2019-08-17 10:21 +0200 http://bitbucket.org/pypy/pypy/changeset/5b574c103316/ Log: hg merge default diff --git a/lib_pypy/_cffi_ssl/_stdssl/__init__.py b/lib_pypy/_cffi_ssl/_stdssl/__init__.py --- a/lib_pypy/_cffi_ssl/_stdssl/__init__.py +++ b/lib_pypy/_cffi_ssl/_stdssl/__init__.py @@ -121,7 +121,9 @@ # init open ssl lib.SSL_load_error_strings() lib.SSL_library_init() -lib.Cryptography_setup_ssl_threads() +if (lib.Cryptography_HAS_LOCKING_CALLBACKS and + lib.CRYPTO_get_locking_callback() == ffi.NULL): + lib.Cryptography_setup_ssl_threads() lib.OpenSSL_add_all_algorithms() def check_signals(): From pypy.commits at gmail.com Sat Aug 17 04:26:27 2019 From: pypy.commits at gmail.com (arigo) Date: Sat, 17 Aug 2019 01:26:27 -0700 (PDT) Subject: [pypy-commit] cffi default: whatsnew for 74f57a76ed53 Message-ID: <5d57ba33.1c69fb81.4d1d8.375b@mx.google.com> Author: Armin Rigo Branch: Changeset: r3286:e4bfcc3ae975 Date: 2019-08-17 10:26 +0200 http://bitbucket.org/cffi/cffi/changeset/e4bfcc3ae975/ Log: whatsnew for 74f57a76ed53 diff --git a/doc/source/whatsnew.rst b/doc/source/whatsnew.rst --- a/doc/source/whatsnew.rst +++ b/doc/source/whatsnew.rst @@ -13,6 +13,9 @@ * fix for structs containing unnamed bitfields like ``int : 1;``. 
+* when calling cdata of "function pointer" type, give a RuntimeError instead + of a crash if the pointer happens to be NULL + v1.12.3 ======= From pypy.commits at gmail.com Sat Aug 17 15:04:13 2019 From: pypy.commits at gmail.com (mattip) Date: Sat, 17 Aug 2019 12:04:13 -0700 (PDT) Subject: [pypy-commit] pypy default: MACROS is no longer part of the string fed to ffi.cdef, use TYPES instead Message-ID: <5d584fad.1c69fb81.571e7.da12@mx.google.com> Author: Matti Picus Branch: Changeset: r97203:87a095fb0cdf Date: 2019-08-17 21:47 +0300 http://bitbucket.org/pypy/pypy/changeset/87a095fb0cdf/ Log: MACROS is no longer part of the string fed to ffi.cdef, use TYPES instead diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py @@ -68,7 +68,9 @@ ); """ -MACROS = """ +# cryptography does not use MACROS anymore +# MACROS = """ +TYPES += """ #define CERT_STORE_READONLY_FLAG ... #define CERT_SYSTEM_STORE_LOCAL_MACHINE ... #define CRYPT_E_NOT_FOUND ... From pypy.commits at gmail.com Sat Aug 17 15:04:15 2019 From: pypy.commits at gmail.com (mattip) Date: Sat, 17 Aug 2019 12:04:15 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: merge default into py3.6 Message-ID: <5d584faf.1c69fb81.89bd9.3f47@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97204:d2f8e364e52d Date: 2019-08-17 22:04 +0300 http://bitbucket.org/pypy/pypy/changeset/d2f8e364e52d/ Log: merge default into py3.6 diff --git a/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py b/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py --- a/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py +++ b/lib_pypy/_cffi_ssl/_cffi_src/openssl/pypy_win32_extra.py @@ -68,7 +68,9 @@ ); """ -MACROS = """ +# cryptography does not use MACROS anymore +# MACROS = """ +TYPES += """ #define CERT_STORE_READONLY_FLAG ... #define CERT_SYSTEM_STORE_LOCAL_MACHINE ... #define CRYPT_E_NOT_FOUND ... From pypy.commits at gmail.com Sun Aug 18 00:30:30 2019 From: pypy.commits at gmail.com (mattip) Date: Sat, 17 Aug 2019 21:30:30 -0700 (PDT) Subject: [pypy-commit] buildbot default: change scheduling to reduce overlap on win32, aarch64 Message-ID: <5d58d466.1c69fb81.10efc.21e1@mx.google.com> Author: Matti Picus Branch: Changeset: r1093:e388d803194c Date: 2019-08-18 07:30 +0300 http://bitbucket.org/pypy/buildbot/changeset/e388d803194c/ Log: change scheduling to reduce overlap on win32, aarch64 diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py --- a/bot2/pypybuildbot/master.py +++ b/bot2/pypybuildbot/master.py @@ -300,12 +300,10 @@ # the benchmarks run on benchmarker and (planned) speed-old.python.org. # 64 bit linux tests run on bencher4.soft-dev.org. # 32 bit linux tests run on benchmarker. 
- Nightly("nightly-0-00", [ + Nightly("nightly-translate", [ # linux tests LINUX32OWN, # on benchmarker4_32, uses all cores LINUX64OWN, # on bencher4, uses all cores - AARCH64OWN, - WIN32OWN, # on SalsaSalsa LINUX_S390XOWN, JITLINUX32, # on benchmarker4_32, uses 1 core JITLINUX64, # on bencher4, uses 1 core @@ -326,13 +324,21 @@ onlyIfChanged=True, ), + Nightly("nightly-own", [ + # linux tests + AARCH64OWN, + WIN32OWN, # on SalsaSalsa + ], branch='default', hour=3, minute=0, + onlyIfChanged=True, + ), + Nightly("nightly-0-01", [ LINUX32RPYTHON, # on benchermarker_32, uses all cores LINUX64RPYTHON, # on bencher4, uses all cores AARCH64RPYTHON, WIN32RPYTHON, # on SalsaSalsa LINUX_S390XRPYTHON, - ], branch='default', hour=0, minute=0, onlyIfChanged=True, + ], branch='default', hour=3, minute=0, onlyIfChanged=True, fileIsImportant=isRPython, change_filter=filter.ChangeFilter(branch='default'), ), @@ -357,16 +363,15 @@ #Nightly("nightly-3-01-py3.5", [LINUX64, JITLINUX64,], # branch="py3.5", hour=3, minute=0), - Nightly("nightly-3-00-py3.6", [ + Nightly("nightly-py3.6", [ LINUX32OWN, # on bencher4_32, uses all cores JITLINUX32, # on bencher4_32, uses 1 core LINUX64OWN, # on bencher4, uses all cores - AARCH64OWN, JITLINUX64, # on bencher4, uses 1 core JITAARCH64, JITMACOSX64, # on xerxes JITWIN32, # on SalsaSalsa - ], branch="py3.6", hour=3, minute=0, + ], branch="py3.6", hour=7, minute=0, # onlyIfChanged=True, # doesn't work - no builds are triggered 2019-04-23 ), From pypy.commits at gmail.com Sun Aug 18 02:47:38 2019 From: pypy.commits at gmail.com (arigo) Date: Sat, 17 Aug 2019 23:47:38 -0700 (PDT) Subject: [pypy-commit] pypy default: fix (uh, why support instance_ptr_eq but miss instance_ptr_ne??) Message-ID: <5d58f48a.1c69fb81.5bc75.6d28@mx.google.com> Author: Armin Rigo Branch: Changeset: r97205:7200528f8f4d Date: 2019-08-18 08:47 +0200 http://bitbucket.org/pypy/pypy/changeset/7200528f8f4d/ Log: fix (uh, why support instance_ptr_eq but miss instance_ptr_ne??) 
diff --git a/rpython/jit/backend/aarch64/regalloc.py b/rpython/jit/backend/aarch64/regalloc.py --- a/rpython/jit/backend/aarch64/regalloc.py +++ b/rpython/jit/backend/aarch64/regalloc.py @@ -419,7 +419,7 @@ prepare_comp_op_int_ne = prepare_int_cmp prepare_comp_op_int_eq = prepare_int_cmp prepare_comp_op_ptr_eq = prepare_comp_op_instance_ptr_eq = prepare_int_cmp - prepare_comp_op_ptr_ne = prepare_int_cmp + prepare_comp_op_ptr_ne = prepare_comp_op_instance_ptr_ne = prepare_int_cmp prepare_comp_op_uint_lt = prepare_int_cmp prepare_comp_op_uint_le = prepare_int_cmp prepare_comp_op_uint_ge = prepare_int_cmp From pypy.commits at gmail.com Sun Aug 18 02:49:43 2019 From: pypy.commits at gmail.com (arigo) Date: Sat, 17 Aug 2019 23:49:43 -0700 (PDT) Subject: [pypy-commit] pypy default: bah, one more Message-ID: <5d58f507.1c69fb81.7990e.09f7@mx.google.com> Author: Armin Rigo Branch: Changeset: r97206:df974ce2019b Date: 2019-08-18 08:49 +0200 http://bitbucket.org/pypy/pypy/changeset/df974ce2019b/ Log: bah, one more diff --git a/rpython/jit/backend/aarch64/opassembler.py b/rpython/jit/backend/aarch64/opassembler.py --- a/rpython/jit/backend/aarch64/opassembler.py +++ b/rpython/jit/backend/aarch64/opassembler.py @@ -168,7 +168,7 @@ self.emit_int_comp_op(op, arglocs[0], arglocs[1]) return c.NE - emit_comp_op_ptr_ne = emit_comp_op_int_ne + emit_comp_op_ptr_ne = emit_comp_op_instance_ptr_ne = emit_comp_op_int_ne def emit_comp_op_uint_lt(self, op, arglocs): self.emit_int_comp_op(op, arglocs[0], arglocs[1]) From pypy.commits at gmail.com Sun Aug 18 02:59:12 2019 From: pypy.commits at gmail.com (mattip) Date: Sat, 17 Aug 2019 23:59:12 -0700 (PDT) Subject: [pypy-commit] buildbot default: Backed out changeset: e388d803194c Message-ID: <5d58f740.1c69fb81.63fa2.8ab5@mx.google.com> Author: Matti Picus Branch: Changeset: r1094:ba7a21c88b5a Date: 2019-08-18 09:58 +0300 http://bitbucket.org/pypy/buildbot/changeset/ba7a21c88b5a/ Log: Backed out changeset: e388d803194c diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py --- a/bot2/pypybuildbot/master.py +++ b/bot2/pypybuildbot/master.py @@ -300,10 +300,12 @@ # the benchmarks run on benchmarker and (planned) speed-old.python.org. # 64 bit linux tests run on bencher4.soft-dev.org. # 32 bit linux tests run on benchmarker. 
- Nightly("nightly-translate", [ + Nightly("nightly-0-00", [ # linux tests LINUX32OWN, # on benchmarker4_32, uses all cores LINUX64OWN, # on bencher4, uses all cores + AARCH64OWN, + WIN32OWN, # on SalsaSalsa LINUX_S390XOWN, JITLINUX32, # on benchmarker4_32, uses 1 core JITLINUX64, # on bencher4, uses 1 core @@ -324,21 +326,13 @@ onlyIfChanged=True, ), - Nightly("nightly-own", [ - # linux tests - AARCH64OWN, - WIN32OWN, # on SalsaSalsa - ], branch='default', hour=3, minute=0, - onlyIfChanged=True, - ), - Nightly("nightly-0-01", [ LINUX32RPYTHON, # on benchermarker_32, uses all cores LINUX64RPYTHON, # on bencher4, uses all cores AARCH64RPYTHON, WIN32RPYTHON, # on SalsaSalsa LINUX_S390XRPYTHON, - ], branch='default', hour=3, minute=0, onlyIfChanged=True, + ], branch='default', hour=0, minute=0, onlyIfChanged=True, fileIsImportant=isRPython, change_filter=filter.ChangeFilter(branch='default'), ), @@ -363,15 +357,16 @@ #Nightly("nightly-3-01-py3.5", [LINUX64, JITLINUX64,], # branch="py3.5", hour=3, minute=0), - Nightly("nightly-py3.6", [ + Nightly("nightly-3-00-py3.6", [ LINUX32OWN, # on bencher4_32, uses all cores JITLINUX32, # on bencher4_32, uses 1 core LINUX64OWN, # on bencher4, uses all cores + AARCH64OWN, JITLINUX64, # on bencher4, uses 1 core JITAARCH64, JITMACOSX64, # on xerxes JITWIN32, # on SalsaSalsa - ], branch="py3.6", hour=7, minute=0, + ], branch="py3.6", hour=3, minute=0, # onlyIfChanged=True, # doesn't work - no builds are triggered 2019-04-23 ), From pypy.commits at gmail.com Sun Aug 18 04:42:36 2019 From: pypy.commits at gmail.com (mattip) Date: Sun, 18 Aug 2019 01:42:36 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: test, fix error message for CPython3 compatibility Message-ID: <5d590f7c.1c69fb81.618a0.41a2@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97207:2e89e7ae99c0 Date: 2019-08-18 10:27 +0300 http://bitbucket.org/pypy/pypy/changeset/2e89e7ae99c0/ Log: test, fix error message for CPython3 compatibility diff --git a/pypy/interpreter/test/test_typedef.py b/pypy/interpreter/test/test_typedef.py --- a/pypy/interpreter/test/test_typedef.py +++ b/pypy/interpreter/test/test_typedef.py @@ -182,7 +182,7 @@ self.space.appexec([w_obj], """(obj): assert type(obj).__hash__ is None err = raises(TypeError, hash, obj) - assert str(err.value) == "'some_type' objects are unhashable" + assert str(err.value) == "unhashable type: 'some_type'" """) def test_destructor(self): diff --git a/pypy/objspace/descroperation.py b/pypy/objspace/descroperation.py --- a/pypy/objspace/descroperation.py +++ b/pypy/objspace/descroperation.py @@ -438,7 +438,7 @@ return default_identity_hash(space, w_obj) if space.is_w(w_hash, space.w_None): raise oefmt(space.w_TypeError, - "'%T' objects are unhashable", w_obj) + "unhashable type: '%T'", w_obj) w_result = space.get_and_call_function(w_hash, w_obj) if not space.isinstance_w(w_result, space.w_int): raise oefmt(space.w_TypeError, diff --git a/testrunner/get_info.py b/testrunner/get_info.py --- a/testrunner/get_info.py +++ b/testrunner/get_info.py @@ -13,7 +13,6 @@ # PyPy uses bin as of PR https://github.com/pypa/virtualenv/pull/1400 TARGET_DIR = 'bin' else: - TARGET_NAME = 'pypy-c' TARGET_NAME = 'pypy3-c' TARGET_DIR = 'bin' VENV_DIR = 'pypy-venv' From pypy.commits at gmail.com Sun Aug 18 04:42:38 2019 From: pypy.commits at gmail.com (mattip) Date: Sun, 18 Aug 2019 01:42:38 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: fix test (do not override baseclass setup_class, remove redundant skip clause) Message-ID: 
<5d590f7e.1c69fb81.6ed08.9694@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97208:283ba49cc4e4 Date: 2019-08-18 10:55 +0300 http://bitbucket.org/pypy/pypy/changeset/283ba49cc4e4/ Log: fix test (do not override baseclass setup_class, remove redundant skip clause) diff --git a/pypy/module/__pypy__/test/test_signal.py b/pypy/module/__pypy__/test/test_signal.py --- a/pypy/module/__pypy__/test/test_signal.py +++ b/pypy/module/__pypy__/test/test_signal.py @@ -16,9 +16,6 @@ class AppTestThreadSignal(GenericTestThread): spaceconfig = dict(usemodules=['__pypy__', 'thread', 'signal', 'time']) - def setup_class(cls): - cls.w_runappdirect = cls.space.wrap(cls.runappdirect) - def test_exit_twice(self): import __pypy__, _thread __pypy__.thread._signals_exit() @@ -109,8 +106,7 @@ spaceconfig = dict(usemodules=['__pypy__', 'thread', 'signal']) def setup_class(cls): - if (not cls.runappdirect or - '__pypy__' not in sys.builtin_module_names): + if (not cls.runappdirect): import py py.test.skip("this is only a test for -A runs on top of pypy") From pypy.commits at gmail.com Sun Aug 18 04:42:39 2019 From: pypy.commits at gmail.com (mattip) Date: Sun, 18 Aug 2019 01:42:39 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: add gc.collect to make tests pass skip test that uses gc.threshold Message-ID: <5d590f7f.1c69fb81.84b59.7208@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97209:a8ced5af1cae Date: 2019-08-18 11:40 +0300 http://bitbucket.org/pypy/pypy/changeset/a8ced5af1cae/ Log: add gc.collect to make tests pass skip test that uses gc.threshold diff --git a/lib-python/3/test/_test_multiprocessing.py b/lib-python/3/test/_test_multiprocessing.py --- a/lib-python/3/test/_test_multiprocessing.py +++ b/lib-python/3/test/_test_multiprocessing.py @@ -422,6 +422,8 @@ del c p.start() p.join() + for i in range(3): + gc.collect() self.assertIs(wr(), None) self.assertEqual(q.get(), 5) close_queue(q) @@ -2283,6 +2285,8 @@ self.pool.map(identity, objs) del objs + for i in range(3): + gc.collect() time.sleep(DELTA) # let threaded cleanup code run self.assertEqual(set(wr() for wr in refs), {None}) # With a process pool, copies of the objects are returned, check @@ -3276,6 +3280,8 @@ util._finalizer_registry.clear() def tearDown(self): + for i in range(3): + gc.collect() self.assertFalse(util._finalizer_registry) util._finalizer_registry.update(self.registry_backup) @@ -3330,6 +3336,7 @@ result = [obj for obj in iter(conn.recv, 'STOP')] self.assertEqual(result, ['a', 'b', 'd10', 'd03', 'd02', 'd01', 'e']) + @test.support.cpython_only def test_thread_safety(self): # bpo-24484: _run_finalizers() should be thread-safe def cb(): From pypy.commits at gmail.com Sun Aug 18 07:19:54 2019 From: pypy.commits at gmail.com (cfbolz) Date: Sun, 18 Aug 2019 04:19:54 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: ImportError exposes a .msg now Message-ID: <5d59345a.1c69fb81.85d30.874b@mx.google.com> Author: Carl Friedrich Bolz-Tereick Branch: py3.6 Changeset: r97210:ba0a52e09be0 Date: 2019-08-18 13:03 +0200 http://bitbucket.org/pypy/pypy/changeset/ba0a52e09be0/ Log: ImportError exposes a .msg now diff --git a/pypy/interpreter/pyparser/test/test_pyparse.py b/pypy/interpreter/pyparser/test/test_pyparse.py --- a/pypy/interpreter/pyparser/test/test_pyparse.py +++ b/pypy/interpreter/pyparser/test/test_pyparse.py @@ -389,6 +389,11 @@ info = py.test.raises(SyntaxError, self.parse, "def f:\n print 1") assert "(expected '(')" in info.value.msg + def test_error_print_without_parens(self): + import pdb; pdb.set_trace() + info 
= py.test.raises(SyntaxError, self.parse, "print 1") + assert "Missing parentheses in call to 'print'" in info.value.msg + class TestPythonParserRevDB(TestPythonParser): spaceconfig = {"translation.reverse_debugger": True} diff --git a/pypy/module/exceptions/interp_exceptions.py b/pypy/module/exceptions/interp_exceptions.py --- a/pypy/module/exceptions/interp_exceptions.py +++ b/pypy/module/exceptions/interp_exceptions.py @@ -322,6 +322,7 @@ """Import can't find module, or can't find name in module.""" w_name = None w_path = None + w_msg = None @jit.unroll_safe def descr_init(self, space, __args__): @@ -335,6 +336,10 @@ space.w_TypeError, "'%s' is an invalid keyword argument for this function", keyword) + if len(args_w) == 1: + self.w_msg = args_w[0] + else: + self.w_msg = space.w_None W_Exception.descr_init(self, space, args_w) @@ -347,6 +352,7 @@ __init__ = interp2app(W_ImportError.descr_init), name = readwrite_attrproperty_w('w_name', W_ImportError), path = readwrite_attrproperty_w('w_path', W_ImportError), + msg = readwrite_attrproperty_w('w_msg', W_ImportError), ) diff --git a/pypy/module/exceptions/test/test_exc.py b/pypy/module/exceptions/test/test_exc.py --- a/pypy/module/exceptions/test/test_exc.py +++ b/pypy/module/exceptions/test/test_exc.py @@ -298,6 +298,11 @@ assert ImportError("message", name="x").name == "x" assert ImportError("message", path="y").path == "y" raises(TypeError, ImportError, invalid="z") + assert ImportError("message").msg == "message" + assert ImportError("message").args == ("message", ) + assert ImportError("message", "foo").msg is None + assert ImportError("message", "foo").args == ("message", "foo") + def test_modulenotfounderror(self): assert ModuleNotFoundError("message").name is None From pypy.commits at gmail.com Sun Aug 18 07:19:56 2019 From: pypy.commits at gmail.com (cfbolz) Date: Sun, 18 Aug 2019 04:19:56 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: adopt changes in BaseException.__repr__ Message-ID: <5d59345c.1c69fb81.ff580.6109@mx.google.com> Author: Carl Friedrich Bolz-Tereick Branch: py3.6 Changeset: r97211:6d08856e48f3 Date: 2019-08-18 13:19 +0200 http://bitbucket.org/pypy/pypy/changeset/6d08856e48f3/ Log: adopt changes in BaseException.__repr__ diff --git a/pypy/module/exceptions/interp_exceptions.py b/pypy/module/exceptions/interp_exceptions.py --- a/pypy/module/exceptions/interp_exceptions.py +++ b/pypy/module/exceptions/interp_exceptions.py @@ -150,8 +150,11 @@ def descr_repr(self, space): if self.args_w: - args_repr = space.utf8_w( - space.repr(space.newtuple(self.args_w))) + if len(self.args_w) == 1: + args_repr = b"(%s)" % (space.utf8_w(space.repr(self.args_w[0])), ) + else: + args_repr = space.utf8_w( + space.repr(space.newtuple(self.args_w))) else: args_repr = b"()" clsname = self.getclass(space).getname(space) diff --git a/pypy/module/exceptions/test/test_exc.py b/pypy/module/exceptions/test/test_exc.py --- a/pypy/module/exceptions/test/test_exc.py +++ b/pypy/module/exceptions/test/test_exc.py @@ -10,7 +10,7 @@ assert repr(BaseException()) == 'BaseException()' raises(AttributeError, getattr, BaseException(), 'message') raises(AttributeError, getattr, BaseException(3), 'message') - assert repr(BaseException(3)) == 'BaseException(3,)' + assert repr(BaseException(3)) == 'BaseException(3)' assert str(BaseException(3)) == '3' assert BaseException().args == () assert BaseException(3).args == (3,) @@ -310,7 +310,7 @@ assert ModuleNotFoundError("message", name="x").name == "x" assert ModuleNotFoundError("message", path="y").path == "y" 
raises(TypeError, ModuleNotFoundError, invalid="z") - assert repr(ModuleNotFoundError('test')) == "ModuleNotFoundError('test',)" + assert repr(ModuleNotFoundError('test')) == "ModuleNotFoundError('test')" def test_blockingioerror(self): args = ("a", "b", "c", "d", "e") From pypy.commits at gmail.com Mon Aug 19 03:05:09 2019 From: pypy.commits at gmail.com (mattip) Date: Mon, 19 Aug 2019 00:05:09 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: remove pdb from test Message-ID: <5d5a4a25.1c69fb81.f7495.65fa@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97212:7f4a628f9d50 Date: 2019-08-19 09:03 +0300 http://bitbucket.org/pypy/pypy/changeset/7f4a628f9d50/ Log: remove pdb from test diff --git a/pypy/interpreter/pyparser/test/test_pyparse.py b/pypy/interpreter/pyparser/test/test_pyparse.py --- a/pypy/interpreter/pyparser/test/test_pyparse.py +++ b/pypy/interpreter/pyparser/test/test_pyparse.py @@ -390,7 +390,6 @@ assert "(expected '(')" in info.value.msg def test_error_print_without_parens(self): - import pdb; pdb.set_trace() info = py.test.raises(SyntaxError, self.parse, "print 1") assert "Missing parentheses in call to 'print'" in info.value.msg From pypy.commits at gmail.com Mon Aug 19 03:05:11 2019 From: pypy.commits at gmail.com (mattip) Date: Mon, 19 Aug 2019 00:05:11 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: add some time.sleep to hellp thread emulation Message-ID: <5d5a4a27.1c69fb81.32a6d.b485@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97213:3891e175ae0e Date: 2019-08-19 10:02 +0300 http://bitbucket.org/pypy/pypy/changeset/3891e175ae0e/ Log: add some time.sleep to hellp thread emulation diff --git a/pypy/module/__pypy__/test/test_signal.py b/pypy/module/__pypy__/test/test_signal.py --- a/pypy/module/__pypy__/test/test_signal.py +++ b/pypy/module/__pypy__/test/test_signal.py @@ -72,19 +72,19 @@ def test_thread_fork_signals(self): import __pypy__ - import os, _thread, signal + import os, _thread, signal, time if not hasattr(os, 'fork'): skip("No fork on this platform") def fork(): + time.sleep(0.1) with __pypy__.thread.signals_enabled: return os.fork() def threadfunction(): pid = fork() if pid == 0: - print('in child') # signal() only works from the 'main' thread signal.signal(signal.SIGUSR1, signal.SIG_IGN) os._exit(42) @@ -95,6 +95,7 @@ feedback = [] _thread.start_new_thread(threadfunction, ()) + time.sleep(3) self.waitfor(lambda: feedback) # if 0, an (unraisable) exception was raised from the forked thread. # if 9, process was killed by timer. 
From pypy.commits at gmail.com Mon Aug 19 03:05:12 2019 From: pypy.commits at gmail.com (mattip) Date: Mon, 19 Aug 2019 00:05:12 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: back out changeset 6d08856e48f3 which is for python3.7 Message-ID: <5d5a4a28.1c69fb81.1db4f.4f7e@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97214:ec4c8dc20895 Date: 2019-08-19 10:04 +0300 http://bitbucket.org/pypy/pypy/changeset/ec4c8dc20895/ Log: back out changeset 6d08856e48f3 which is for python3.7 diff --git a/pypy/module/exceptions/interp_exceptions.py b/pypy/module/exceptions/interp_exceptions.py --- a/pypy/module/exceptions/interp_exceptions.py +++ b/pypy/module/exceptions/interp_exceptions.py @@ -150,11 +150,8 @@ def descr_repr(self, space): if self.args_w: - if len(self.args_w) == 1: - args_repr = b"(%s)" % (space.utf8_w(space.repr(self.args_w[0])), ) - else: - args_repr = space.utf8_w( - space.repr(space.newtuple(self.args_w))) + args_repr = space.utf8_w( + space.repr(space.newtuple(self.args_w))) else: args_repr = b"()" clsname = self.getclass(space).getname(space) diff --git a/pypy/module/exceptions/test/test_exc.py b/pypy/module/exceptions/test/test_exc.py --- a/pypy/module/exceptions/test/test_exc.py +++ b/pypy/module/exceptions/test/test_exc.py @@ -10,7 +10,7 @@ assert repr(BaseException()) == 'BaseException()' raises(AttributeError, getattr, BaseException(), 'message') raises(AttributeError, getattr, BaseException(3), 'message') - assert repr(BaseException(3)) == 'BaseException(3)' + assert repr(BaseException(3)) == 'BaseException(3,)' assert str(BaseException(3)) == '3' assert BaseException().args == () assert BaseException(3).args == (3,) @@ -310,7 +310,7 @@ assert ModuleNotFoundError("message", name="x").name == "x" assert ModuleNotFoundError("message", path="y").path == "y" raises(TypeError, ModuleNotFoundError, invalid="z") - assert repr(ModuleNotFoundError('test')) == "ModuleNotFoundError('test')" + assert repr(ModuleNotFoundError('test')) == "ModuleNotFoundError('test',)" def test_blockingioerror(self): args = ("a", "b", "c", "d", "e") From pypy.commits at gmail.com Mon Aug 19 03:18:20 2019 From: pypy.commits at gmail.com (mattip) Date: Mon, 19 Aug 2019 00:18:20 -0700 (PDT) Subject: [pypy-commit] pypy default: resync vmprof Message-ID: <5d5a4d3c.1c69fb81.84b59.212f@mx.google.com> Author: Matti Picus Branch: Changeset: r97215:8837d155d951 Date: 2019-08-19 10:17 +0300 http://bitbucket.org/pypy/pypy/changeset/8837d155d951/ Log: resync vmprof diff --git a/rpython/rlib/rvmprof/src/shared/vmprof_common.c b/rpython/rlib/rvmprof/src/shared/vmprof_common.c --- a/rpython/rlib/rvmprof/src/shared/vmprof_common.c +++ b/rpython/rlib/rvmprof/src/shared/vmprof_common.c @@ -244,9 +244,9 @@ return -1; if (thread_count == threads_size) { threads_size += threads_size_step; - threads = realloc(threads, sizeof(pid_t) * threads_size); + threads = realloc(threads, sizeof(pthread_t) * threads_size); assert(threads != NULL); - memset(threads + thread_count, 0, sizeof(pid_t) * threads_size_step); + memset(threads + thread_count, 0, sizeof(pthread_t) * threads_size_step); } threads[thread_count++] = tid; return thread_count; From pypy.commits at gmail.com Mon Aug 19 03:18:22 2019 From: pypy.commits at gmail.com (mattip) Date: Mon, 19 Aug 2019 00:18:22 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: merge default into py3.6 Message-ID: <5d5a4d3e.1c69fb81.a452d.1aba@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97216:8df39cb01740 Date: 2019-08-19 10:17 +0300 
http://bitbucket.org/pypy/pypy/changeset/8df39cb01740/ Log: merge default into py3.6 diff --git a/rpython/jit/backend/aarch64/opassembler.py b/rpython/jit/backend/aarch64/opassembler.py --- a/rpython/jit/backend/aarch64/opassembler.py +++ b/rpython/jit/backend/aarch64/opassembler.py @@ -168,7 +168,7 @@ self.emit_int_comp_op(op, arglocs[0], arglocs[1]) return c.NE - emit_comp_op_ptr_ne = emit_comp_op_int_ne + emit_comp_op_ptr_ne = emit_comp_op_instance_ptr_ne = emit_comp_op_int_ne def emit_comp_op_uint_lt(self, op, arglocs): self.emit_int_comp_op(op, arglocs[0], arglocs[1]) diff --git a/rpython/jit/backend/aarch64/regalloc.py b/rpython/jit/backend/aarch64/regalloc.py --- a/rpython/jit/backend/aarch64/regalloc.py +++ b/rpython/jit/backend/aarch64/regalloc.py @@ -419,7 +419,7 @@ prepare_comp_op_int_ne = prepare_int_cmp prepare_comp_op_int_eq = prepare_int_cmp prepare_comp_op_ptr_eq = prepare_comp_op_instance_ptr_eq = prepare_int_cmp - prepare_comp_op_ptr_ne = prepare_int_cmp + prepare_comp_op_ptr_ne = prepare_comp_op_instance_ptr_ne = prepare_int_cmp prepare_comp_op_uint_lt = prepare_int_cmp prepare_comp_op_uint_le = prepare_int_cmp prepare_comp_op_uint_ge = prepare_int_cmp diff --git a/rpython/rlib/rvmprof/src/shared/vmprof_common.c b/rpython/rlib/rvmprof/src/shared/vmprof_common.c --- a/rpython/rlib/rvmprof/src/shared/vmprof_common.c +++ b/rpython/rlib/rvmprof/src/shared/vmprof_common.c @@ -244,9 +244,9 @@ return -1; if (thread_count == threads_size) { threads_size += threads_size_step; - threads = realloc(threads, sizeof(pid_t) * threads_size); + threads = realloc(threads, sizeof(pthread_t) * threads_size); assert(threads != NULL); - memset(threads + thread_count, 0, sizeof(pid_t) * threads_size_step); + memset(threads + thread_count, 0, sizeof(pthread_t) * threads_size_step); } threads[thread_count++] = tid; return thread_count; From pypy.commits at gmail.com Mon Aug 19 16:20:10 2019 From: pypy.commits at gmail.com (arigo) Date: Mon, 19 Aug 2019 13:20:10 -0700 (PDT) Subject: [pypy-commit] pypy py3.6-sandbox-2: tweak Message-ID: <5d5b047a.1c69fb81.ed3eb.142b@mx.google.com> Author: Armin Rigo Branch: py3.6-sandbox-2 Changeset: r97219:2dec2639c2f9 Date: 2019-08-19 21:13 +0200 http://bitbucket.org/pypy/pypy/changeset/2dec2639c2f9/ Log: tweak diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py --- a/pypy/module/time/interp_time.py +++ b/pypy/module/time/interp_time.py @@ -299,7 +299,7 @@ return space.newfloat( widen(timeval.c_tv_sec) + widen(timeval.c_tv_usec) * 1e-6) - if HAVE_FTIME: + if HAVE_FTIME and not space.config.translation.sandbox: with lltype.scoped_alloc(TIMEB) as t: c_ftime(t) result = (widen(t.c_time) + From pypy.commits at gmail.com Mon Aug 19 16:20:12 2019 From: pypy.commits at gmail.com (arigo) Date: Mon, 19 Aug 2019 13:20:12 -0700 (PDT) Subject: [pypy-commit] pypy py3.6-sandbox-2: Don't call syscall() from the sandbox Message-ID: <5d5b047c.1c69fb81.f67e3.3e72@mx.google.com> Author: Armin Rigo Branch: py3.6-sandbox-2 Changeset: r97220:0ae0d48891a6 Date: 2019-08-19 22:19 +0200 http://bitbucket.org/pypy/pypy/changeset/0ae0d48891a6/ Log: Don't call syscall() from the sandbox diff --git a/rpython/rlib/rurandom.py b/rpython/rlib/rurandom.py --- a/rpython/rlib/rurandom.py +++ b/rpython/rlib/rurandom.py @@ -6,7 +6,7 @@ import errno from rpython.rtyper.lltypesystem import lltype, rffi -from rpython.rlib.objectmodel import not_rpython +from rpython.rlib.objectmodel import not_rpython, fetch_translated_config from rpython.translator.tool.cbuild import 
ExternalCompilationInfo from rpython.rtyper.tool import rffi_platform @@ -148,7 +148,9 @@ # initialize the random seed of string hashes result = [] if SYS_getrandom is not None: - n = _getrandom(n, result, signal_checker) + config = fetch_translated_config() + if config is None or not config.translation.sandbox: + n = _getrandom(n, result, signal_checker) if n <= 0: return ''.join(result) From pypy.commits at gmail.com Tue Aug 20 04:34:47 2019 From: pypy.commits at gmail.com (arigo) Date: Tue, 20 Aug 2019 01:34:47 -0700 (PDT) Subject: [pypy-commit] pypy py3.6-sandbox-2: Remove old tests Message-ID: <5d5bb0a7.1c69fb81.d2f28.5d61@mx.google.com> Author: Armin Rigo Branch: py3.6-sandbox-2 Changeset: r97221:150338122bc4 Date: 2019-08-20 10:17 +0200 http://bitbucket.org/pypy/pypy/changeset/150338122bc4/ Log: Remove old tests diff --git a/rpython/translator/sandbox/test/test_sandlib.py b/rpython/translator/sandbox/test/test_sandlib.py deleted file mode 100644 --- a/rpython/translator/sandbox/test/test_sandlib.py +++ /dev/null @@ -1,267 +0,0 @@ -import py -import errno, os, StringIO -from rpython.tool.sourcetools import func_with_new_name -from rpython.rtyper.lltypesystem import rffi -from rpython.translator.sandbox.sandlib import SandboxedProc -from rpython.translator.sandbox.sandlib import SimpleIOSandboxedProc -from rpython.translator.sandbox.sandlib import VirtualizedSandboxedProc -from rpython.translator.sandbox.sandlib import VirtualizedSocketProc -from rpython.translator.sandbox.test.test_sandbox import compile -from rpython.translator.sandbox.vfs import Dir, File, RealDir, RealFile - - -class MockSandboxedProc(SandboxedProc): - """A sandbox process wrapper that replays expected syscalls.""" - - def __init__(self, args, expected): - SandboxedProc.__init__(self, args) - self.expected = expected - self.seen = 0 - - def _make_method(name): - def do_xxx(self, *input): - print "decoded from subprocess: %s%r" % (name, input) - expectedmsg, expectedinput, output = self.expected[self.seen] - assert name == expectedmsg - assert input == expectedinput - self.seen += 1 - if isinstance(output, Exception): - raise output - return output - return func_with_new_name(do_xxx, 'do_%s' % name) - - do_ll_os__ll_os_open = _make_method("open") - do_ll_os__ll_os_read = _make_method("read") - do_ll_os__ll_os_write = _make_method("write") - do_ll_os__ll_os_close = _make_method("close") - - -def test_lib(): - def entry_point(argv): - fd = os.open("/tmp/foobar", os.O_RDONLY, 0777) - assert fd == 77 - res = os.read(fd, 123) - assert res == "he\x00llo" - count = os.write(fd, "world\x00!\x00") - assert count == 42 - for arg in argv: - count = os.write(fd, arg) - assert count == 61 - os.close(fd) - return 0 - exe = compile(entry_point) - - proc = MockSandboxedProc([exe, 'x1', 'y2'], expected = [ - ("open", ("/tmp/foobar", os.O_RDONLY, 0777), 77), - ("read", (77, 123), "he\x00llo"), - ("write", (77, "world\x00!\x00"), 42), - ("write", (77, exe), 61), - ("write", (77, "x1"), 61), - ("write", (77, "y2"), 61), - ("close", (77,), None), - ]) - proc.handle_forever() - assert proc.seen == len(proc.expected) - -def test_foobar(): - py.test.skip("to be updated") - foobar = rffi.llexternal("foobar", [rffi.CCHARP], rffi.LONG) - def entry_point(argv): - s = rffi.str2charp(argv[1]); n = foobar(s); rffi.free_charp(s) - s = rffi.str2charp(argv[n]); n = foobar(s); rffi.free_charp(s) - return n - exe = compile(entry_point) - - proc = MockSandboxedProc([exe, 'spam', 'egg'], expected = [ - ("foobar", ("spam",), 2), - ("foobar", ("egg",), 
0), - ]) - proc.handle_forever() - assert proc.seen == len(proc.expected) - -def test_simpleio(): - def entry_point(argv): - print "Please enter a number:" - buf = "" - while True: - t = os.read(0, 1) # 1 character from stdin - if not t: - raise EOFError - if t == '\n': - break - buf += t - num = int(buf) - print "The double is:", num * 2 - return 0 - exe = compile(entry_point) - - proc = SimpleIOSandboxedProc([exe, 'x1', 'y2']) - output, error = proc.communicate("21\n") - assert output == "Please enter a number:\nThe double is: 42\n" - assert error == "" - -def test_socketio(): - class SocketProc(VirtualizedSocketProc, SimpleIOSandboxedProc): - def build_virtual_root(self): - pass - - def entry_point(argv): - fd = os.open("tcp://python.org:80", os.O_RDONLY, 0777) - os.write(fd, 'GET /\n') - print os.read(fd, 50) - return 0 - exe = compile(entry_point) - - proc = SocketProc([exe]) - output, error = proc.communicate("") - assert output.startswith('HTTP/1.0 400 Bad request') - -def test_oserror(): - def entry_point(argv): - try: - os.open("/tmp/foobar", os.O_RDONLY, 0777) - except OSError as e: - os.close(e.errno) # nonsense, just to see outside - return 0 - exe = compile(entry_point) - - proc = MockSandboxedProc([exe], expected = [ - ("open", ("/tmp/foobar", os.O_RDONLY, 0777), OSError(-42, "baz")), - ("close", (-42,), None), - ]) - proc.handle_forever() - assert proc.seen == len(proc.expected) - - -class SandboxedProcWithFiles(VirtualizedSandboxedProc, SimpleIOSandboxedProc): - """A sandboxed process with a simple virtualized filesystem. - - For testing file operations. - - """ - def build_virtual_root(self): - return Dir({ - 'hi.txt': File("Hello, world!\n"), - 'this.pyc': RealFile(__file__), - }) - -def test_too_many_opens(): - def entry_point(argv): - try: - open_files = [] - for i in range(500): - fd = os.open('/hi.txt', os.O_RDONLY, 0777) - open_files.append(fd) - txt = os.read(fd, 100) - if txt != "Hello, world!\n": - print "Wrong content: %s" % txt - except OSError as e: - # We expect to get EMFILE, for opening too many files. - if e.errno != errno.EMFILE: - print "OSError: %s!" % (e.errno,) - else: - print "We opened 500 fake files! Shouldn't have been able to." - - for fd in open_files: - os.close(fd) - - try: - open_files = [] - for i in range(500): - fd = os.open('/this.pyc', os.O_RDONLY, 0777) - open_files.append(fd) - except OSError as e: - # We expect to get EMFILE, for opening too many files. - if e.errno != errno.EMFILE: - print "OSError: %s!" % (e.errno,) - else: - print "We opened 500 real files! Shouldn't have been able to." - - print "All ok!" - return 0 - exe = compile(entry_point) - - proc = SandboxedProcWithFiles([exe]) - output, error = proc.communicate("") - assert output == "All ok!\n" - assert error == "" - -def test_fstat(): - def compare(a, b, i): - if a != b: - print "stat and fstat differ @%d: %s != %s" % (i, a, b) - - def entry_point(argv): - try: - # Open a file, and compare stat and fstat - fd = os.open('/hi.txt', os.O_RDONLY, 0777) - st = os.stat('/hi.txt') - fs = os.fstat(fd) - # RPython requires the index for stat to be a constant.. :( - compare(st[0], fs[0], 0) - compare(st[1], fs[1], 1) - compare(st[2], fs[2], 2) - compare(st[3], fs[3], 3) - compare(st[4], fs[4], 4) - compare(st[5], fs[5], 5) - compare(st[6], fs[6], 6) - compare(st[7], fs[7], 7) - compare(st[8], fs[8], 8) - compare(st[9], fs[9], 9) - except OSError as e: - print "OSError: %s" % (e.errno,) - print "All ok!" 
- return 0 - exe = compile(entry_point) - - proc = SandboxedProcWithFiles([exe]) - output, error = proc.communicate("") - assert output == "All ok!\n" - assert error == "" - -def test_lseek(): - def char_should_be(c, should): - if c != should: - print "Wrong char: '%s' should be '%s'" % (c, should) - - def entry_point(argv): - fd = os.open('/hi.txt', os.O_RDONLY, 0777) - char_should_be(os.read(fd, 1), "H") - new = os.lseek(fd, 3, os.SEEK_CUR) - if new != 4: - print "Wrong offset, %d should be 4" % new - char_should_be(os.read(fd, 1), "o") - new = os.lseek(fd, -3, os.SEEK_END) - if new != 11: - print "Wrong offset, %d should be 11" % new - char_should_be(os.read(fd, 1), "d") - new = os.lseek(fd, 7, os.SEEK_SET) - if new != 7: - print "Wrong offset, %d should be 7" % new - char_should_be(os.read(fd, 1), "w") - print "All ok!" - return 0 - exe = compile(entry_point) - - proc = SandboxedProcWithFiles([exe]) - output, error = proc.communicate("") - assert output == "All ok!\n" - assert error == "" - -def test_getuid(): - if not hasattr(os, 'getuid'): - py.test.skip("posix only") - - def entry_point(argv): - import os - print "uid is %s" % os.getuid() - print "euid is %s" % os.geteuid() - print "gid is %s" % os.getgid() - print "egid is %s" % os.getegid() - return 0 - exe = compile(entry_point) - - proc = SandboxedProcWithFiles([exe]) - output, error = proc.communicate("") - assert output == "uid is 1000\neuid is 1000\ngid is 1000\negid is 1000\n" - assert error == "" diff --git a/rpython/translator/sandbox/test/test_vfs.py b/rpython/translator/sandbox/test/test_vfs.py deleted file mode 100644 --- a/rpython/translator/sandbox/test/test_vfs.py +++ /dev/null @@ -1,114 +0,0 @@ -import py -import sys, stat, os -from rpython.translator.sandbox.vfs import * -from rpython.tool.udir import udir - -HASLINK = hasattr(os, 'symlink') - -def setup_module(mod): - d = udir.ensure('test_vfs', dir=1) - d.join('file1').write('somedata1') - d.join('file2').write('somelongerdata2') - os.chmod(str(d.join('file2')), stat.S_IWUSR) # unreadable - d.join('.hidden').write('secret') - d.ensure('subdir1', dir=1).join('subfile1').write('spam') - d.ensure('.subdir2', dir=1).join('subfile2').write('secret as well') - if HASLINK: - d.join('symlink1').mksymlinkto(str(d.join('subdir1'))) - d.join('symlink2').mksymlinkto('.hidden') - d.join('symlink3').mksymlinkto('BROKEN') - - -def test_dir(): - d = Dir({'foo': Dir()}) - assert d.keys() == ['foo'] - py.test.raises(OSError, d.open) - assert 0 <= d.getsize() <= sys.maxint - d1 = d.join('foo') - assert stat.S_ISDIR(d1.kind) - assert d1.keys() == [] - py.test.raises(OSError, d.join, 'bar') - st = d.stat() - assert stat.S_ISDIR(st.st_mode) - assert d.access(os.R_OK | os.X_OK) - assert not d.access(os.W_OK) - -def test_file(): - f = File('hello world') - assert stat.S_ISREG(f.kind) - py.test.raises(OSError, f.keys) - assert f.getsize() == 11 - h = f.open() - data = h.read() - assert data == 'hello world' - h.close() - st = f.stat() - assert stat.S_ISREG(st.st_mode) - assert st.st_size == 11 - assert f.access(os.R_OK) - assert not f.access(os.W_OK) - -def test_realdir_realfile(): - for show_dotfiles in [False, True]: - for follow_links in [False, True]: - v_udir = RealDir(str(udir), show_dotfiles = show_dotfiles, - follow_links = follow_links) - v_test_vfs = v_udir.join('test_vfs') - names = v_test_vfs.keys() - names.sort() - assert names == (show_dotfiles * ['.hidden', '.subdir2'] + - ['file1', 'file2', 'subdir1'] + - HASLINK * ['symlink1', 'symlink2', 'symlink3']) - 
py.test.raises(OSError, v_test_vfs.open) - assert 0 <= v_test_vfs.getsize() <= sys.maxint - - f = v_test_vfs.join('file1') - assert f.open().read() == 'somedata1' - - f = v_test_vfs.join('file2') - assert f.getsize() == len('somelongerdata2') - if os.name != 'nt': # can't have unreadable files there? - py.test.raises(OSError, f.open) - - py.test.raises(OSError, v_test_vfs.join, 'does_not_exist') - py.test.raises(OSError, v_test_vfs.join, 'symlink3') - if follow_links and HASLINK: - d = v_test_vfs.join('symlink1') - assert stat.S_ISDIR(d.stat().st_mode) - assert d.keys() == ['subfile1'] - assert d.join('subfile1').open().read() == 'spam' - - f = v_test_vfs.join('symlink2') - assert stat.S_ISREG(f.stat().st_mode) - assert f.access(os.R_OK) - assert f.open().read() == 'secret' - else: - py.test.raises(OSError, v_test_vfs.join, 'symlink1') - py.test.raises(OSError, v_test_vfs.join, 'symlink2') - - if show_dotfiles: - f = v_test_vfs.join('.hidden') - assert f.open().read() == 'secret' - - d = v_test_vfs.join('.subdir2') - assert d.keys() == ['subfile2'] - assert d.join('subfile2').open().read() == 'secret as well' - else: - py.test.raises(OSError, v_test_vfs.join, '.hidden') - py.test.raises(OSError, v_test_vfs.join, '.subdir2') - -def test_realdir_exclude(): - xdir = udir.ensure('test_realdir_exclude', dir=1) - xdir.ensure('test_realdir_exclude.yes') - xdir.ensure('test_realdir_exclude.no') - v_udir = RealDir(str(udir), exclude=['.no']) - v_xdir = v_udir.join('test_realdir_exclude') - assert 'test_realdir_exclude.yes' in v_xdir.keys() - assert 'test_realdir_exclude.no' not in v_xdir.keys() - v_xdir.join('test_realdir_exclude.yes') # works - py.test.raises(OSError, v_xdir.join, 'test_realdir_exclude.no') - # Windows and Mac tests, for the case - py.test.raises(OSError, v_xdir.join, 'Test_RealDir_Exclude.no') - py.test.raises(OSError, v_xdir.join, 'test_realdir_exclude.No') - py.test.raises(OSError, v_xdir.join, 'test_realdir_exclude.nO') - py.test.raises(OSError, v_xdir.join, 'test_realdir_exclude.NO') From pypy.commits at gmail.com Tue Aug 20 04:34:49 2019 From: pypy.commits at gmail.com (arigo) Date: Tue, 20 Aug 2019 01:34:49 -0700 (PDT) Subject: [pypy-commit] pypy py3.6-sandbox-2: Compile the sandbox with the _socket module Message-ID: <5d5bb0a9.1c69fb81.6ed08.4d9c@mx.google.com> Author: Armin Rigo Branch: py3.6-sandbox-2 Changeset: r97222:4b3a0b8bf44b Date: 2019-08-20 10:34 +0200 http://bitbucket.org/pypy/pypy/changeset/4b3a0b8bf44b/ Log: Compile the sandbox with the _socket module diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -47,7 +47,7 @@ # --sandbox sandbox_modules = default_modules.copy() sandbox_modules.update([ - "array", "binascii", + "array", "binascii", "_socket", ]) import rpython.rlib.rvmprof.cintf diff --git a/rpython/rlib/_rsocket_rffi.py b/rpython/rlib/_rsocket_rffi.py --- a/rpython/rlib/_rsocket_rffi.py +++ b/rpython/rlib/_rsocket_rffi.py @@ -1217,21 +1217,21 @@ size_t, CCHARP, size_t, rffi.INT], rffi.INT) if sys.platform.startswith("openbsd") or sys.platform.startswith("darwin"): - htonl = external('htonl', [rffi.UINT], rffi.UINT, releasegil=False, macro=True) - htons = external('htons', [rffi.USHORT], rffi.USHORT, releasegil=False, macro=True) - ntohl = external('ntohl', [rffi.UINT], rffi.UINT, releasegil=False, macro=True) - ntohs = external('ntohs', [rffi.USHORT], rffi.USHORT, releasegil=False, macro=True) + htonl = external('htonl', [rffi.UINT], rffi.UINT, releasegil=False, 
macro=True, sandboxsafe=True) + htons = external('htons', [rffi.USHORT], rffi.USHORT, releasegil=False, macro=True, sandboxsafe=True) + ntohl = external('ntohl', [rffi.UINT], rffi.UINT, releasegil=False, macro=True, sandboxsafe=True) + ntohs = external('ntohs', [rffi.USHORT], rffi.USHORT, releasegil=False, macro=True, sandboxsafe=True) else: - htonl = external('htonl', [rffi.UINT], rffi.UINT, releasegil=False) - htons = external('htons', [rffi.USHORT], rffi.USHORT, releasegil=False) - ntohl = external('ntohl', [rffi.UINT], rffi.UINT, releasegil=False) - ntohs = external('ntohs', [rffi.USHORT], rffi.USHORT, releasegil=False) + htonl = external('htonl', [rffi.UINT], rffi.UINT, releasegil=False, sandboxsafe=True) + htons = external('htons', [rffi.USHORT], rffi.USHORT, releasegil=False, sandboxsafe=True) + ntohl = external('ntohl', [rffi.UINT], rffi.UINT, releasegil=False, sandboxsafe=True) + ntohs = external('ntohs', [rffi.USHORT], rffi.USHORT, releasegil=False, sandboxsafe=True) if _POSIX: inet_aton = external('inet_aton', [CCHARP, lltype.Ptr(in_addr)], - rffi.INT) + rffi.INT, sandboxsafe=True) -inet_ntoa = external('inet_ntoa', [in_addr], rffi.CCHARP) +inet_ntoa = external('inet_ntoa', [in_addr], rffi.CCHARP, sandboxsafe=True) inet_pton = external('inet_pton', [rffi.INT, rffi.CCHARP, @@ -1242,7 +1242,7 @@ socklen_t], CCHARP, save_err=SAVE_ERR) -inet_addr = external('inet_addr', [rffi.CCHARP], rffi.UINT) +inet_addr = external('inet_addr', [rffi.CCHARP], rffi.UINT, sandboxsafe=True) socklen_t_ptr = lltype.Ptr(rffi.CFixedArray(socklen_t, 1)) socketaccept = external('accept', [socketfd_type, sockaddr_ptr, socklen_t_ptr], socketfd_type, From pypy.commits at gmail.com Tue Aug 20 05:42:05 2019 From: pypy.commits at gmail.com (arigo) Date: Tue, 20 Aug 2019 02:42:05 -0700 (PDT) Subject: [pypy-commit] pypy py3.6-sandbox-2: Use the select module too Message-ID: <5d5bc06d.1c69fb81.a2cd7.5245@mx.google.com> Author: Armin Rigo Branch: py3.6-sandbox-2 Changeset: r97223:4997ac74778b Date: 2019-08-20 11:41 +0200 http://bitbucket.org/pypy/pypy/changeset/4997ac74778b/ Log: Use the select module too diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -47,7 +47,7 @@ # --sandbox sandbox_modules = default_modules.copy() sandbox_modules.update([ - "array", "binascii", "_socket", + "array", "binascii", "_socket", "select", ]) import rpython.rlib.rvmprof.cintf diff --git a/pypy/module/select/moduledef.py b/pypy/module/select/moduledef.py --- a/pypy/module/select/moduledef.py +++ b/pypy/module/select/moduledef.py @@ -43,3 +43,8 @@ Module.interpleveldefs[name] = "space.wrap(%r)" % value super(Module, cls).buildloaders() buildloaders = classmethod(buildloaders) + + def __init__(self, space, w_name): + if space.config.translating and space.config.translation.sandbox: + self.__class__.interpleveldefs.pop('epoll', None) + super(Module, self).__init__(space, w_name) diff --git a/rpython/rlib/_rsocket_rffi.py b/rpython/rlib/_rsocket_rffi.py --- a/rpython/rlib/_rsocket_rffi.py +++ b/rpython/rlib/_rsocket_rffi.py @@ -1185,7 +1185,7 @@ if _POSIX: dup = external('dup', [socketfd_type], socketfd_type, save_err=SAVE_ERR) - gai_strerror = external('gai_strerror', [rffi.INT], CCHARP) + gai_strerror = external('gai_strerror', [rffi.INT], CCHARP, sandboxsafe=True) #h_errno = c_int.in_dll(socketdll, 'h_errno') # @@ -1333,10 +1333,10 @@ rffi.INT, save_err=SAVE_ERR) -FD_CLR = external_c('FD_CLR', [rffi.INT, fd_set], lltype.Void, macro=True) -FD_ISSET = 
external_c('FD_ISSET', [rffi.INT, fd_set], rffi.INT, macro=True) -FD_SET = external_c('FD_SET', [rffi.INT, fd_set], lltype.Void, macro=True) -FD_ZERO = external_c('FD_ZERO', [fd_set], lltype.Void, macro=True) +FD_CLR = external_c('FD_CLR', [rffi.INT, fd_set], lltype.Void, macro=True, sandboxsafe=True) +FD_ISSET = external_c('FD_ISSET', [rffi.INT, fd_set], rffi.INT, macro=True, sandboxsafe=True) +FD_SET = external_c('FD_SET', [rffi.INT, fd_set], lltype.Void, macro=True, sandboxsafe=True) +FD_ZERO = external_c('FD_ZERO', [fd_set], lltype.Void, macro=True, sandboxsafe=True) if _POSIX: pollfdarray = rffi.CArray(pollfd) diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py --- a/rpython/rlib/rposix.py +++ b/rpython/rlib/rposix.py @@ -1172,7 +1172,7 @@ return rffi.charp2str(l_name) c_strerror = external('strerror', [rffi.INT], rffi.CCHARP, - releasegil=False) + releasegil=False, sandboxsafe=True) @replace_os_function('strerror') def strerror(errnum): From pypy.commits at gmail.com Tue Aug 20 06:45:15 2019 From: pypy.commits at gmail.com (arigo) Date: Tue, 20 Aug 2019 03:45:15 -0700 (PDT) Subject: [pypy-commit] pypy sandbox-2: backport from py3.6-sandbox-2 Message-ID: <5d5bcf3b.1c69fb81.ccf6c.83f7@mx.google.com> Author: Armin Rigo Branch: sandbox-2 Changeset: r97224:3f34199b0d1a Date: 2019-08-20 12:44 +0200 http://bitbucket.org/pypy/pypy/changeset/3f34199b0d1a/ Log: backport from py3.6-sandbox-2 diff --git a/rpython/rlib/_rsocket_rffi.py b/rpython/rlib/_rsocket_rffi.py --- a/rpython/rlib/_rsocket_rffi.py +++ b/rpython/rlib/_rsocket_rffi.py @@ -1185,7 +1185,7 @@ if _POSIX: dup = external('dup', [socketfd_type], socketfd_type, save_err=SAVE_ERR) - gai_strerror = external('gai_strerror', [rffi.INT], CCHARP) + gai_strerror = external('gai_strerror', [rffi.INT], CCHARP, sandboxsafe=True) #h_errno = c_int.in_dll(socketdll, 'h_errno') # @@ -1217,21 +1217,21 @@ size_t, CCHARP, size_t, rffi.INT], rffi.INT) if sys.platform.startswith("openbsd") or sys.platform.startswith("darwin"): - htonl = external('htonl', [rffi.UINT], rffi.UINT, releasegil=False, macro=True) - htons = external('htons', [rffi.USHORT], rffi.USHORT, releasegil=False, macro=True) - ntohl = external('ntohl', [rffi.UINT], rffi.UINT, releasegil=False, macro=True) - ntohs = external('ntohs', [rffi.USHORT], rffi.USHORT, releasegil=False, macro=True) + htonl = external('htonl', [rffi.UINT], rffi.UINT, releasegil=False, macro=True, sandboxsafe=True) + htons = external('htons', [rffi.USHORT], rffi.USHORT, releasegil=False, macro=True, sandboxsafe=True) + ntohl = external('ntohl', [rffi.UINT], rffi.UINT, releasegil=False, macro=True, sandboxsafe=True) + ntohs = external('ntohs', [rffi.USHORT], rffi.USHORT, releasegil=False, macro=True, sandboxsafe=True) else: - htonl = external('htonl', [rffi.UINT], rffi.UINT, releasegil=False) - htons = external('htons', [rffi.USHORT], rffi.USHORT, releasegil=False) - ntohl = external('ntohl', [rffi.UINT], rffi.UINT, releasegil=False) - ntohs = external('ntohs', [rffi.USHORT], rffi.USHORT, releasegil=False) + htonl = external('htonl', [rffi.UINT], rffi.UINT, releasegil=False, sandboxsafe=True) + htons = external('htons', [rffi.USHORT], rffi.USHORT, releasegil=False, sandboxsafe=True) + ntohl = external('ntohl', [rffi.UINT], rffi.UINT, releasegil=False, sandboxsafe=True) + ntohs = external('ntohs', [rffi.USHORT], rffi.USHORT, releasegil=False, sandboxsafe=True) if _POSIX: inet_aton = external('inet_aton', [CCHARP, lltype.Ptr(in_addr)], - rffi.INT) + rffi.INT, sandboxsafe=True) -inet_ntoa = 
external('inet_ntoa', [in_addr], rffi.CCHARP) +inet_ntoa = external('inet_ntoa', [in_addr], rffi.CCHARP, sandboxsafe=True) inet_pton = external('inet_pton', [rffi.INT, rffi.CCHARP, @@ -1242,7 +1242,7 @@ socklen_t], CCHARP, save_err=SAVE_ERR) -inet_addr = external('inet_addr', [rffi.CCHARP], rffi.UINT) +inet_addr = external('inet_addr', [rffi.CCHARP], rffi.UINT, sandboxsafe=True) socklen_t_ptr = lltype.Ptr(rffi.CFixedArray(socklen_t, 1)) socketaccept = external('accept', [socketfd_type, sockaddr_ptr, socklen_t_ptr], socketfd_type, @@ -1333,10 +1333,10 @@ rffi.INT, save_err=SAVE_ERR) -FD_CLR = external_c('FD_CLR', [rffi.INT, fd_set], lltype.Void, macro=True) -FD_ISSET = external_c('FD_ISSET', [rffi.INT, fd_set], rffi.INT, macro=True) -FD_SET = external_c('FD_SET', [rffi.INT, fd_set], lltype.Void, macro=True) -FD_ZERO = external_c('FD_ZERO', [fd_set], lltype.Void, macro=True) +FD_CLR = external_c('FD_CLR', [rffi.INT, fd_set], lltype.Void, macro=True, sandboxsafe=True) +FD_ISSET = external_c('FD_ISSET', [rffi.INT, fd_set], rffi.INT, macro=True, sandboxsafe=True) +FD_SET = external_c('FD_SET', [rffi.INT, fd_set], lltype.Void, macro=True, sandboxsafe=True) +FD_ZERO = external_c('FD_ZERO', [fd_set], lltype.Void, macro=True, sandboxsafe=True) if _POSIX: pollfdarray = rffi.CArray(pollfd) diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py --- a/rpython/rlib/rposix.py +++ b/rpython/rlib/rposix.py @@ -1172,7 +1172,7 @@ return rffi.charp2str(l_name) c_strerror = external('strerror', [rffi.INT], rffi.CCHARP, - releasegil=False) + releasegil=False, sandboxsafe=True) @replace_os_function('strerror') def strerror(errnum): diff --git a/rpython/rlib/rsiphash.py b/rpython/rlib/rsiphash.py --- a/rpython/rlib/rsiphash.py +++ b/rpython/rlib/rsiphash.py @@ -139,6 +139,8 @@ translator = hop.rtyper.annotator.translator if translator.config.translation.reverse_debugger: return # ignore and use the regular hash, with reverse-debugger + if translator.config.translation.sandbox: + return # ignore and use the regular hash, with sandboxing bk = hop.rtyper.annotator.bookkeeper s_callable = bk.immutablevalue(initialize_from_env) r_callable = hop.rtyper.getrepr(s_callable) diff --git a/rpython/rlib/rurandom.py b/rpython/rlib/rurandom.py --- a/rpython/rlib/rurandom.py +++ b/rpython/rlib/rurandom.py @@ -6,7 +6,7 @@ import errno from rpython.rtyper.lltypesystem import lltype, rffi -from rpython.rlib.objectmodel import not_rpython +from rpython.rlib.objectmodel import not_rpython, fetch_translated_config from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.rtyper.tool import rffi_platform @@ -148,7 +148,9 @@ # initialize the random seed of string hashes result = [] if SYS_getrandom is not None: - n = _getrandom(n, result, signal_checker) + config = fetch_translated_config() + if config is None or not config.translation.sandbox: + n = _getrandom(n, result, signal_checker) if n <= 0: return ''.join(result) From pypy.commits at gmail.com Tue Aug 20 10:47:51 2019 From: pypy.commits at gmail.com (rlamy) Date: Tue, 20 Aug 2019 07:47:51 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: fix tests Message-ID: <5d5c0817.1c69fb81.aeb18.0cc9@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97226:63c246992f08 Date: 2019-08-20 15:46 +0100 http://bitbucket.org/pypy/pypy/changeset/63c246992f08/ Log: fix tests diff --git a/lib-python/3/test/test_dict.py b/lib-python/3/test/test_dict.py --- a/lib-python/3/test/test_dict.py +++ b/lib-python/3/test/test_dict.py @@ -1097,6 +1097,7 @@ 
support.check_free_after_iterating(self, lambda d: iter(d.values()), dict) support.check_free_after_iterating(self, lambda d: iter(d.items()), dict) + @support.cpython_only def test_equal_operator_modifying_operand(self): # test fix for seg fault reported in issue 27945 part 3. class X(): @@ -1179,6 +1180,7 @@ for result in d.items(): if result[0] == 2: d[2] = None # free d[2] --> X(2).__del__ was called + gc.collect() self.assertRaises(RuntimeError, iter_and_mutate) From pypy.commits at gmail.com Tue Aug 20 15:43:04 2019 From: pypy.commits at gmail.com (rlamy) Date: Tue, 20 Aug 2019 12:43:04 -0700 (PDT) Subject: [pypy-commit] pypy default: Avoid RuntimeError in repr() of recursive dictviews (bpo-18533) Message-ID: <5d5c4d48.1c69fb81.ce5b9.93e4@mx.google.com> Author: Ronan Lamy Branch: Changeset: r97227:3ea4a70584df Date: 2019-08-20 20:42 +0100 http://bitbucket.org/pypy/pypy/changeset/3ea4a70584df/ Log: Avoid RuntimeError in repr() of recursive dictviews (bpo-18533) diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -446,9 +446,23 @@ del currently_in_repr[d] except: pass + + def viewrepr(currently_in_repr, view): + if view in currently_in_repr: + return '...' + currently_in_repr[view] = 1 + try: + return (type(view).__name__ + "([" + + ", ".join([repr(x) for x in view]) + '])') + finally: + try: + del currently_in_repr[view] + except: + pass ''', filename=__file__) dictrepr = app.interphook("dictrepr") +viewrepr = app.interphook("viewrepr") W_DictMultiObject.typedef = TypeDef("dict", @@ -1515,7 +1529,9 @@ self.w_dict = w_dict def descr_repr(self, space): + return viewrepr(space, space.get_objects_in_repr(), self) w_seq = space.call_function(space.w_list, self) + w_repr = space.repr(w_seq) return space.newtext("%s(%s)" % (space.type(self).getname(space), space.text_w(w_repr))) diff --git a/pypy/objspace/std/test/test_dictmultiobject.py b/pypy/objspace/std/test/test_dictmultiobject.py --- a/pypy/objspace/std/test/test_dictmultiobject.py +++ b/pypy/objspace/std/test/test_dictmultiobject.py @@ -917,6 +917,12 @@ assert (r == "dict_values(['ABC', 10])" or r == "dict_values([10, 'ABC'])") + def test_recursive_repr(self): + d = {1: 2} + d[2] = d.viewvalues() + print repr(d) + assert repr(d) == '{1: 2, 2: dict_values([2, ...])}' + def test_keys_set_operations(self): d1 = {'a': 1, 'b': 2} d2 = {'b': 3, 'c': 2} From pypy.commits at gmail.com Tue Aug 20 15:48:36 2019 From: pypy.commits at gmail.com (rlamy) Date: Tue, 20 Aug 2019 12:48:36 -0700 (PDT) Subject: [pypy-commit] pypy default: remove dead code Message-ID: <5d5c4e94.1c69fb81.bcd74.4b71@mx.google.com> Author: Ronan Lamy Branch: Changeset: r97228:863f304d4d70 Date: 2019-08-20 20:44 +0100 http://bitbucket.org/pypy/pypy/changeset/863f304d4d70/ Log: remove dead code diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -1530,11 +1530,6 @@ def descr_repr(self, space): return viewrepr(space, space.get_objects_in_repr(), self) - w_seq = space.call_function(space.w_list, self) - - w_repr = space.repr(w_seq) - return space.newtext("%s(%s)" % (space.type(self).getname(space), - space.text_w(w_repr))) def descr_len(self, space): return space.len(self.w_dict) From pypy.commits at gmail.com Tue Aug 20 15:48:37 2019 From: pypy.commits at gmail.com (rlamy) Date: Tue, 20 Aug 2019 12:48:37 -0700 (PDT) Subject: 
[pypy-commit] pypy py3.6: hg merge default Message-ID: <5d5c4e95.1c69fb81.aadee.dacd@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97229:aa1858313621 Date: 2019-08-20 20:47 +0100 http://bitbucket.org/pypy/pypy/changeset/aa1858313621/ Log: hg merge default diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -381,9 +381,23 @@ del currently_in_repr[d] except: pass + + def viewrepr(currently_in_repr, view): + if view in currently_in_repr: + return '...' + currently_in_repr[view] = 1 + try: + return (type(view).__name__ + "([" + + ", ".join([repr(x) for x in view]) + '])') + finally: + try: + del currently_in_repr[view] + except: + pass ''', filename=__file__) dictrepr = app.interphook("dictrepr") +viewrepr = app.interphook("viewrepr") W_DictMultiObject.typedef = TypeDef("dict", @@ -1431,10 +1445,7 @@ self.w_dict = w_dict def descr_repr(self, space): - typename = space.type(self).getname(space) - w_seq = space.call_function(space.w_list, self) - seq_repr = space.utf8_w(space.repr(w_seq)) - return space.newtext("%s(%s)" % (typename, seq_repr)) + return viewrepr(space, space.get_objects_in_repr(), self) def descr_len(self, space): return space.len(self.w_dict) diff --git a/pypy/objspace/std/test/test_dictmultiobject.py b/pypy/objspace/std/test/test_dictmultiobject.py --- a/pypy/objspace/std/test/test_dictmultiobject.py +++ b/pypy/objspace/std/test/test_dictmultiobject.py @@ -895,6 +895,11 @@ d = {'日本': '日本国'} assert repr(d.items()) == "dict_items([('日本', '日本国')])" + def test_recursive_repr(self): + d = {1: 2} + d[2] = d.values() + assert repr(d) == '{1: 2, 2: dict_values([2, ...])}' + def test_keys_set_operations(self): d1 = {'a': 1, 'b': 2} d2 = {'b': 3, 'c': 2} From pypy.commits at gmail.com Wed Aug 21 06:32:08 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 21 Aug 2019 03:32:08 -0700 (PDT) Subject: [pypy-commit] pypy default: update test for 2.7.15, sync with forward port of bpo-18533 in 3ea4a70584df Message-ID: <5d5d1da8.1c69fb81.48c50.d3b8@mx.google.com> Author: Matti Picus Branch: Changeset: r97231:ca952577d5b5 Date: 2019-08-21 13:31 +0300 http://bitbucket.org/pypy/pypy/changeset/ca952577d5b5/ Log: update test for 2.7.15, sync with forward port of bpo-18533 in 3ea4a70584df diff --git a/lib-python/2.7/test/test_dictviews.py b/lib-python/2.7/test/test_dictviews.py --- a/lib-python/2.7/test/test_dictviews.py +++ b/lib-python/2.7/test/test_dictviews.py @@ -169,6 +169,20 @@ def test_recursive_repr(self): d = {} d[42] = d.viewvalues() + r = repr(d) + # Cannot perform a stronger test, as the contents of the repr + # are implementation-dependent. All we can say is that we + # want a str result, not an exception of any sort. + self.assertIsInstance(r, str) + d[42] = d.viewitems() + r = repr(d) + # Again. 
+ self.assertIsInstance(r, str) + + def test_deeply_nested_repr(self): + d = {} + for i in range(sys.getrecursionlimit() + 100): + d = {42: d.viewvalues()} self.assertRaises(RuntimeError, repr, d) def test_abc_registry(self): From pypy.commits at gmail.com Wed Aug 21 12:05:57 2019 From: pypy.commits at gmail.com (rlamy) Date: Wed, 21 Aug 2019 09:05:57 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Fix handling of 1st argument to hashlib.blake2{b, s}() to match CPython 3.6.9 (bpo-33729) Message-ID: <5d5d6be5.1c69fb81.2ede3.3b70@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97232:50a7b57689ea Date: 2019-08-21 16:51 +0100 http://bitbucket.org/pypy/pypy/changeset/50a7b57689ea/ Log: Fix handling of 1st argument to hashlib.blake2{b,s}() to match CPython 3.6.9 (bpo-33729) diff --git a/lib_pypy/_blake2/__init__.py b/lib_pypy/_blake2/__init__.py --- a/lib_pypy/_blake2/__init__.py +++ b/lib_pypy/_blake2/__init__.py @@ -10,7 +10,7 @@ MAX_KEY_SIZE = _lib.BLAKE_KEYBYTES MAX_DIGEST_SIZE = _lib.BLAKE_OUTBYTES - def __new__(cls, string=None, *, digest_size=MAX_DIGEST_SIZE, + def __new__(cls, _string=None, *, digest_size=MAX_DIGEST_SIZE, key=None, salt=None, person=None, fanout=1, depth=1, leaf_size=None, node_offset=None, node_depth=0, inner_size=0, last_node=False): @@ -101,8 +101,8 @@ _lib.blake_update(self._state, block, len(block)) # secure_zero_memory(block, sizeof(block) - if string: - self.update(string) + if _string is not None: + self.update(_string) return self @property From pypy.commits at gmail.com Wed Aug 21 12:22:26 2019 From: pypy.commits at gmail.com (rlamy) Date: Wed, 21 Aug 2019 09:22:26 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Prevent overflow in digest() (bpo-34922) Message-ID: <5d5d6fc2.1c69fb81.821ae.1418@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97233:4239b16d9453 Date: 2019-08-21 17:21 +0100 http://bitbucket.org/pypy/pypy/changeset/4239b16d9453/ Log: Prevent overflow in digest() (bpo-34922) diff --git a/lib_pypy/_sha3/__init__.py b/lib_pypy/_sha3/__init__.py --- a/lib_pypy/_sha3/__init__.py +++ b/lib_pypy/_sha3/__init__.py @@ -64,6 +64,8 @@ class _shake(_sha3): def digest(self, length): + if length >= (1 << 29): + raise ValueError("length is too large") # ExtractLane needs at least SHA3_MAX_DIGESTSIZE + SHA3_LANESIZE and # SHA_LANESIZE extra space. 
digest = _ffi.new("char[]", length + SHA3_LANESIZE) @@ -79,7 +81,7 @@ def hexdigest(self, length): return codecs.encode(self.digest(length), 'hex').decode() - + class sha3_224(_sha3): name = "sha3_224" From pypy.commits at gmail.com Wed Aug 21 13:06:59 2019 From: pypy.commits at gmail.com (rlamy) Date: Wed, 21 Aug 2019 10:06:59 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: fix error message Message-ID: <5d5d7a33.1c69fb81.759b9.515c@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97234:63e7c409be9a Date: 2019-08-21 18:00 +0100 http://bitbucket.org/pypy/pypy/changeset/63e7c409be9a/ Log: fix error message diff --git a/pypy/objspace/std/intobject.py b/pypy/objspace/std/intobject.py --- a/pypy/objspace/std/intobject.py +++ b/pypy/objspace/std/intobject.py @@ -964,7 +964,7 @@ s = unicode_to_decimal_w(space, w_value) except Exception: raise oefmt(space.w_ValueError, - 'invalid literal for int() with base %d: %S', + 'invalid literal for int() with base %d: %R', base, w_value) elif (space.isinstance_w(w_value, space.w_bytes) or space.isinstance_w(w_value, space.w_bytearray)): diff --git a/pypy/objspace/std/test/test_intobject.py b/pypy/objspace/std/test/test_intobject.py --- a/pypy/objspace/std/test/test_intobject.py +++ b/pypy/objspace/std/test/test_intobject.py @@ -589,6 +589,8 @@ value = u'123\ud800' e = raises(ValueError, int, value) assert str(e.value) == u"invalid literal for int() with base 10: %r" % value + e = raises(ValueError, int, value, 10) + assert str(e.value) == u"invalid literal for int() with base 10: %r" % value def test_non_numeric_input_types(self): # Test possible non-numeric types for the argument x, including From pypy.commits at gmail.com Wed Aug 21 13:37:20 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 21 Aug 2019 10:37:20 -0700 (PDT) Subject: [pypy-commit] pypy default: clean out lib-python/3* Message-ID: <5d5d8150.1c69fb81.a452d.3d8f@mx.google.com> Author: Matti Picus Branch: Changeset: r97235:b75bc7eac637 Date: 2019-08-21 20:11 +0300 http://bitbucket.org/pypy/pypy/changeset/b75bc7eac637/ Log: clean out lib-python/3* diff too long, truncating to 2000 out of 5659 lines diff --git a/lib-python/3.2/test/test_tools.py b/lib-python/3.2/test/test_tools.py deleted file mode 100644 --- a/lib-python/3.2/test/test_tools.py +++ /dev/null @@ -1,433 +0,0 @@ -"""Tests for scripts in the Tools directory. - -This file contains regression tests for some of the scripts found in the -Tools directory of a Python checkout or tarball, such as reindent.py. -""" - -import os -import sys -import imp -import unittest -import shutil -import subprocess -import sysconfig -import tempfile -import textwrap -from test import support -from test.script_helper import assert_python_ok, temp_dir - -if not sysconfig.is_python_build(): - # XXX some installers do contain the tools, should we detect that - # and run the tests in that case too? 
- raise unittest.SkipTest('test irrelevant for an installed Python') - -basepath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), - 'Tools') -scriptsdir = os.path.join(basepath, 'scripts') - - -class ReindentTests(unittest.TestCase): - script = os.path.join(scriptsdir, 'reindent.py') - - def test_noargs(self): - assert_python_ok(self.script) - - def test_help(self): - rc, out, err = assert_python_ok(self.script, '-h') - self.assertEqual(out, b'') - self.assertGreater(err, b'') - - -class PindentTests(unittest.TestCase): - script = os.path.join(scriptsdir, 'pindent.py') - - def assertFileEqual(self, fn1, fn2): - with open(fn1) as f1, open(fn2) as f2: - self.assertEqual(f1.readlines(), f2.readlines()) - - def pindent(self, source, *args): - with subprocess.Popen( - (sys.executable, self.script) + args, - stdin=subprocess.PIPE, stdout=subprocess.PIPE, - universal_newlines=True) as proc: - out, err = proc.communicate(source) - self.assertIsNone(err) - return out - - def lstriplines(self, data): - return '\n'.join(line.lstrip() for line in data.splitlines()) + '\n' - - def test_selftest(self): - self.maxDiff = None - with temp_dir() as directory: - data_path = os.path.join(directory, '_test.py') - with open(self.script) as f: - closed = f.read() - with open(data_path, 'w') as f: - f.write(closed) - - rc, out, err = assert_python_ok(self.script, '-d', data_path) - self.assertEqual(out, b'') - self.assertEqual(err, b'') - backup = data_path + '~' - self.assertTrue(os.path.exists(backup)) - with open(backup) as f: - self.assertEqual(f.read(), closed) - with open(data_path) as f: - clean = f.read() - compile(clean, '_test.py', 'exec') - self.assertEqual(self.pindent(clean, '-c'), closed) - self.assertEqual(self.pindent(closed, '-d'), clean) - - rc, out, err = assert_python_ok(self.script, '-c', data_path) - self.assertEqual(out, b'') - self.assertEqual(err, b'') - with open(backup) as f: - self.assertEqual(f.read(), clean) - with open(data_path) as f: - self.assertEqual(f.read(), closed) - - broken = self.lstriplines(closed) - with open(data_path, 'w') as f: - f.write(broken) - rc, out, err = assert_python_ok(self.script, '-r', data_path) - self.assertEqual(out, b'') - self.assertEqual(err, b'') - with open(backup) as f: - self.assertEqual(f.read(), broken) - with open(data_path) as f: - indented = f.read() - compile(indented, '_test.py', 'exec') - self.assertEqual(self.pindent(broken, '-r'), indented) - - def pindent_test(self, clean, closed): - self.assertEqual(self.pindent(clean, '-c'), closed) - self.assertEqual(self.pindent(closed, '-d'), clean) - broken = self.lstriplines(closed) - self.assertEqual(self.pindent(broken, '-r', '-e', '-s', '4'), closed) - - def test_statements(self): - clean = textwrap.dedent("""\ - if a: - pass - - if a: - pass - else: - pass - - if a: - pass - elif: - pass - else: - pass - - while a: - break - - while a: - break - else: - pass - - for i in a: - break - - for i in a: - break - else: - pass - - try: - pass - finally: - pass - - try: - pass - except TypeError: - pass - except ValueError: - pass - else: - pass - - try: - pass - except TypeError: - pass - except ValueError: - pass - finally: - pass - - with a: - pass - - class A: - pass - - def f(): - pass - """) - - closed = textwrap.dedent("""\ - if a: - pass - # end if - - if a: - pass - else: - pass - # end if - - if a: - pass - elif: - pass - else: - pass - # end if - - while a: - break - # end while - - while a: - break - else: - pass - # end while - - for i in a: - break - # end 
for - - for i in a: - break - else: - pass - # end for - - try: - pass - finally: - pass - # end try - - try: - pass - except TypeError: - pass - except ValueError: - pass - else: - pass - # end try - - try: - pass - except TypeError: - pass - except ValueError: - pass - finally: - pass - # end try - - with a: - pass - # end with - - class A: - pass - # end class A - - def f(): - pass - # end def f - """) - self.pindent_test(clean, closed) - - def test_multilevel(self): - clean = textwrap.dedent("""\ - def foobar(a, b): - if a == b: - a = a+1 - elif a < b: - b = b-1 - if b > a: a = a-1 - else: - print 'oops!' - """) - closed = textwrap.dedent("""\ - def foobar(a, b): - if a == b: - a = a+1 - elif a < b: - b = b-1 - if b > a: a = a-1 - # end if - else: - print 'oops!' - # end if - # end def foobar - """) - self.pindent_test(clean, closed) - - def test_preserve_indents(self): - clean = textwrap.dedent("""\ - if a: - if b: - pass - """) - closed = textwrap.dedent("""\ - if a: - if b: - pass - # end if - # end if - """) - self.assertEqual(self.pindent(clean, '-c'), closed) - self.assertEqual(self.pindent(closed, '-d'), clean) - broken = self.lstriplines(closed) - self.assertEqual(self.pindent(broken, '-r', '-e', '-s', '9'), closed) - clean = textwrap.dedent("""\ - if a: - \tif b: - \t\tpass - """) - closed = textwrap.dedent("""\ - if a: - \tif b: - \t\tpass - \t# end if - # end if - """) - self.assertEqual(self.pindent(clean, '-c'), closed) - self.assertEqual(self.pindent(closed, '-d'), clean) - broken = self.lstriplines(closed) - self.assertEqual(self.pindent(broken, '-r'), closed) - - def test_escaped_newline(self): - clean = textwrap.dedent("""\ - class\\ - \\ - A: - def\ - \\ - f: - pass - """) - closed = textwrap.dedent("""\ - class\\ - \\ - A: - def\ - \\ - f: - pass - # end def f - # end class A - """) - self.assertEqual(self.pindent(clean, '-c'), closed) - self.assertEqual(self.pindent(closed, '-d'), clean) - - def test_empty_line(self): - clean = textwrap.dedent("""\ - if a: - - pass - """) - closed = textwrap.dedent("""\ - if a: - - pass - # end if - """) - self.pindent_test(clean, closed) - - def test_oneline(self): - clean = textwrap.dedent("""\ - if a: pass - """) - closed = textwrap.dedent("""\ - if a: pass - # end if - """) - self.pindent_test(clean, closed) - - -class TestSundryScripts(unittest.TestCase): - # At least make sure the rest don't have syntax errors. When tests are - # added for a script it should be added to the whitelist below. - - # scripts that have independent tests. 
- whitelist = ['reindent.py'] - # scripts that can't be imported without running - blacklist = ['make_ctype.py'] - # scripts that use windows-only modules - windows_only = ['win_add2path.py'] - # blacklisted for other reasons - other = ['analyze_dxp.py'] - - skiplist = blacklist + whitelist + windows_only + other - - def setUp(self): - cm = support.DirsOnSysPath(scriptsdir) - cm.__enter__() - self.addCleanup(cm.__exit__) - - def test_sundry(self): - for fn in os.listdir(scriptsdir): - if fn.endswith('.py') and fn not in self.skiplist: - __import__(fn[:-3]) - - @unittest.skipIf(sys.platform != "win32", "Windows-only test") - def test_sundry_windows(self): - for fn in self.windows_only: - __import__(fn[:-3]) - - @unittest.skipIf(not support.threading, "test requires _thread module") - def test_analyze_dxp_import(self): - if hasattr(sys, 'getdxp'): - import analyze_dxp - else: - with self.assertRaises(RuntimeError): - import analyze_dxp - - -class PdepsTests(unittest.TestCase): - - @classmethod - def setUpClass(self): - path = os.path.join(scriptsdir, 'pdeps.py') - self.pdeps = imp.load_source('pdeps', path) - - @classmethod - def tearDownClass(self): - if 'pdeps' in sys.modules: - del sys.modules['pdeps'] - - def test_process_errors(self): - # Issue #14492: m_import.match(line) can be None. - with tempfile.TemporaryDirectory() as tmpdir: - fn = os.path.join(tmpdir, 'foo') - with open(fn, 'w') as stream: - stream.write("#!/this/will/fail") - self.pdeps.process(fn, {}) - - def test_inverse_attribute_error(self): - # Issue #14492: this used to fail with an AttributeError. - self.pdeps.inverse({'a': []}) - - -def test_main(): - support.run_unittest(*[obj for obj in globals().values() - if isinstance(obj, type)]) - - -if __name__ == '__main__': - unittest.main() diff --git a/lib-python/3/_osx_support.py b/lib-python/3/_osx_support.py deleted file mode 100644 --- a/lib-python/3/_osx_support.py +++ /dev/null @@ -1,488 +0,0 @@ -"""Shared OS X support functions.""" - -import os -import re -import sys - -__all__ = [ - 'compiler_fixup', - 'customize_config_vars', - 'customize_compiler', - 'get_platform_osx', -] - -# configuration variables that may contain universal build flags, -# like "-arch" or "-isdkroot", that may need customization for -# the user environment -_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', - 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', - 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', - 'PY_CORE_CFLAGS') - -# configuration variables that may contain compiler calls -_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX') - -# prefix added to original configuration variable names -_INITPRE = '_OSX_SUPPORT_INITIAL_' - - -def _find_executable(executable, path=None): - """Tries to find 'executable' in the directories listed in 'path'. - - A string listing directories separated by 'os.pathsep'; defaults to - os.environ['PATH']. Returns the complete filename or None if not found. 
- """ - if path is None: - path = os.environ['PATH'] - - paths = path.split(os.pathsep) - base, ext = os.path.splitext(executable) - - if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'): - executable = executable + '.exe' - - if not os.path.isfile(executable): - for p in paths: - f = os.path.join(p, executable) - if os.path.isfile(f): - # the file exists, we have a shot at spawn working - return f - return None - else: - return executable - - -def _read_output(commandstring): - """Output from succesful command execution or None""" - # Similar to os.popen(commandstring, "r").read(), - # but without actually using os.popen because that - # function is not usable during python bootstrap. - # tempfile is also not available then. - import contextlib - try: - import tempfile - fp = tempfile.NamedTemporaryFile() - except ImportError: - fp = open("/tmp/_osx_support.%s"%( - os.getpid(),), "w+b") - - with contextlib.closing(fp) as fp: - cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name) - return fp.read().decode('utf-8').strip() if not os.system(cmd) else None - - -def _find_build_tool(toolname): - """Find a build tool on current path or using xcrun""" - return (_find_executable(toolname) - or _read_output("/usr/bin/xcrun -find %s" % (toolname,)) - or '' - ) - -_SYSTEM_VERSION = None - -def _get_system_version(): - """Return the OS X system version as a string""" - # Reading this plist is a documented way to get the system - # version (see the documentation for the Gestalt Manager) - # We avoid using platform.mac_ver to avoid possible bootstrap issues during - # the build of Python itself (distutils is used to build standard library - # extensions). - - global _SYSTEM_VERSION - - if _SYSTEM_VERSION is None: - _SYSTEM_VERSION = '' - try: - f = open('/System/Library/CoreServices/SystemVersion.plist') - except IOError: - # We're on a plain darwin box, fall back to the default - # behaviour. - pass - else: - try: - m = re.search(r'ProductUserVisibleVersion\s*' - r'(.*?)', f.read()) - finally: - f.close() - if m is not None: - _SYSTEM_VERSION = '.'.join(m.group(1).split('.')[:2]) - # else: fall back to the default behaviour - - return _SYSTEM_VERSION - -def _remove_original_values(_config_vars): - """Remove original unmodified values for testing""" - # This is needed for higher-level cross-platform tests of get_platform. - for k in list(_config_vars): - if k.startswith(_INITPRE): - del _config_vars[k] - -def _save_modified_value(_config_vars, cv, newvalue): - """Save modified and original unmodified value of configuration var""" - - oldvalue = _config_vars.get(cv, '') - if (oldvalue != newvalue) and (_INITPRE + cv not in _config_vars): - _config_vars[_INITPRE + cv] = oldvalue - _config_vars[cv] = newvalue - -def _supports_universal_builds(): - """Returns True if universal builds are supported on this system""" - # As an approximation, we assume that if we are running on 10.4 or above, - # then we are running with an Xcode environment that supports universal - # builds, in particular -isysroot and -arch arguments to the compiler. This - # is in support of allowing 10.4 universal builds to run on 10.3.x systems. 
- - osx_version = _get_system_version() - if osx_version: - try: - osx_version = tuple(int(i) for i in osx_version.split('.')) - except ValueError: - osx_version = '' - return bool(osx_version >= (10, 4)) if osx_version else False - - -def _find_appropriate_compiler(_config_vars): - """Find appropriate C compiler for extension module builds""" - - # Issue #13590: - # The OSX location for the compiler varies between OSX - # (or rather Xcode) releases. With older releases (up-to 10.5) - # the compiler is in /usr/bin, with newer releases the compiler - # can only be found inside Xcode.app if the "Command Line Tools" - # are not installed. - # - # Futhermore, the compiler that can be used varies between - # Xcode releases. Upto Xcode 4 it was possible to use 'gcc-4.2' - # as the compiler, after that 'clang' should be used because - # gcc-4.2 is either not present, or a copy of 'llvm-gcc' that - # miscompiles Python. - - # skip checks if the compiler was overriden with a CC env variable - if 'CC' in os.environ: - return _config_vars - - # The CC config var might contain additional arguments. - # Ignore them while searching. - cc = oldcc = _config_vars['CC'].split()[0] - if not _find_executable(cc): - # Compiler is not found on the shell search PATH. - # Now search for clang, first on PATH (if the Command LIne - # Tools have been installed in / or if the user has provided - # another location via CC). If not found, try using xcrun - # to find an uninstalled clang (within a selected Xcode). - - # NOTE: Cannot use subprocess here because of bootstrap - # issues when building Python itself (and os.popen is - # implemented on top of subprocess and is therefore not - # usable as well) - - cc = _find_build_tool('clang') - - elif os.path.basename(cc).startswith('gcc'): - # Compiler is GCC, check if it is LLVM-GCC - data = _read_output("'%s' --version" - % (cc.replace("'", "'\"'\"'"),)) - if 'llvm-gcc' in data: - # Found LLVM-GCC, fall back to clang - cc = _find_build_tool('clang') - - if not cc: - raise SystemError( - "Cannot locate working compiler") - - if cc != oldcc: - # Found a replacement compiler. - # Modify config vars using new compiler, if not already explictly - # overriden by an env variable, preserving additional arguments. - for cv in _COMPILER_CONFIG_VARS: - if cv in _config_vars and cv not in os.environ: - cv_split = _config_vars[cv].split() - cv_split[0] = cc if cv != 'CXX' else cc + '++' - _save_modified_value(_config_vars, cv, ' '.join(cv_split)) - - return _config_vars - - -def _remove_universal_flags(_config_vars): - """Remove all universal build arguments from config vars""" - - for cv in _UNIVERSAL_CONFIG_VARS: - # Do not alter a config var explicitly overriden by env var - if cv in _config_vars and cv not in os.environ: - flags = _config_vars[cv] - flags = re.sub('-arch\s+\w+\s', ' ', flags, re.ASCII) - flags = re.sub('-isysroot [^ \t]*', ' ', flags) - _save_modified_value(_config_vars, cv, flags) - - return _config_vars - - -def _remove_unsupported_archs(_config_vars): - """Remove any unsupported archs from config vars""" - # Different Xcode releases support different sets for '-arch' - # flags. In particular, Xcode 4.x no longer supports the - # PPC architectures. - # - # This code automatically removes '-arch ppc' and '-arch ppc64' - # when these are not supported. That makes it possible to - # build extensions on OSX 10.7 and later with the prebuilt - # 32-bit installer on the python.org website. 
- - # skip checks if the compiler was overriden with a CC env variable - if 'CC' in os.environ: - return _config_vars - - if re.search('-arch\s+ppc', _config_vars['CFLAGS']) is not None: - # NOTE: Cannot use subprocess here because of bootstrap - # issues when building Python itself - status = os.system("'%s' -arch ppc -x c /dev/null 2>/dev/null"%( - _config_vars['CC'].replace("'", "'\"'\"'"),)) - # The Apple compiler drivers return status 255 if no PPC - if (status >> 8) == 255: - # Compiler doesn't support PPC, remove the related - # '-arch' flags if not explicitly overridden by an - # environment variable - for cv in _UNIVERSAL_CONFIG_VARS: - if cv in _config_vars and cv not in os.environ: - flags = _config_vars[cv] - flags = re.sub('-arch\s+ppc\w*\s', ' ', flags) - _save_modified_value(_config_vars, cv, flags) - - return _config_vars - - -def _override_all_archs(_config_vars): - """Allow override of all archs with ARCHFLAGS env var""" - # NOTE: This name was introduced by Apple in OSX 10.5 and - # is used by several scripting languages distributed with - # that OS release. - if 'ARCHFLAGS' in os.environ: - arch = os.environ['ARCHFLAGS'] - for cv in _UNIVERSAL_CONFIG_VARS: - if cv in _config_vars and '-arch' in _config_vars[cv]: - flags = _config_vars[cv] - flags = re.sub('-arch\s+\w+\s', ' ', flags) - flags = flags + ' ' + arch - _save_modified_value(_config_vars, cv, flags) - - return _config_vars - - -def _check_for_unavailable_sdk(_config_vars): - """Remove references to any SDKs not available""" - # If we're on OSX 10.5 or later and the user tries to - # compile an extension using an SDK that is not present - # on the current machine it is better to not use an SDK - # than to fail. This is particularly important with - # the standalong Command Line Tools alternative to a - # full-blown Xcode install since the CLT packages do not - # provide SDKs. If the SDK is not present, it is assumed - # that the header files and dev libs have been installed - # to /usr and /System/Library by either a standalone CLT - # package or the CLT component within Xcode. - cflags = _config_vars.get('CFLAGS', '') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is not None: - sdk = m.group(1) - if not os.path.exists(sdk): - for cv in _UNIVERSAL_CONFIG_VARS: - # Do not alter a config var explicitly overriden by env var - if cv in _config_vars and cv not in os.environ: - flags = _config_vars[cv] - flags = re.sub(r'-isysroot\s+\S+(?:\s|$)', ' ', flags) - _save_modified_value(_config_vars, cv, flags) - - return _config_vars - - -def compiler_fixup(compiler_so, cc_args): - """ - This function will strip '-isysroot PATH' and '-arch ARCH' from the - compile flags if the user has specified one them in extra_compile_flags. - - This is needed because '-arch ARCH' adds another architecture to the - build, without a way to remove an architecture. Furthermore GCC will - barf if multiple '-isysroot' arguments are present. - """ - stripArch = stripSysroot = False - - compiler_so = list(compiler_so) - - if not _supports_universal_builds(): - # OSX before 10.4.0, these don't support -arch and -isysroot at - # all. 
- stripArch = stripSysroot = True - else: - stripArch = '-arch' in cc_args - stripSysroot = '-isysroot' in cc_args - - if stripArch or 'ARCHFLAGS' in os.environ: - while True: - try: - index = compiler_so.index('-arch') - # Strip this argument and the next one: - del compiler_so[index:index+2] - except ValueError: - break - - if 'ARCHFLAGS' in os.environ and not stripArch: - # User specified different -arch flags in the environ, - # see also distutils.sysconfig - compiler_so = compiler_so + os.environ['ARCHFLAGS'].split() - - if stripSysroot: - while True: - try: - index = compiler_so.index('-isysroot') - # Strip this argument and the next one: - del compiler_so[index:index+2] - except ValueError: - break - - # Check if the SDK that is used during compilation actually exists, - # the universal build requires the usage of a universal SDK and not all - # users have that installed by default. - sysroot = None - if '-isysroot' in cc_args: - idx = cc_args.index('-isysroot') - sysroot = cc_args[idx+1] - elif '-isysroot' in compiler_so: - idx = compiler_so.index('-isysroot') - sysroot = compiler_so[idx+1] - - if sysroot and not os.path.isdir(sysroot): - from distutils import log - log.warn("Compiling with an SDK that doesn't seem to exist: %s", - sysroot) - log.warn("Please check your Xcode installation") - - return compiler_so - - -def customize_config_vars(_config_vars): - """Customize Python build configuration variables. - - Called internally from sysconfig with a mutable mapping - containing name/value pairs parsed from the configured - makefile used to build this interpreter. Returns - the mapping updated as needed to reflect the environment - in which the interpreter is running; in the case of - a Python from a binary installer, the installed - environment may be very different from the build - environment, i.e. different OS levels, different - built tools, different available CPU architectures. - - This customization is performed whenever - distutils.sysconfig.get_config_vars() is first - called. It may be used in environments where no - compilers are present, i.e. when installing pure - Python dists. Customization of compiler paths - and detection of unavailable archs is deferred - until the first extention module build is - requested (in distutils.sysconfig.customize_compiler). - - Currently called from distutils.sysconfig - """ - - if not _supports_universal_builds(): - # On Mac OS X before 10.4, check if -arch and -isysroot - # are in CFLAGS or LDFLAGS and remove them if they are. - # This is needed when building extensions on a 10.3 system - # using a universal build of python. - _remove_universal_flags(_config_vars) - - # Allow user to override all archs with ARCHFLAGS env var - _override_all_archs(_config_vars) - - # Remove references to sdks that are not found - _check_for_unavailable_sdk(_config_vars) - - return _config_vars - - -def customize_compiler(_config_vars): - """Customize compiler path and configuration variables. - - This customization is performed when the first - extension module build is requested - in distutils.sysconfig.customize_compiler). 
- """ - - # Find a compiler to use for extension module builds - _find_appropriate_compiler(_config_vars) - - # Remove ppc arch flags if not supported here - _remove_unsupported_archs(_config_vars) - - # Allow user to override all archs with ARCHFLAGS env var - _override_all_archs(_config_vars) - - return _config_vars - - -def get_platform_osx(_config_vars, osname, release, machine): - """Filter values for get_platform()""" - # called from get_platform() in sysconfig and distutils.util - # - # For our purposes, we'll assume that the system version from - # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set - # to. This makes the compatibility story a bit more sane because the - # machine is going to compile and link as if it were - # MACOSX_DEPLOYMENT_TARGET. - - macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '') - macrelease = _get_system_version() or macver - macver = macver or macrelease - - if macver: - release = macver - osname = "macosx" - - # Use the original CFLAGS value, if available, so that we - # return the same machine type for the platform string. - # Otherwise, distutils may consider this a cross-compiling - # case and disallow installs. - cflags = _config_vars.get(_INITPRE+'CFLAGS', - _config_vars.get('CFLAGS', '')) - if ((macrelease + '.') >= '10.4.' and - '-arch' in cflags.strip()): - # The universal build will build fat binaries, but not on - # systems before 10.4 - - machine = 'fat' - - archs = re.findall('-arch\s+(\S+)', cflags) - archs = tuple(sorted(set(archs))) - - if len(archs) == 1: - machine = archs[0] - elif archs == ('i386', 'ppc'): - machine = 'fat' - elif archs == ('i386', 'x86_64'): - machine = 'intel' - elif archs == ('i386', 'ppc', 'x86_64'): - machine = 'fat3' - elif archs == ('ppc64', 'x86_64'): - machine = 'fat64' - elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): - machine = 'universal' - else: - raise ValueError( - "Don't know machine value for archs=%r" % (archs,)) - - elif machine == 'i386': - # On OSX the machine type returned by uname is always the - # 32-bit variant, even if the executable architecture is - # the 64-bit variant - if sys.maxsize >= 2**32: - machine = 'x86_64' - - elif machine in ('PowerPC', 'Power_Macintosh'): - # Pick a sane name for the PPC architecture. - # See 'i386' case - if sys.maxsize >= 2**32: - machine = 'ppc64' - else: - machine = 'ppc' - - return (osname, release, machine) diff --git a/lib-python/3/test/crashers/trace_at_recursion_limit.py b/lib-python/3/test/crashers/trace_at_recursion_limit.py deleted file mode 100644 --- a/lib-python/3/test/crashers/trace_at_recursion_limit.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -From http://bugs.python.org/issue6717 - -A misbehaving trace hook can trigger a segfault by exceeding the recursion -limit. 
-""" -import sys - - -def x(): - pass - -def g(*args): - if True: # change to True to crash interpreter - try: - x() - except: - pass - return g - -def f(): - print(sys.getrecursionlimit()) - f() - -sys.settrace(g) - -f() diff --git a/lib-python/3/test/json_tests/test_tool.py b/lib-python/3/test/json_tests/test_tool.py deleted file mode 100644 --- a/lib-python/3/test/json_tests/test_tool.py +++ /dev/null @@ -1,69 +0,0 @@ -import os -import sys -import textwrap -import unittest -import subprocess -from test import support -from test.script_helper import assert_python_ok - -class TestTool(unittest.TestCase): - data = """ - - [["blorpie"],[ "whoops" ] , [ - ],\t"d-shtaeou",\r"d-nthiouh", - "i-vhbjkhnth", {"nifty":87}, {"morefield" :\tfalse,"field" - :"yes"} ] - """ - - expect = textwrap.dedent("""\ - [ - [ - "blorpie" - ], - [ - "whoops" - ], - [], - "d-shtaeou", - "d-nthiouh", - "i-vhbjkhnth", - { - "nifty": 87 - }, - { - "field": "yes", - "morefield": false - } - ] - """) - - def test_stdin_stdout(self): - with subprocess.Popen( - (sys.executable, '-m', 'json.tool'), - stdin=subprocess.PIPE, stdout=subprocess.PIPE) as proc: - out, err = proc.communicate(self.data.encode()) - self.assertEqual(out.splitlines(), self.expect.encode().splitlines()) - self.assertEqual(err, None) - - def _create_infile(self): - infile = support.TESTFN - with open(infile, "w") as fp: - self.addCleanup(os.remove, infile) - fp.write(self.data) - return infile - - def test_infile_stdout(self): - infile = self._create_infile() - rc, out, err = assert_python_ok('-m', 'json.tool', infile) - self.assertEqual(out.splitlines(), self.expect.encode().splitlines()) - self.assertEqual(err, b'') - - def test_infile_outfile(self): - infile = self._create_infile() - outfile = support.TESTFN + '.out' - rc, out, err = assert_python_ok('-m', 'json.tool', infile, outfile) - self.addCleanup(os.remove, outfile) - with open(outfile, "r") as fp: - self.assertEqual(fp.read(), self.expect) - self.assertEqual(out, b'') - self.assertEqual(err, b'') diff --git a/lib-python/3/test/mp_fork_bomb.py b/lib-python/3/test/mp_fork_bomb.py deleted file mode 100644 --- a/lib-python/3/test/mp_fork_bomb.py +++ /dev/null @@ -1,13 +0,0 @@ -import multiprocessing, sys - -def foo(): - print("123") - -# Because "if __name__ == '__main__'" is missing this will not work -# correctly on Windows. However, we should get a RuntimeError rather -# than the Windows equivalent of a fork bomb. - -p = multiprocessing.Process(target=foo) -p.start() -p.join() -sys.exit(p.exitcode) diff --git a/lib-python/3/test/sample_doctest_no_docstrings.py b/lib-python/3/test/sample_doctest_no_docstrings.py deleted file mode 100644 --- a/lib-python/3/test/sample_doctest_no_docstrings.py +++ /dev/null @@ -1,12 +0,0 @@ -# This is a sample module used for testing doctest. -# -# This module is for testing how doctest handles a module with no -# docstrings. - - -class Foo(object): - - # A class with no docstring. - - def __init__(self): - pass diff --git a/lib-python/3/test/sample_doctest_no_doctests.py b/lib-python/3/test/sample_doctest_no_doctests.py deleted file mode 100644 --- a/lib-python/3/test/sample_doctest_no_doctests.py +++ /dev/null @@ -1,15 +0,0 @@ -"""This is a sample module used for testing doctest. - -This module is for testing how doctest handles a module with docstrings -but no doctest examples. - -""" - - -class Foo(object): - """A docstring with no doctest examples. 
- - """ - - def __init__(self): - pass diff --git a/lib-python/3/test/test__osx_support.py b/lib-python/3/test/test__osx_support.py deleted file mode 100644 --- a/lib-python/3/test/test__osx_support.py +++ /dev/null @@ -1,279 +0,0 @@ -""" -Test suite for _osx_support: shared OS X support functions. -""" - -import os -import platform -import shutil -import stat -import sys -import unittest - -import test.support - -import _osx_support - - at unittest.skipUnless(sys.platform.startswith("darwin"), "requires OS X") -class Test_OSXSupport(unittest.TestCase): - - def setUp(self): - self.maxDiff = None - self.prog_name = 'bogus_program_xxxx' - self.temp_path_dir = os.path.abspath(os.getcwd()) - self.env = test.support.EnvironmentVarGuard() - self.addCleanup(self.env.__exit__) - for cv in ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', - 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'CC', - 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', - 'PY_CORE_CFLAGS'): - if cv in self.env: - self.env.unset(cv) - - def add_expected_saved_initial_values(self, config_vars, expected_vars): - # Ensure that the initial values for all modified config vars - # are also saved with modified keys. - expected_vars.update(('_OSX_SUPPORT_INITIAL_'+ k, - config_vars[k]) for k in config_vars - if config_vars[k] != expected_vars[k]) - - def test__find_executable(self): - if self.env['PATH']: - self.env['PATH'] = self.env['PATH'] + ':' - self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir) - test.support.unlink(self.prog_name) - self.assertIsNone(_osx_support._find_executable(self.prog_name)) - self.addCleanup(test.support.unlink, self.prog_name) - with open(self.prog_name, 'w') as f: - f.write("#!/bin/sh\n/bin/echo OK\n") - os.chmod(self.prog_name, stat.S_IRWXU) - self.assertEqual(self.prog_name, - _osx_support._find_executable(self.prog_name)) - - def test__read_output(self): - if self.env['PATH']: - self.env['PATH'] = self.env['PATH'] + ':' - self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir) - test.support.unlink(self.prog_name) - self.addCleanup(test.support.unlink, self.prog_name) - with open(self.prog_name, 'w') as f: - f.write("#!/bin/sh\n/bin/echo ExpectedOutput\n") - os.chmod(self.prog_name, stat.S_IRWXU) - self.assertEqual('ExpectedOutput', - _osx_support._read_output(self.prog_name)) - - def test__find_build_tool(self): - out = _osx_support._find_build_tool('cc') - self.assertTrue(os.path.isfile(out), - 'cc not found - check xcode-select') - - def test__get_system_version(self): - self.assertTrue(platform.mac_ver()[0].startswith( - _osx_support._get_system_version())) - - def test__remove_original_values(self): - config_vars = { - 'CC': 'gcc-test -pthreads', - } - expected_vars = { - 'CC': 'clang -pthreads', - } - cv = 'CC' - newvalue = 'clang -pthreads' - _osx_support._save_modified_value(config_vars, cv, newvalue) - self.assertNotEqual(expected_vars, config_vars) - _osx_support._remove_original_values(config_vars) - self.assertEqual(expected_vars, config_vars) - - def test__save_modified_value(self): - config_vars = { - 'CC': 'gcc-test -pthreads', - } - expected_vars = { - 'CC': 'clang -pthreads', - } - self.add_expected_saved_initial_values(config_vars, expected_vars) - cv = 'CC' - newvalue = 'clang -pthreads' - _osx_support._save_modified_value(config_vars, cv, newvalue) - self.assertEqual(expected_vars, config_vars) - - def test__save_modified_value_unchanged(self): - config_vars = { - 'CC': 'gcc-test -pthreads', - } - expected_vars = config_vars.copy() - cv = 'CC' - newvalue = 
'gcc-test -pthreads' - _osx_support._save_modified_value(config_vars, cv, newvalue) - self.assertEqual(expected_vars, config_vars) - - def test__supports_universal_builds(self): - import platform - self.assertEqual(platform.mac_ver()[0].split('.') >= ['10', '4'], - _osx_support._supports_universal_builds()) - - def test__find_appropriate_compiler(self): - compilers = ( - ('gcc-test', 'i686-apple-darwin11-llvm-gcc-4.2'), - ('clang', 'clang version 3.1'), - ) - config_vars = { - 'CC': 'gcc-test -pthreads', - 'CXX': 'cc++-test', - 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ', - 'LDFLAGS': '-arch ppc -arch i386 -g', - 'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk', - 'BLDSHARED': 'gcc-test -bundle -arch ppc -arch i386 -g', - 'LDSHARED': 'gcc-test -bundle -arch ppc -arch i386 ' - '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g', - } - expected_vars = { - 'CC': 'clang -pthreads', - 'CXX': 'clang++', - 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ', - 'LDFLAGS': '-arch ppc -arch i386 -g', - 'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk', - 'BLDSHARED': 'clang -bundle -arch ppc -arch i386 -g', - 'LDSHARED': 'clang -bundle -arch ppc -arch i386 ' - '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g', - } - self.add_expected_saved_initial_values(config_vars, expected_vars) - - suffix = (':' + self.env['PATH']) if self.env['PATH'] else '' - self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix - for c_name, c_output in compilers: - test.support.unlink(c_name) - self.addCleanup(test.support.unlink, c_name) - with open(c_name, 'w') as f: - f.write("#!/bin/sh\n/bin/echo " + c_output) - os.chmod(c_name, stat.S_IRWXU) - self.assertEqual(expected_vars, - _osx_support._find_appropriate_compiler( - config_vars)) - - def test__remove_universal_flags(self): - config_vars = { - 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ', - 'LDFLAGS': '-arch ppc -arch i386 -g', - 'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk', - 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', - 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' - '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g', - } - expected_vars = { - 'CFLAGS': '-fno-strict-aliasing -g -O3 ', - 'LDFLAGS': ' -g', - 'CPPFLAGS': '-I. ', - 'BLDSHARED': 'gcc-4.0 -bundle -g', - 'LDSHARED': 'gcc-4.0 -bundle -g', - } - self.add_expected_saved_initial_values(config_vars, expected_vars) - - self.assertEqual(expected_vars, - _osx_support._remove_universal_flags( - config_vars)) - - def test__remove_unsupported_archs(self): - config_vars = { - 'CC': 'clang', - 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ', - 'LDFLAGS': '-arch ppc -arch i386 -g', - 'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk', - 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', - 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' - '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g', - } - expected_vars = { - 'CC': 'clang', - 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch i386 ', - 'LDFLAGS': ' -arch i386 -g', - 'CPPFLAGS': '-I. 
-isysroot /Developer/SDKs/MacOSX10.4u.sdk', - 'BLDSHARED': 'gcc-4.0 -bundle -arch i386 -g', - 'LDSHARED': 'gcc-4.0 -bundle -arch i386 ' - '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g', - } - self.add_expected_saved_initial_values(config_vars, expected_vars) - - suffix = (':' + self.env['PATH']) if self.env['PATH'] else '' - self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix - c_name = 'clang' - test.support.unlink(c_name) - self.addCleanup(test.support.unlink, c_name) - # exit status 255 means no PPC support in this compiler chain - with open(c_name, 'w') as f: - f.write("#!/bin/sh\nexit 255") - os.chmod(c_name, stat.S_IRWXU) - self.assertEqual(expected_vars, - _osx_support._remove_unsupported_archs( - config_vars)) - - def test__override_all_archs(self): - self.env['ARCHFLAGS'] = '-arch x86_64' - config_vars = { - 'CC': 'clang', - 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ', - 'LDFLAGS': '-arch ppc -arch i386 -g', - 'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk', - 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', - 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' - '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g', - } - expected_vars = { - 'CC': 'clang', - 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch x86_64', - 'LDFLAGS': ' -g -arch x86_64', - 'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk', - 'BLDSHARED': 'gcc-4.0 -bundle -g -arch x86_64', - 'LDSHARED': 'gcc-4.0 -bundle -isysroot ' - '/Developer/SDKs/MacOSX10.4u.sdk -g -arch x86_64', - } - self.add_expected_saved_initial_values(config_vars, expected_vars) - - self.assertEqual(expected_vars, - _osx_support._override_all_archs( - config_vars)) - - def test__check_for_unavailable_sdk(self): - config_vars = { - 'CC': 'clang', - 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ' - '-isysroot /Developer/SDKs/MacOSX10.1.sdk', - 'LDFLAGS': '-arch ppc -arch i386 -g', - 'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.1.sdk', - 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', - 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' - '-isysroot /Developer/SDKs/MacOSX10.1.sdk -g', - } - expected_vars = { - 'CC': 'clang', - 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ' - ' ', - 'LDFLAGS': '-arch ppc -arch i386 -g', - 'CPPFLAGS': '-I. ', - 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g', - 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 ' - ' -g', - } - self.add_expected_saved_initial_values(config_vars, expected_vars) - - self.assertEqual(expected_vars, - _osx_support._check_for_unavailable_sdk( - config_vars)) - - def test_get_platform_osx(self): - # Note, get_platform_osx is currently tested more extensively - # indirectly by test_sysconfig and test_distutils - config_vars = { - 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ' - '-isysroot /Developer/SDKs/MacOSX10.1.sdk', - 'MACOSX_DEPLOYMENT_TARGET': '10.6', - } - result = _osx_support.get_platform_osx(config_vars, ' ', ' ', ' ') - self.assertEqual(('macosx', '10.6', 'fat'), result) - -def test_main(): - if sys.platform == 'darwin': - test.support.run_unittest(Test_OSXSupport) - -if __name__ == "__main__": - test_main() diff --git a/lib-python/3/test/test_file_eintr.py b/lib-python/3/test/test_file_eintr.py deleted file mode 100644 --- a/lib-python/3/test/test_file_eintr.py +++ /dev/null @@ -1,236 +0,0 @@ -# Written to test interrupted system calls interfering with our many buffered -# IO implementations. 
http://bugs.python.org/issue12268 -# -# It was suggested that this code could be merged into test_io and the tests -# made to work using the same method as the existing signal tests in test_io. -# I was unable to get single process tests using alarm or setitimer that way -# to reproduce the EINTR problems. This process based test suite reproduces -# the problems prior to the issue12268 patch reliably on Linux and OSX. -# - gregory.p.smith - -import os -import select -import signal -import subprocess -import sys -from test.support import run_unittest -import time -import unittest - -# Test import all of the things we're about to try testing up front. -from _io import FileIO - - - at unittest.skipUnless(os.name == 'posix', 'tests requires a posix system.') -class TestFileIOSignalInterrupt(unittest.TestCase): - def setUp(self): - self._process = None - - def tearDown(self): - if self._process and self._process.poll() is None: - try: - self._process.kill() - except OSError: - pass - - def _generate_infile_setup_code(self): - """Returns the infile = ... line of code for the reader process. - - subclasseses should override this to test different IO objects. - """ - return ('import _io ;' - 'infile = _io.FileIO(sys.stdin.fileno(), "rb")') - - def fail_with_process_info(self, why, stdout=b'', stderr=b'', - communicate=True): - """A common way to cleanup and fail with useful debug output. - - Kills the process if it is still running, collects remaining output - and fails the test with an error message including the output. - - Args: - why: Text to go after "Error from IO process" in the message. - stdout, stderr: standard output and error from the process so - far to include in the error message. - communicate: bool, when True we call communicate() on the process - after killing it to gather additional output. - """ - if self._process.poll() is None: - time.sleep(0.1) # give it time to finish printing the error. - try: - self._process.terminate() # Ensure it dies. - except OSError: - pass - if communicate: - stdout_end, stderr_end = self._process.communicate() - stdout += stdout_end - stderr += stderr_end - self.fail('Error from IO process %s:\nSTDOUT:\n%sSTDERR:\n%s\n' % - (why, stdout.decode(), stderr.decode())) - - def _test_reading(self, data_to_write, read_and_verify_code): - """Generic buffered read method test harness to validate EINTR behavior. - - Also validates that Python signal handlers are run during the read. - - Args: - data_to_write: String to write to the child process for reading - before sending it a signal, confirming the signal was handled, - writing a final newline and closing the infile pipe. - read_and_verify_code: Single "line" of code to read from a file - object named 'infile' and validate the result. This will be - executed as part of a python subprocess fed data_to_write. - """ - infile_setup_code = self._generate_infile_setup_code() - # Total pipe IO in this function is smaller than the minimum posix OS - # pipe buffer size of 512 bytes. No writer should block. - assert len(data_to_write) < 512, 'data_to_write must fit in pipe buf.' - - # Start a subprocess to call our read method while handling a signal. 
- self._process = subprocess.Popen( - [sys.executable, '-u', '-c', - 'import signal, sys ;' - 'signal.signal(signal.SIGINT, ' - 'lambda s, f: sys.stderr.write("$\\n")) ;' - + infile_setup_code + ' ;' + - 'sys.stderr.write("Worm Sign!\\n") ;' - + read_and_verify_code + ' ;' + - 'infile.close()' - ], - stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - - # Wait for the signal handler to be installed. - worm_sign = self._process.stderr.read(len(b'Worm Sign!\n')) - if worm_sign != b'Worm Sign!\n': # See also, Dune by Frank Herbert. - self.fail_with_process_info('while awaiting a sign', - stderr=worm_sign) - self._process.stdin.write(data_to_write) - - signals_sent = 0 - rlist = [] - # We don't know when the read_and_verify_code in our child is actually - # executing within the read system call we want to interrupt. This - # loop waits for a bit before sending the first signal to increase - # the likelihood of that. Implementations without correct EINTR - # and signal handling usually fail this test. - while not rlist: - rlist, _, _ = select.select([self._process.stderr], (), (), 0.05) - self._process.send_signal(signal.SIGINT) - signals_sent += 1 - if signals_sent > 200: - self._process.kill() - self.fail('reader process failed to handle our signals.') - # This assumes anything unexpected that writes to stderr will also - # write a newline. That is true of the traceback printing code. - signal_line = self._process.stderr.readline() - if signal_line != b'$\n': - self.fail_with_process_info('while awaiting signal', - stderr=signal_line) - - # We append a newline to our input so that a readline call can - # end on its own before the EOF is seen and so that we're testing - # the read call that was interrupted by a signal before the end of - # the data stream has been reached. - stdout, stderr = self._process.communicate(input=b'\n') - if self._process.returncode: - self.fail_with_process_info( - 'exited rc=%d' % self._process.returncode, - stdout, stderr, communicate=False) - # PASS! - - # String format for the read_and_verify_code used by read methods. - _READING_CODE_TEMPLATE = ( - 'got = infile.{read_method_name}() ;' - 'expected = {expected!r} ;' - 'assert got == expected, (' - '"{read_method_name} returned wrong data.\\n"' - '"got data %r\\nexpected %r" % (got, expected))' - ) - - def test_readline(self): - """readline() must handle signals and not lose data.""" - self._test_reading( - data_to_write=b'hello, world!', - read_and_verify_code=self._READING_CODE_TEMPLATE.format( - read_method_name='readline', - expected=b'hello, world!\n')) - - def test_readlines(self): - """readlines() must handle signals and not lose data.""" - self._test_reading( - data_to_write=b'hello\nworld!', - read_and_verify_code=self._READING_CODE_TEMPLATE.format( - read_method_name='readlines', - expected=[b'hello\n', b'world!\n'])) - - def test_readall(self): - """readall() must handle signals and not lose data.""" - self._test_reading( - data_to_write=b'hello\nworld!', - read_and_verify_code=self._READING_CODE_TEMPLATE.format( - read_method_name='readall', - expected=b'hello\nworld!\n')) - # read() is the same thing as readall(). - self._test_reading( - data_to_write=b'hello\nworld!', - read_and_verify_code=self._READING_CODE_TEMPLATE.format( - read_method_name='read', - expected=b'hello\nworld!\n')) - - -class TestBufferedIOSignalInterrupt(TestFileIOSignalInterrupt): - def _generate_infile_setup_code(self): - """Returns the infile = ... 
line of code to make a BufferedReader.""" - return ('infile = open(sys.stdin.fileno(), "rb") ;' - 'import _io ;assert isinstance(infile, _io.BufferedReader)') - - def test_readall(self): - """BufferedReader.read() must handle signals and not lose data.""" - self._test_reading( - data_to_write=b'hello\nworld!', - read_and_verify_code=self._READING_CODE_TEMPLATE.format( - read_method_name='read', - expected=b'hello\nworld!\n')) - - -class TestTextIOSignalInterrupt(TestFileIOSignalInterrupt): - def _generate_infile_setup_code(self): - """Returns the infile = ... line of code to make a TextIOWrapper.""" - return ('infile = open(sys.stdin.fileno(), "rt", newline=None) ;' - 'import _io ;assert isinstance(infile, _io.TextIOWrapper)') - - def test_readline(self): - """readline() must handle signals and not lose data.""" - self._test_reading( - data_to_write=b'hello, world!', - read_and_verify_code=self._READING_CODE_TEMPLATE.format( - read_method_name='readline', - expected='hello, world!\n')) - - def test_readlines(self): - """readlines() must handle signals and not lose data.""" - self._test_reading( - data_to_write=b'hello\r\nworld!', - read_and_verify_code=self._READING_CODE_TEMPLATE.format( - read_method_name='readlines', - expected=['hello\n', 'world!\n'])) - - def test_readall(self): - """read() must handle signals and not lose data.""" - self._test_reading( - data_to_write=b'hello\nworld!', - read_and_verify_code=self._READING_CODE_TEMPLATE.format( - read_method_name='read', - expected="hello\nworld!\n")) - - -def test_main(): - test_cases = [ - tc for tc in globals().values() - if isinstance(tc, type) and issubclass(tc, unittest.TestCase)] - run_unittest(*test_cases) - - -if __name__ == '__main__': - test_main() diff --git a/lib-python/3/test/test_ssl.py b/lib-python/3/test/test_ssl.py deleted file mode 100644 --- a/lib-python/3/test/test_ssl.py +++ /dev/null @@ -1,3987 +0,0 @@ -# Test the support for SSL and sockets - -import sys -import unittest -from test import support -import socket -import select -import time -import datetime -import gc -import os -import errno -import pprint -import tempfile -import urllib.request -import traceback -import asyncore -import weakref -import platform -import re -import functools -try: - import ctypes -except ImportError: - ctypes = None - -ssl = support.import_module("ssl") - -try: - import threading -except ImportError: - _have_threads = False -else: - _have_threads = True - -PROTOCOLS = sorted(ssl._PROTOCOL_NAMES) -HOST = support.HOST -IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL') -IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0) - - -def data_file(*name): - return os.path.join(os.path.dirname(__file__), *name) - -# The custom key and certificate files used in test_ssl are generated -# using Lib/test/make_ssl_certs.py. -# Other certificates are simply fetched from the Internet servers they -# are meant to authenticate. 
- -CERTFILE = data_file("keycert.pem") -BYTES_CERTFILE = os.fsencode(CERTFILE) -ONLYCERT = data_file("ssl_cert.pem") -ONLYKEY = data_file("ssl_key.pem") -BYTES_ONLYCERT = os.fsencode(ONLYCERT) -BYTES_ONLYKEY = os.fsencode(ONLYKEY) -CERTFILE_PROTECTED = data_file("keycert.passwd.pem") -ONLYKEY_PROTECTED = data_file("ssl_key.passwd.pem") -KEY_PASSWORD = "somepass" -CAPATH = data_file("capath") -BYTES_CAPATH = os.fsencode(CAPATH) -CAFILE_NEURONIO = data_file("capath", "4e1295a3.0") -CAFILE_CACERT = data_file("capath", "5ed36f99.0") - -# empty CRL -CRLFILE = data_file("revocation.crl") - -# Two keys and certs signed by the same CA (for SNI tests) -SIGNED_CERTFILE = data_file("keycert3.pem") -SIGNED_CERTFILE2 = data_file("keycert4.pem") -# Same certificate as pycacert.pem, but without extra text in file -SIGNING_CA = data_file("capath", "ceff1710.0") -# cert with all kinds of subject alt names -ALLSANFILE = data_file("allsans.pem") -# cert with all kinds of subject alt names -ALLSANFILE = data_file("allsans.pem") - -REMOTE_HOST = "self-signed.pythontest.net" - -EMPTYCERT = data_file("nullcert.pem") -BADCERT = data_file("badcert.pem") -NONEXISTINGCERT = data_file("XXXnonexisting.pem") -BADKEY = data_file("badkey.pem") -NOKIACERT = data_file("nokia.pem") -NULLBYTECERT = data_file("nullbytecert.pem") -TALOS_INVALID_CRLDP = data_file("talos-2019-0758.pem") - -DHFILE = data_file("ffdh3072.pem") -BYTES_DHFILE = os.fsencode(DHFILE) - -# Not defined in all versions of OpenSSL -OP_NO_COMPRESSION = getattr(ssl, "OP_NO_COMPRESSION", 0) -OP_SINGLE_DH_USE = getattr(ssl, "OP_SINGLE_DH_USE", 0) -OP_SINGLE_ECDH_USE = getattr(ssl, "OP_SINGLE_ECDH_USE", 0) -OP_CIPHER_SERVER_PREFERENCE = getattr(ssl, "OP_CIPHER_SERVER_PREFERENCE", 0) -OP_ENABLE_MIDDLEBOX_COMPAT = getattr(ssl, "OP_ENABLE_MIDDLEBOX_COMPAT", 0) - - -def handle_error(prefix): - exc_format = ' '.join(traceback.format_exception(*sys.exc_info())) - if support.verbose: - sys.stdout.write(prefix + exc_format) - -def can_clear_options(): - # 0.9.8m or higher - return ssl._OPENSSL_API_VERSION >= (0, 9, 8, 13, 15) - -def no_sslv2_implies_sslv3_hello(): - # 0.9.7h or higher - return ssl.OPENSSL_VERSION_INFO >= (0, 9, 7, 8, 15) - -def have_verify_flags(): - # 0.9.8 or higher - return ssl.OPENSSL_VERSION_INFO >= (0, 9, 8, 0, 15) - -def utc_offset(): #NOTE: ignore issues like #1647654 - # local time = utc time + utc offset - if time.daylight and time.localtime().tm_isdst > 0: - return -time.altzone # seconds - return -time.timezone - -def asn1time(cert_time): - # Some versions of OpenSSL ignore seconds, see #18207 - # 0.9.8.i - if ssl._OPENSSL_API_VERSION == (0, 9, 8, 9, 15): - fmt = "%b %d %H:%M:%S %Y GMT" - dt = datetime.datetime.strptime(cert_time, fmt) - dt = dt.replace(second=0) - cert_time = dt.strftime(fmt) - # %d adds leading zero but ASN1_TIME_print() uses leading space - if cert_time[4] == "0": - cert_time = cert_time[:4] + " " + cert_time[5:] - - return cert_time - -# Issue #9415: Ubuntu hijacks their OpenSSL and forcefully disables SSLv2 -def skip_if_broken_ubuntu_ssl(func): - if hasattr(ssl, 'PROTOCOL_SSLv2'): - @functools.wraps(func) - def f(*args, **kwargs): - try: - ssl.SSLContext(ssl.PROTOCOL_SSLv2) - except ssl.SSLError: - if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and - platform.linux_distribution() == ('debian', 'squeeze/sid', '')): - raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour") - return func(*args, **kwargs) - return f - else: - return func - -def skip_if_openssl_cnf_minprotocol_gt_tls1(func): - """Skip a test if 
the OpenSSL config MinProtocol is > TLSv1. - - OS distros with an /etc/ssl/openssl.cnf and MinProtocol set often do so to - require TLSv1.2 or higher (Debian Buster). Some of our tests for older - protocol versions will fail under such a config. - - Alternative workaround: Run this test in a process with - OPENSSL_CONF=/dev/null in the environment. - """ - @functools.wraps(func) - def f(*args, **kwargs): - openssl_cnf = os.environ.get("OPENSSL_CONF", "/etc/ssl/openssl.cnf") - try: - with open(openssl_cnf, "r") as config: - for line in config: - match = re.match(r"MinProtocol\s*=\s*(TLSv\d+\S*)", line) - if match: - tls_ver = match.group(1) - if tls_ver > "TLSv1": - raise unittest.SkipTest( - "%s has MinProtocol = %s which is > TLSv1." % - (openssl_cnf, tls_ver)) - except (EnvironmentError, UnicodeDecodeError) as err: - # no config file found, etc. - if support.verbose: - sys.stdout.write("\n Could not scan %s for MinProtocol: %s\n" - % (openssl_cnf, err)) - return func(*args, **kwargs) - return f - - -needs_sni = unittest.skipUnless(ssl.HAS_SNI, "SNI support needed for this test") - - -def test_wrap_socket(sock, ssl_version=ssl.PROTOCOL_TLS, *, - cert_reqs=ssl.CERT_NONE, ca_certs=None, - ciphers=None, certfile=None, keyfile=None, - **kwargs): - context = ssl.SSLContext(ssl_version) - if cert_reqs is not None: - context.verify_mode = cert_reqs - if ca_certs is not None: - context.load_verify_locations(ca_certs) - if certfile is not None or keyfile is not None: - context.load_cert_chain(certfile, keyfile) - if ciphers is not None: - context.set_ciphers(ciphers) - return context.wrap_socket(sock, **kwargs) - -class BasicSocketTests(unittest.TestCase): - - def test_constants(self): - ssl.CERT_NONE - ssl.CERT_OPTIONAL - ssl.CERT_REQUIRED - ssl.OP_CIPHER_SERVER_PREFERENCE - ssl.OP_SINGLE_DH_USE - if ssl.HAS_ECDH: - ssl.OP_SINGLE_ECDH_USE - if ssl.OPENSSL_VERSION_INFO >= (1, 0): - ssl.OP_NO_COMPRESSION - self.assertIn(ssl.HAS_SNI, {True, False}) - self.assertIn(ssl.HAS_ECDH, {True, False}) - ssl.OP_NO_SSLv2 - ssl.OP_NO_SSLv3 - ssl.OP_NO_TLSv1 - ssl.OP_NO_TLSv1_3 - if ssl.OPENSSL_VERSION_INFO >= (1, 0, 1): - ssl.OP_NO_TLSv1_1 - ssl.OP_NO_TLSv1_2 - - def test_str_for_enums(self): - # Make sure that the PROTOCOL_* constants have enum-like string - # reprs. 
- proto = ssl.PROTOCOL_TLS - self.assertEqual(str(proto), '_SSLMethod.PROTOCOL_TLS') - ctx = ssl.SSLContext(proto) - self.assertIs(ctx.protocol, proto) - - def test_random(self): - v = ssl.RAND_status() - if support.verbose: - sys.stdout.write("\n RAND_status is %d (%s)\n" - % (v, (v and "sufficient randomness") or - "insufficient randomness")) - - data, is_cryptographic = ssl.RAND_pseudo_bytes(16) - self.assertEqual(len(data), 16) - self.assertEqual(is_cryptographic, v == 1) - if v: - data = ssl.RAND_bytes(16) - self.assertEqual(len(data), 16) - else: - self.assertRaises(ssl.SSLError, ssl.RAND_bytes, 16) - - # negative num is invalid - self.assertRaises(ValueError, ssl.RAND_bytes, -5) - self.assertRaises(ValueError, ssl.RAND_pseudo_bytes, -5) - - if hasattr(ssl, 'RAND_egd'): - self.assertRaises(TypeError, ssl.RAND_egd, 1) - self.assertRaises(TypeError, ssl.RAND_egd, 'foo', 1) - ssl.RAND_add("this is a random string", 75.0) - ssl.RAND_add(b"this is a random bytes object", 75.0) - ssl.RAND_add(bytearray(b"this is a random bytearray object"), 75.0) - - @unittest.skipUnless(os.name == 'posix', 'requires posix') - def test_random_fork(self): - status = ssl.RAND_status() - if not status: - self.fail("OpenSSL's PRNG has insufficient randomness") - - rfd, wfd = os.pipe() - pid = os.fork() - if pid == 0: - try: - os.close(rfd) - child_random = ssl.RAND_pseudo_bytes(16)[0] - self.assertEqual(len(child_random), 16) - os.write(wfd, child_random) - os.close(wfd) - except BaseException: - os._exit(1) - else: - os._exit(0) - else: - os.close(wfd) - self.addCleanup(os.close, rfd) - _, status = os.waitpid(pid, 0) - self.assertEqual(status, 0) - - child_random = os.read(rfd, 16) - self.assertEqual(len(child_random), 16) - parent_random = ssl.RAND_pseudo_bytes(16)[0] - self.assertEqual(len(parent_random), 16) - - self.assertNotEqual(child_random, parent_random) - - maxDiff = None - - def test_parse_cert(self): - # note that this uses an 'unofficial' function in _ssl.c, - # provided solely for this test, to exercise the certificate - # parsing code - p = ssl._ssl._test_decode_cert(CERTFILE) - if support.verbose: - sys.stdout.write("\n" + pprint.pformat(p) + "\n") - self.assertEqual(p['issuer'], - ((('countryName', 'XY'),), - (('localityName', 'Castle Anthrax'),), - (('organizationName', 'Python Software Foundation'),), - (('commonName', 'localhost'),)) - ) - # Note the next three asserts will fail if the keys are regenerated - self.assertEqual(p['notAfter'], asn1time('Aug 26 14:23:15 2028 GMT')) - self.assertEqual(p['notBefore'], asn1time('Aug 29 14:23:15 2018 GMT')) - self.assertEqual(p['serialNumber'], '98A7CF88C74A32ED') - self.assertEqual(p['subject'], - ((('countryName', 'XY'),), - (('localityName', 'Castle Anthrax'),), - (('organizationName', 'Python Software Foundation'),), - (('commonName', 'localhost'),)) - ) - self.assertEqual(p['subjectAltName'], (('DNS', 'localhost'),)) - # Issue #13034: the subjectAltName in some certificates - # (notably projects.developer.nokia.com:443) wasn't parsed - p = ssl._ssl._test_decode_cert(NOKIACERT) - if support.verbose: - sys.stdout.write("\n" + pprint.pformat(p) + "\n") - self.assertEqual(p['subjectAltName'], - (('DNS', 'projects.developer.nokia.com'), - ('DNS', 'projects.forum.nokia.com')) - ) - # extra OCSP and AIA fields - self.assertEqual(p['OCSP'], ('http://ocsp.verisign.com',)) - self.assertEqual(p['caIssuers'], - ('http://SVRIntl-G3-aia.verisign.com/SVRIntlG3.cer',)) - self.assertEqual(p['crlDistributionPoints'], - 
('http://SVRIntl-G3-crl.verisign.com/SVRIntlG3.crl',)) - - def test_parse_cert_CVE_2019_5010(self): - p = ssl._ssl._test_decode_cert(TALOS_INVALID_CRLDP) - if support.verbose: - sys.stdout.write("\n" + pprint.pformat(p) + "\n") - self.assertEqual( - p, - { - 'issuer': ( - (('countryName', 'UK'),), (('commonName', 'cody-ca'),)), - 'notAfter': 'Jun 14 18:00:58 2028 GMT', - 'notBefore': 'Jun 18 18:00:58 2018 GMT', - 'serialNumber': '02', - 'subject': ((('countryName', 'UK'),), - (('commonName', - 'codenomicon-vm-2.test.lal.cisco.com'),)), - 'subjectAltName': ( - ('DNS', 'codenomicon-vm-2.test.lal.cisco.com'),), - 'version': 3 - } - ) - - def test_parse_cert_CVE_2013_4238(self): - p = ssl._ssl._test_decode_cert(NULLBYTECERT) - if support.verbose: - sys.stdout.write("\n" + pprint.pformat(p) + "\n") - subject = ((('countryName', 'US'),), - (('stateOrProvinceName', 'Oregon'),), - (('localityName', 'Beaverton'),), - (('organizationName', 'Python Software Foundation'),), - (('organizationalUnitName', 'Python Core Development'),), - (('commonName', 'null.python.org\x00example.org'),), - (('emailAddress', 'python-dev at python.org'),)) - self.assertEqual(p['subject'], subject) - self.assertEqual(p['issuer'], subject) - if ssl._OPENSSL_API_VERSION >= (0, 9, 8): - san = (('DNS', 'altnull.python.org\x00example.com'), - ('email', 'null at python.org\x00user at example.org'), - ('URI', 'http://null.python.org\x00http://example.org'), - ('IP Address', '192.0.2.1'), - ('IP Address', '2001:DB8:0:0:0:0:0:1\n')) - else: - # OpenSSL 0.9.7 doesn't support IPv6 addresses in subjectAltName - san = (('DNS', 'altnull.python.org\x00example.com'), - ('email', 'null at python.org\x00user at example.org'), - ('URI', 'http://null.python.org\x00http://example.org'), - ('IP Address', '192.0.2.1'), - ('IP Address', '')) From pypy.commits at gmail.com Wed Aug 21 13:37:22 2019 From: pypy.commits at gmail.com (mattip) Date: Wed, 21 Aug 2019 10:37:22 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: merge default, remove wrong version test files Message-ID: <5d5d8152.1c69fb81.5a03.9cb8@mx.google.com> Author: Matti Picus Branch: py3.6 Changeset: r97236:1cd037049338 Date: 2019-08-21 20:24 +0300 http://bitbucket.org/pypy/pypy/changeset/1cd037049338/ Log: merge default, remove wrong version test files diff --git a/lib-python/2.7/test/capath/efa5f9c3.0 b/lib-python/2.7/test/capath/efa5f9c3.0 deleted file mode 100644 --- a/lib-python/2.7/test/capath/efa5f9c3.0 +++ /dev/null @@ -1,34 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIF9zCCA9+gAwIBAgIUH98b4Fw/DyugC9cV7VK7ZODzHsIwDQYJKoZIhvcNAQEL -BQAwgYoxCzAJBgNVBAYTAlhZMRcwFQYDVQQIDA5DYXN0bGUgQW50aHJheDEYMBYG -A1UEBwwPQXJndW1lbnQgQ2xpbmljMSMwIQYDVQQKDBpQeXRob24gU29mdHdhcmUg -Rm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0aG9udGVzdC5uZXQw -HhcNMTkwNTA4MDEwMjQzWhcNMjcwNzI0MDEwMjQzWjCBijELMAkGA1UEBhMCWFkx -FzAVBgNVBAgMDkNhc3RsZSBBbnRocmF4MRgwFgYDVQQHDA9Bcmd1bWVudCBDbGlu -aWMxIzAhBgNVBAoMGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMSMwIQYDVQQD -DBpzZWxmLXNpZ25lZC5weXRob250ZXN0Lm5ldDCCAiIwDQYJKoZIhvcNAQEBBQAD -ggIPADCCAgoCggIBAMKdJlyCThkahwoBb7pl5q64Pe9Fn5jrIvzsveHTc97TpjV2 -RLfICnXKrltPk/ohkVl6K5SUZQZwMVzFubkyxE0nZPHYHlpiKWQxbsYVkYv01rix -IFdLvaxxbGYke2jwQao31s4o61AdlsfK1SdpHQUynBBMssqI3SB4XPmcA7e+wEEx -jxjVish4ixA1vuIZOx8yibu+CFCf/geEjoBMF3QPdzULzlrCSw8k/45iZCSoNbvK -DoL4TVV07PHOxpheDh8ZQmepGvU6pVqhb9m4lgmV0OGWHgozd5Ur9CbTVDmxIEz3 -TSoRtNJK7qtyZdGNqwjksQxgZTjM/d/Lm/BJG99AiOmYOjsl9gbQMZgvQmMAtUsI -aMJnQuZ6R+KEpW/TR5qSKLWZSG45z/op+tzI2m+cE6HwTRVAWbcuJxcAA55MZjqU 
-OOOu3BBYMjS5nf2sQ9uoXsVBFH7i0mQqoW1SLzr9opI8KsWwFxQmO2vBxWYaN+lH -OmwBZBwyODIsmI1YGXmTp09NxRYz3Qe5GCgFzYowpMrcxUC24iduIdMwwhRM7rKg -7GtIWMSrFfuI1XCLRmSlhDbhNN6fVg2f8Bo9PdH9ihiIyxSrc+FOUasUYCCJvlSZ -8hFUlLvcmrZlWuazohm0lsXuMK1JflmQr/DA/uXxP9xzFfRy+RU3jDyxJbRHAgMB -AAGjUzBRMB0GA1UdDgQWBBSQJyxiPMRK01i+0BsV9zUwDiBaHzAfBgNVHSMEGDAW -gBSQJyxiPMRK01i+0BsV9zUwDiBaHzAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4ICAQCR+7a7N/m+WLkxPPIA/CB4MOr2Uf8ixTv435Nyv6rXOun0+lTP -ExSZ0uYQ+L0WylItI3cQHULldDueD+s8TGzxf5woaLKf6tqyr0NYhKs+UeNEzDnN -9PHQIhX0SZw3XyXGUgPNBfRCg2ZDdtMMdOU4XlQN/IN/9hbYTrueyY7eXq9hmtI9 -1srftAMqr9SR1JP7aHI6DVgrEsZVMTDnfT8WmLSGLlY1HmGfdEn1Ip5sbo9uSkiH -AEPgPfjYIvR5LqTOMn4KsrlZyBbFIDh9Sl99M1kZzgH6zUGVLCDg1y6Cms69fx/e -W1HoIeVkY4b4TY7Bk7JsqyNhIuqu7ARaxkdaZWhYaA2YyknwANdFfNpfH+elCLIk -BUt5S3f4i7DaUePTvKukCZiCq4Oyln7RcOn5If73wCeLB/ZM9Ei1HforyLWP1CN8 -XLfpHaoeoPSWIveI0XHUl65LsPN2UbMbul/F23hwl+h8+BLmyAS680Yhn4zEN6Ku -B7Po90HoFa1Du3bmx4jsN73UkT/dwMTi6K072FbipnC1904oGlWmLwvAHvrtxxmL -Pl3pvEaZIu8wa/PNF6Y7J7VIewikIJq6Ta6FrWeFfzMWOj2qA1ZZi6fUaDSNYvuV -J5quYKCc/O+I/yDDf8wyBbZ/gvUXzUHTMYGG+bFrn1p7XDbYYeEJ6R/xEg== ------END CERTIFICATE----- diff --git a/lib-python/3.2/test/test_tools.py b/lib-python/3.2/test/test_tools.py deleted file mode 100644 --- a/lib-python/3.2/test/test_tools.py +++ /dev/null @@ -1,433 +0,0 @@ -"""Tests for scripts in the Tools directory. - -This file contains regression tests for some of the scripts found in the -Tools directory of a Python checkout or tarball, such as reindent.py. -""" - -import os -import sys -import imp -import unittest -import shutil -import subprocess -import sysconfig -import tempfile -import textwrap -from test import support -from test.script_helper import assert_python_ok, temp_dir - -if not sysconfig.is_python_build(): - # XXX some installers do contain the tools, should we detect that - # and run the tests in that case too? 
- raise unittest.SkipTest('test irrelevant for an installed Python') - -basepath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), - 'Tools') -scriptsdir = os.path.join(basepath, 'scripts') - - -class ReindentTests(unittest.TestCase): - script = os.path.join(scriptsdir, 'reindent.py') - - def test_noargs(self): - assert_python_ok(self.script) - - def test_help(self): - rc, out, err = assert_python_ok(self.script, '-h') - self.assertEqual(out, b'') - self.assertGreater(err, b'') - - -class PindentTests(unittest.TestCase): - script = os.path.join(scriptsdir, 'pindent.py') - - def assertFileEqual(self, fn1, fn2): - with open(fn1) as f1, open(fn2) as f2: - self.assertEqual(f1.readlines(), f2.readlines()) - - def pindent(self, source, *args): - with subprocess.Popen( - (sys.executable, self.script) + args, - stdin=subprocess.PIPE, stdout=subprocess.PIPE, - universal_newlines=True) as proc: - out, err = proc.communicate(source) - self.assertIsNone(err) - return out - - def lstriplines(self, data): - return '\n'.join(line.lstrip() for line in data.splitlines()) + '\n' - - def test_selftest(self): - self.maxDiff = None - with temp_dir() as directory: - data_path = os.path.join(directory, '_test.py') - with open(self.script) as f: - closed = f.read() - with open(data_path, 'w') as f: - f.write(closed) - - rc, out, err = assert_python_ok(self.script, '-d', data_path) - self.assertEqual(out, b'') - self.assertEqual(err, b'') - backup = data_path + '~' - self.assertTrue(os.path.exists(backup)) - with open(backup) as f: - self.assertEqual(f.read(), closed) - with open(data_path) as f: - clean = f.read() - compile(clean, '_test.py', 'exec') - self.assertEqual(self.pindent(clean, '-c'), closed) - self.assertEqual(self.pindent(closed, '-d'), clean) - - rc, out, err = assert_python_ok(self.script, '-c', data_path) - self.assertEqual(out, b'') - self.assertEqual(err, b'') - with open(backup) as f: - self.assertEqual(f.read(), clean) - with open(data_path) as f: - self.assertEqual(f.read(), closed) - - broken = self.lstriplines(closed) - with open(data_path, 'w') as f: - f.write(broken) - rc, out, err = assert_python_ok(self.script, '-r', data_path) - self.assertEqual(out, b'') - self.assertEqual(err, b'') - with open(backup) as f: - self.assertEqual(f.read(), broken) - with open(data_path) as f: - indented = f.read() - compile(indented, '_test.py', 'exec') - self.assertEqual(self.pindent(broken, '-r'), indented) - - def pindent_test(self, clean, closed): - self.assertEqual(self.pindent(clean, '-c'), closed) - self.assertEqual(self.pindent(closed, '-d'), clean) - broken = self.lstriplines(closed) - self.assertEqual(self.pindent(broken, '-r', '-e', '-s', '4'), closed) - - def test_statements(self): - clean = textwrap.dedent("""\ - if a: - pass - - if a: - pass - else: - pass - - if a: - pass - elif: - pass - else: - pass - - while a: - break - - while a: - break - else: - pass - - for i in a: - break - - for i in a: - break - else: - pass - - try: - pass - finally: - pass - - try: - pass - except TypeError: - pass - except ValueError: - pass - else: - pass - - try: - pass - except TypeError: - pass - except ValueError: - pass - finally: - pass - - with a: - pass - - class A: - pass - - def f(): - pass - """) - - closed = textwrap.dedent("""\ - if a: - pass - # end if - - if a: - pass - else: - pass - # end if - - if a: - pass - elif: - pass - else: - pass - # end if - - while a: - break - # end while - - while a: - break - else: - pass - # end while - - for i in a: - break - # end 
for - - for i in a: - break - else: - pass - # end for - - try: - pass - finally: - pass - # end try - - try: - pass - except TypeError: - pass - except ValueError: - pass - else: - pass - # end try - - try: - pass - except TypeError: - pass - except ValueError: - pass - finally: - pass - # end try - - with a: - pass - # end with - - class A: - pass - # end class A - - def f(): - pass - # end def f - """) - self.pindent_test(clean, closed) - - def test_multilevel(self): - clean = textwrap.dedent("""\ - def foobar(a, b): - if a == b: - a = a+1 - elif a < b: - b = b-1 - if b > a: a = a-1 - else: - print 'oops!' - """) - closed = textwrap.dedent("""\ - def foobar(a, b): - if a == b: - a = a+1 - elif a < b: - b = b-1 - if b > a: a = a-1 - # end if - else: - print 'oops!' - # end if - # end def foobar - """) - self.pindent_test(clean, closed) - - def test_preserve_indents(self): - clean = textwrap.dedent("""\ - if a: - if b: - pass - """) - closed = textwrap.dedent("""\ - if a: - if b: - pass - # end if - # end if - """) - self.assertEqual(self.pindent(clean, '-c'), closed) - self.assertEqual(self.pindent(closed, '-d'), clean) - broken = self.lstriplines(closed) - self.assertEqual(self.pindent(broken, '-r', '-e', '-s', '9'), closed) - clean = textwrap.dedent("""\ - if a: - \tif b: - \t\tpass - """) - closed = textwrap.dedent("""\ - if a: - \tif b: - \t\tpass - \t# end if - # end if - """) - self.assertEqual(self.pindent(clean, '-c'), closed) - self.assertEqual(self.pindent(closed, '-d'), clean) - broken = self.lstriplines(closed) - self.assertEqual(self.pindent(broken, '-r'), closed) - - def test_escaped_newline(self): - clean = textwrap.dedent("""\ - class\\ - \\ - A: - def\ - \\ - f: - pass - """) - closed = textwrap.dedent("""\ - class\\ - \\ - A: - def\ - \\ - f: - pass - # end def f - # end class A - """) - self.assertEqual(self.pindent(clean, '-c'), closed) - self.assertEqual(self.pindent(closed, '-d'), clean) - - def test_empty_line(self): - clean = textwrap.dedent("""\ - if a: - - pass - """) - closed = textwrap.dedent("""\ - if a: - - pass - # end if - """) - self.pindent_test(clean, closed) - - def test_oneline(self): - clean = textwrap.dedent("""\ - if a: pass - """) - closed = textwrap.dedent("""\ - if a: pass - # end if - """) - self.pindent_test(clean, closed) - - -class TestSundryScripts(unittest.TestCase): - # At least make sure the rest don't have syntax errors. When tests are - # added for a script it should be added to the whitelist below. - - # scripts that have independent tests. 
- whitelist = ['reindent.py'] - # scripts that can't be imported without running - blacklist = ['make_ctype.py'] - # scripts that use windows-only modules - windows_only = ['win_add2path.py'] - # blacklisted for other reasons - other = ['analyze_dxp.py'] - - skiplist = blacklist + whitelist + windows_only + other - - def setUp(self): - cm = support.DirsOnSysPath(scriptsdir) - cm.__enter__() - self.addCleanup(cm.__exit__) - - def test_sundry(self): - for fn in os.listdir(scriptsdir): - if fn.endswith('.py') and fn not in self.skiplist: - __import__(fn[:-3]) - - @unittest.skipIf(sys.platform != "win32", "Windows-only test") - def test_sundry_windows(self): - for fn in self.windows_only: - __import__(fn[:-3]) - - @unittest.skipIf(not support.threading, "test requires _thread module") - def test_analyze_dxp_import(self): - if hasattr(sys, 'getdxp'): - import analyze_dxp - else: - with self.assertRaises(RuntimeError): - import analyze_dxp - - -class PdepsTests(unittest.TestCase): - - @classmethod - def setUpClass(self): - path = os.path.join(scriptsdir, 'pdeps.py') - self.pdeps = imp.load_source('pdeps', path) - - @classmethod - def tearDownClass(self): - if 'pdeps' in sys.modules: - del sys.modules['pdeps'] - - def test_process_errors(self): - # Issue #14492: m_import.match(line) can be None. - with tempfile.TemporaryDirectory() as tmpdir: - fn = os.path.join(tmpdir, 'foo') - with open(fn, 'w') as stream: - stream.write("#!/this/will/fail") - self.pdeps.process(fn, {}) - - def test_inverse_attribute_error(self): - # Issue #14492: this used to fail with an AttributeError. - self.pdeps.inverse({'a': []}) - - -def test_main(): - support.run_unittest(*[obj for obj in globals().values() - if isinstance(obj, type)]) - - -if __name__ == '__main__': - unittest.main() From pypy.commits at gmail.com Thu Aug 22 03:25:59 2019 From: pypy.commits at gmail.com (mattip) Date: Thu, 22 Aug 2019 00:25:59 -0700 (PDT) Subject: [pypy-commit] pypy default: sync imports with upstream 2.7.15 Message-ID: <5d5e4387.1c69fb81.4d05c.2380@mx.google.com> Author: Matti Picus Branch: Changeset: r97237:e35f09200d0b Date: 2019-08-22 10:21 +0300 http://bitbucket.org/pypy/pypy/changeset/e35f09200d0b/ Log: sync imports with upstream 2.7.15 diff --git a/lib-python/2.7/test/test_dictviews.py b/lib-python/2.7/test/test_dictviews.py --- a/lib-python/2.7/test/test_dictviews.py +++ b/lib-python/2.7/test/test_dictviews.py @@ -1,5 +1,6 @@ import copy import pickle +import sys import unittest import collections from test import test_support From pypy.commits at gmail.com Thu Aug 22 05:39:51 2019 From: pypy.commits at gmail.com (arigo) Date: Thu, 22 Aug 2019 02:39:51 -0700 (PDT) Subject: [pypy-commit] pypy default: typo (would generate two semicolons in the C sources) Message-ID: <5d5e62e7.1c69fb81.26ebf.0243@mx.google.com> Author: Armin Rigo Branch: Changeset: r97238:c0c361423879 Date: 2019-08-22 11:39 +0200 http://bitbucket.org/pypy/pypy/changeset/c0c361423879/ Log: typo (would generate two semicolons in the C sources) diff --git a/rpython/translator/c/funcgen.py b/rpython/translator/c/funcgen.py --- a/rpython/translator/c/funcgen.py +++ b/rpython/translator/c/funcgen.py @@ -562,7 +562,7 @@ return '%s = %d;' % (self.expr(op.result), ARRAY.length) else: - return self.generic_get(op, '%s->length;' % self.expr(op.args[0])) + return self.generic_get(op, '%s->length' % self.expr(op.args[0])) def OP_GETARRAYITEM(self, op): ARRAY = self.lltypemap(op.args[0]).TO From pypy.commits at gmail.com Thu Aug 22 06:09:52 2019 From: pypy.commits at 
gmail.com (mattip) Date: Thu, 22 Aug 2019 03:09:52 -0700 (PDT) Subject: [pypy-commit] pypy default: reference the pypyjit module from command-line JIT help Message-ID: <5d5e69f0.1c69fb81.160a0.03a0@mx.google.com> Author: Matti Picus Branch: Changeset: r97239:d21503421f0c Date: 2019-08-22 13:09 +0300 http://bitbucket.org/pypy/pypy/changeset/d21503421f0c/ Log: reference the pypyjit module from command-line JIT help diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py --- a/pypy/interpreter/app_main.py +++ b/pypy/interpreter/app_main.py @@ -226,6 +226,8 @@ print ' turn off the JIT' print ' help' print ' print this page' + print + print 'The "pypyjit" module can be used to control the JIT from inside python' def print_version(*args): print >> sys.stderr, "Python", sys.version From pypy.commits at gmail.com Thu Aug 22 07:16:03 2019 From: pypy.commits at gmail.com (arigo) Date: Thu, 22 Aug 2019 04:16:03 -0700 (PDT) Subject: [pypy-commit] pypy default: Issue #2979 Message-ID: <5d5e7973.1c69fb81.7a2cc.ea09@mx.google.com> Author: Armin Rigo Branch: Changeset: r97240:4b0a97483aaa Date: 2019-08-22 12:03 +0200 http://bitbucket.org/pypy/pypy/changeset/4b0a97483aaa/ Log: Issue #2979 Add a gc flag on "dummy" objects created by the rtyper, and never return these objects from pypy.module.gc. diff --git a/pypy/module/gc/referents.py b/pypy/module/gc/referents.py --- a/pypy/module/gc/referents.py +++ b/pypy/module/gc/referents.py @@ -14,6 +14,8 @@ def try_cast_gcref_to_w_root(gcref): + if rgc.get_gcflag_dummy(gcref): + return None w_obj = rgc.try_cast_gcref_to_instance(W_Root, gcref) # Ignore the instances of W_Root that are not really valid as Python # objects. There is e.g. WeakrefLifeline in module/_weakref that diff --git a/rpython/memory/gc/base.py b/rpython/memory/gc/base.py --- a/rpython/memory/gc/base.py +++ b/rpython/memory/gc/base.py @@ -23,6 +23,7 @@ can_usually_pin_objects = False object_minimal_size = 0 gcflag_extra = 0 # or a dedicated GC flag that the GC initializes to 0 + gcflag_dummy = 0 # dedicated GC flag set only on rmodel.ll_dummy_value _totalroots_rpy = 0 # for inspector.py def __init__(self, config, chunk_size=DEFAULT_CHUNK_SIZE, diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py --- a/rpython/memory/gc/incminimark.py +++ b/rpython/memory/gc/incminimark.py @@ -162,7 +162,11 @@ # It does not need an additional copy in trace out GCFLAG_SHADOW_INITIALIZED = first_gcflag << 11 -_GCFLAG_FIRST_UNUSED = first_gcflag << 12 # the first unused bit +# another flag set only on specific objects: the ll_dummy_value from +# rpython.rtyper.rmodel +GCFLAG_DUMMY = first_gcflag << 12 + +_GCFLAG_FIRST_UNUSED = first_gcflag << 13 # the first unused bit # States for the incremental GC @@ -203,6 +207,7 @@ can_usually_pin_objects = True malloc_zero_filled = False gcflag_extra = GCFLAG_EXTRA + gcflag_dummy = GCFLAG_DUMMY # All objects start with a HDR, i.e. with a field 'tid' which contains # a word. 
This word is divided in two halves: the lower half contains diff --git a/rpython/memory/gc/minimark.py b/rpython/memory/gc/minimark.py --- a/rpython/memory/gc/minimark.py +++ b/rpython/memory/gc/minimark.py @@ -122,7 +122,11 @@ # note that GCFLAG_CARDS_SET is the most significant bit of a byte: # this is required for the JIT (x86) -_GCFLAG_FIRST_UNUSED = first_gcflag << 8 # the first unused bit +# another flag set only on specific objects: the ll_dummy_value from +# rpython.rtyper.rmodel +GCFLAG_DUMMY = first_gcflag << 8 + +_GCFLAG_FIRST_UNUSED = first_gcflag << 9 # the first unused bit FORWARDSTUB = lltype.GcStruct('forwarding_stub', @@ -140,6 +144,7 @@ prebuilt_gc_objects_are_static_roots = False malloc_zero_filled = True # xxx experiment with False gcflag_extra = GCFLAG_EXTRA + gcflag_dummy = GCFLAG_DUMMY # All objects start with a HDR, i.e. with a field 'tid' which contains # a word. This word is divided in two halves: the lower half contains diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py --- a/rpython/memory/gctransform/framework.py +++ b/rpython/memory/gctransform/framework.py @@ -1675,6 +1675,15 @@ self.translator = translator super(TransformerLayoutBuilder, self).__init__(GCClass, lltype2vtable) + def is_dummy_struct(self, obj): + # overrides the base method + TYPE = lltype.typeOf(obj) + try: + dummy = self.translator.rtyper.cache_dummy_values[TYPE] + except KeyError: + return False + return dummy._obj == obj + def has_destructor(self, TYPE): rtti = get_rtti(TYPE) return rtti is not None and getattr(rtti._obj, 'destructor_funcptr', diff --git a/rpython/memory/gctypelayout.py b/rpython/memory/gctypelayout.py --- a/rpython/memory/gctypelayout.py +++ b/rpython/memory/gctypelayout.py @@ -466,7 +466,10 @@ typeid = self.get_type_id(TYPE) hdr = gc.gcheaderbuilder.new_header(value) adr = llmemory.cast_ptr_to_adr(hdr) - gc.init_gc_object_immortal(adr, typeid) + if gc.gcflag_dummy and self.is_dummy_struct(value): + gc.init_gc_object_immortal(adr, typeid, flags=gc.gcflag_dummy) + else: + gc.init_gc_object_immortal(adr, typeid) self.all_prebuilt_gc.append(value) # The following collects the addresses of all the fields that have @@ -484,6 +487,10 @@ for a in gc_pointers_inside(value, adr, mutable_only=True): appendto.append(a) + def is_dummy_struct(self, obj): + return False # overridden in TransformerLayoutBuilder + + # ____________________________________________________________ # # Helpers to discover GC pointers inside structures diff --git a/rpython/memory/gcwrapper.py b/rpython/memory/gcwrapper.py --- a/rpython/memory/gcwrapper.py +++ b/rpython/memory/gcwrapper.py @@ -188,6 +188,9 @@ hdr.tid &= ~self.gc.gcflag_extra else: hdr.tid |= self.gc.gcflag_extra + elif subopnum == 4: # get_gcflag_dummy + # returns always False if gc.gcflag_dummy == 0 + return (hdr.tid & self.gc.gcflag_dummy) != 0 return (hdr.tid & self.gc.gcflag_extra) != 0 def thread_run(self): diff --git a/rpython/rlib/rgc.py b/rpython/rlib/rgc.py --- a/rpython/rlib/rgc.py +++ b/rpython/rlib/rgc.py @@ -835,6 +835,11 @@ _gcflag_extras.add(gcref) toggle_gcflag_extra._subopnum = 3 + at not_rpython +def get_gcflag_dummy(gcref): + return False +get_gcflag_dummy._subopnum = 4 + def assert_no_more_gcflags(): if not we_are_translated(): assert not _gcflag_extras @@ -1079,7 +1084,8 @@ return hop.genop('gc_typeids_list', [], resulttype = hop.r_result) class Entry(ExtRegistryEntry): - _about_ = (has_gcflag_extra, get_gcflag_extra, toggle_gcflag_extra) + _about_ = (has_gcflag_extra, 
get_gcflag_extra, toggle_gcflag_extra, + get_gcflag_dummy) def compute_result_annotation(self, s_arg=None): from rpython.annotator.model import s_Bool return s_Bool diff --git a/rpython/translator/c/gc.py b/rpython/translator/c/gc.py --- a/rpython/translator/c/gc.py +++ b/rpython/translator/c/gc.py @@ -388,10 +388,14 @@ raise Exception("the FramewokGCTransformer should handle this") def OP_GC_GCFLAG_EXTRA(self, funcgen, op): - gcflag_extra = self.db.gctransformer.gcdata.gc.gcflag_extra + subopnum = op.args[0].value + if subopnum != 4: + gcflag_extra = self.db.gctransformer.gcdata.gc.gcflag_extra + else: + gcflag_extra = self.db.gctransformer.gcdata.gc.gcflag_dummy + # if gcflag_extra == 0: return BasicGcPolicy.OP_GC_GCFLAG_EXTRA(self, funcgen, op) - subopnum = op.args[0].value if subopnum == 1: return '%s = 1; /* has_gcflag_extra */' % ( funcgen.expr(op.result),) @@ -407,6 +411,8 @@ parts.insert(0, '%s ^= %dL;' % (hdrfield, gcflag_extra)) parts.append('/* toggle_gcflag_extra */') + elif subopnum == 4: # get_gcflag_dummy + parts.append('/* get_gcflag_dummy */') else: raise AssertionError(subopnum) return ' '.join(parts) From pypy.commits at gmail.com Thu Aug 22 07:16:05 2019 From: pypy.commits at gmail.com (arigo) Date: Thu, 22 Aug 2019 04:16:05 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: hg merge default Message-ID: <5d5e7975.1c69fb81.b88d6.eb3a@mx.google.com> Author: Armin Rigo Branch: py3.6 Changeset: r97241:27e4db43a117 Date: 2019-08-22 12:04 +0200 http://bitbucket.org/pypy/pypy/changeset/27e4db43a117/ Log: hg merge default diff --git a/pypy/module/gc/referents.py b/pypy/module/gc/referents.py --- a/pypy/module/gc/referents.py +++ b/pypy/module/gc/referents.py @@ -14,6 +14,8 @@ def try_cast_gcref_to_w_root(gcref): + if rgc.get_gcflag_dummy(gcref): + return None w_obj = rgc.try_cast_gcref_to_instance(W_Root, gcref) # Ignore the instances of W_Root that are not really valid as Python # objects. There is e.g. WeakrefLifeline in module/_weakref that diff --git a/rpython/memory/gc/base.py b/rpython/memory/gc/base.py --- a/rpython/memory/gc/base.py +++ b/rpython/memory/gc/base.py @@ -23,6 +23,7 @@ can_usually_pin_objects = False object_minimal_size = 0 gcflag_extra = 0 # or a dedicated GC flag that the GC initializes to 0 + gcflag_dummy = 0 # dedicated GC flag set only on rmodel.ll_dummy_value _totalroots_rpy = 0 # for inspector.py def __init__(self, config, chunk_size=DEFAULT_CHUNK_SIZE, diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py --- a/rpython/memory/gc/incminimark.py +++ b/rpython/memory/gc/incminimark.py @@ -162,7 +162,11 @@ # It does not need an additional copy in trace out GCFLAG_SHADOW_INITIALIZED = first_gcflag << 11 -_GCFLAG_FIRST_UNUSED = first_gcflag << 12 # the first unused bit +# another flag set only on specific objects: the ll_dummy_value from +# rpython.rtyper.rmodel +GCFLAG_DUMMY = first_gcflag << 12 + +_GCFLAG_FIRST_UNUSED = first_gcflag << 13 # the first unused bit # States for the incremental GC @@ -203,6 +207,7 @@ can_usually_pin_objects = True malloc_zero_filled = False gcflag_extra = GCFLAG_EXTRA + gcflag_dummy = GCFLAG_DUMMY # All objects start with a HDR, i.e. with a field 'tid' which contains # a word. 
This word is divided in two halves: the lower half contains diff --git a/rpython/memory/gc/minimark.py b/rpython/memory/gc/minimark.py --- a/rpython/memory/gc/minimark.py +++ b/rpython/memory/gc/minimark.py @@ -122,7 +122,11 @@ # note that GCFLAG_CARDS_SET is the most significant bit of a byte: # this is required for the JIT (x86) -_GCFLAG_FIRST_UNUSED = first_gcflag << 8 # the first unused bit +# another flag set only on specific objects: the ll_dummy_value from +# rpython.rtyper.rmodel +GCFLAG_DUMMY = first_gcflag << 8 + +_GCFLAG_FIRST_UNUSED = first_gcflag << 9 # the first unused bit FORWARDSTUB = lltype.GcStruct('forwarding_stub', @@ -140,6 +144,7 @@ prebuilt_gc_objects_are_static_roots = False malloc_zero_filled = True # xxx experiment with False gcflag_extra = GCFLAG_EXTRA + gcflag_dummy = GCFLAG_DUMMY # All objects start with a HDR, i.e. with a field 'tid' which contains # a word. This word is divided in two halves: the lower half contains diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py --- a/rpython/memory/gctransform/framework.py +++ b/rpython/memory/gctransform/framework.py @@ -1675,6 +1675,15 @@ self.translator = translator super(TransformerLayoutBuilder, self).__init__(GCClass, lltype2vtable) + def is_dummy_struct(self, obj): + # overrides the base method + TYPE = lltype.typeOf(obj) + try: + dummy = self.translator.rtyper.cache_dummy_values[TYPE] + except KeyError: + return False + return dummy._obj == obj + def has_destructor(self, TYPE): rtti = get_rtti(TYPE) return rtti is not None and getattr(rtti._obj, 'destructor_funcptr', diff --git a/rpython/memory/gctypelayout.py b/rpython/memory/gctypelayout.py --- a/rpython/memory/gctypelayout.py +++ b/rpython/memory/gctypelayout.py @@ -466,7 +466,10 @@ typeid = self.get_type_id(TYPE) hdr = gc.gcheaderbuilder.new_header(value) adr = llmemory.cast_ptr_to_adr(hdr) - gc.init_gc_object_immortal(adr, typeid) + if gc.gcflag_dummy and self.is_dummy_struct(value): + gc.init_gc_object_immortal(adr, typeid, flags=gc.gcflag_dummy) + else: + gc.init_gc_object_immortal(adr, typeid) self.all_prebuilt_gc.append(value) # The following collects the addresses of all the fields that have @@ -484,6 +487,10 @@ for a in gc_pointers_inside(value, adr, mutable_only=True): appendto.append(a) + def is_dummy_struct(self, obj): + return False # overridden in TransformerLayoutBuilder + + # ____________________________________________________________ # # Helpers to discover GC pointers inside structures diff --git a/rpython/memory/gcwrapper.py b/rpython/memory/gcwrapper.py --- a/rpython/memory/gcwrapper.py +++ b/rpython/memory/gcwrapper.py @@ -188,6 +188,9 @@ hdr.tid &= ~self.gc.gcflag_extra else: hdr.tid |= self.gc.gcflag_extra + elif subopnum == 4: # get_gcflag_dummy + # returns always False if gc.gcflag_dummy == 0 + return (hdr.tid & self.gc.gcflag_dummy) != 0 return (hdr.tid & self.gc.gcflag_extra) != 0 def thread_run(self): diff --git a/rpython/rlib/rgc.py b/rpython/rlib/rgc.py --- a/rpython/rlib/rgc.py +++ b/rpython/rlib/rgc.py @@ -835,6 +835,11 @@ _gcflag_extras.add(gcref) toggle_gcflag_extra._subopnum = 3 + at not_rpython +def get_gcflag_dummy(gcref): + return False +get_gcflag_dummy._subopnum = 4 + def assert_no_more_gcflags(): if not we_are_translated(): assert not _gcflag_extras @@ -1079,7 +1084,8 @@ return hop.genop('gc_typeids_list', [], resulttype = hop.r_result) class Entry(ExtRegistryEntry): - _about_ = (has_gcflag_extra, get_gcflag_extra, toggle_gcflag_extra) + _about_ = (has_gcflag_extra, 
get_gcflag_extra, toggle_gcflag_extra, + get_gcflag_dummy) def compute_result_annotation(self, s_arg=None): from rpython.annotator.model import s_Bool return s_Bool diff --git a/rpython/translator/c/funcgen.py b/rpython/translator/c/funcgen.py --- a/rpython/translator/c/funcgen.py +++ b/rpython/translator/c/funcgen.py @@ -562,7 +562,7 @@ return '%s = %d;' % (self.expr(op.result), ARRAY.length) else: - return self.generic_get(op, '%s->length;' % self.expr(op.args[0])) + return self.generic_get(op, '%s->length' % self.expr(op.args[0])) def OP_GETARRAYITEM(self, op): ARRAY = self.lltypemap(op.args[0]).TO diff --git a/rpython/translator/c/gc.py b/rpython/translator/c/gc.py --- a/rpython/translator/c/gc.py +++ b/rpython/translator/c/gc.py @@ -388,10 +388,14 @@ raise Exception("the FramewokGCTransformer should handle this") def OP_GC_GCFLAG_EXTRA(self, funcgen, op): - gcflag_extra = self.db.gctransformer.gcdata.gc.gcflag_extra + subopnum = op.args[0].value + if subopnum != 4: + gcflag_extra = self.db.gctransformer.gcdata.gc.gcflag_extra + else: + gcflag_extra = self.db.gctransformer.gcdata.gc.gcflag_dummy + # if gcflag_extra == 0: return BasicGcPolicy.OP_GC_GCFLAG_EXTRA(self, funcgen, op) - subopnum = op.args[0].value if subopnum == 1: return '%s = 1; /* has_gcflag_extra */' % ( funcgen.expr(op.result),) @@ -407,6 +411,8 @@ parts.insert(0, '%s ^= %dL;' % (hdrfield, gcflag_extra)) parts.append('/* toggle_gcflag_extra */') + elif subopnum == 4: # get_gcflag_dummy + parts.append('/* get_gcflag_dummy */') else: raise AssertionError(subopnum) return ' '.join(parts) From pypy.commits at gmail.com Thu Aug 22 07:16:07 2019 From: pypy.commits at gmail.com (arigo) Date: Thu, 22 Aug 2019 04:16:07 -0700 (PDT) Subject: [pypy-commit] pypy default: merge heads Message-ID: <5d5e7977.1c69fb81.4d358.59b8@mx.google.com> Author: Armin Rigo Branch: Changeset: r97242:bacea0b28bb3 Date: 2019-08-22 13:15 +0200 http://bitbucket.org/pypy/pypy/changeset/bacea0b28bb3/ Log: merge heads diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py --- a/pypy/interpreter/app_main.py +++ b/pypy/interpreter/app_main.py @@ -226,6 +226,8 @@ print ' turn off the JIT' print ' help' print ' print this page' + print + print 'The "pypyjit" module can be used to control the JIT from inside python' def print_version(*args): print >> sys.stderr, "Python", sys.version From pypy.commits at gmail.com Thu Aug 22 09:01:51 2019 From: pypy.commits at gmail.com (arigo) Date: Thu, 22 Aug 2019 06:01:51 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Make gettext be importable, at least, if sys.base_prefix is not set. Message-ID: <5d5e923f.1c69fb81.faa84.9474@mx.google.com> Author: Armin Rigo Branch: py3.6 Changeset: r97243:38055c21771a Date: 2019-08-22 15:01 +0200 http://bitbucket.org/pypy/pypy/changeset/38055c21771a/ Log: Make gettext be importable, at least, if sys.base_prefix is not set. See comment diff --git a/lib-python/3/gettext.py b/lib-python/3/gettext.py --- a/lib-python/3/gettext.py +++ b/lib-python/3/gettext.py @@ -57,7 +57,11 @@ 'ldngettext', 'lngettext', 'ngettext', ] -_default_localedir = os.path.join(sys.base_prefix, 'share', 'locale') +try: + _default_localedir = os.path.join(sys.base_prefix, 'share', 'locale') +except AttributeError: + pass # pypy: sys.base_prefix is not set if pypy3-c is issuing + # "Library path not found, using compiled-in sys.path" # Expression parsing for plural form selection. 
# From pypy.commits at gmail.com Thu Aug 22 11:49:40 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 22 Aug 2019 08:49:40 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Use same logic as CPython in int.__new__ and fix yet another corner case Message-ID: <5d5eb994.1c69fb81.ea7c8.002a@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97244:1a1112535b61 Date: 2019-08-22 16:47 +0100 http://bitbucket.org/pypy/pypy/changeset/1a1112535b61/ Log: Use same logic as CPython in int.__new__ and fix yet another corner case diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -296,7 +296,6 @@ "expected %s, got %T object", expected, self) def int(self, space): - from pypy.objspace.std.intobject import _new_int w_impl = space.lookup(self, '__int__') if w_impl is None: self._typed_unwrap_error(space, "integer") diff --git a/pypy/objspace/std/intobject.py b/pypy/objspace/std/intobject.py --- a/pypy/objspace/std/intobject.py +++ b/pypy/objspace/std/intobject.py @@ -9,7 +9,7 @@ import sys from rpython.rlib import jit -from rpython.rlib.objectmodel import instantiate +from rpython.rlib.objectmodel import instantiate, enforceargs from rpython.rlib.rarithmetic import ( LONG_BIT, intmask, is_valid_int, ovfcheck, r_longlong, r_uint, string_to_int) @@ -851,80 +851,49 @@ sys.maxint == 2147483647) -def _string_to_int_or_long(space, w_inttype, w_source, string, base=10): +def _string_to_int_or_long(space, w_source, string, base=10): try: - value = string_to_int(string, base, allow_underscores=True, no_implicit_octal=True) + value = string_to_int( + string, base, allow_underscores=True, no_implicit_octal=True) + return wrapint(space, value) except ParseStringError as e: raise wrap_parsestringerror(space, e, w_source) except ParseStringOverflowError as e: - return _retry_to_w_long(space, e.parser, w_inttype, w_source) + return _retry_to_w_long(space, e.parser, w_source) - if space.is_w(w_inttype, space.w_int): - w_result = wrapint(space, value) - else: - w_result = space.allocate_instance(W_IntObject, w_inttype) - W_IntObject.__init__(w_result, value) - return w_result - -def _retry_to_w_long(space, parser, w_inttype, w_source): +def _retry_to_w_long(space, parser, w_source): from pypy.objspace.std.longobject import newbigint parser.rewind() try: bigint = rbigint._from_numberstring_parser(parser) except ParseStringError as e: raise wrap_parsestringerror(space, e, w_source) - return newbigint(space, w_inttype, bigint) + return newbigint(space, space.w_int, bigint) def _new_int(space, w_inttype, w_x, w_base=None): - from pypy.objspace.std.longobject import ( - W_AbstractLongObject, W_LongObject, newlong, newbigint) - if space.config.objspace.std.withsmalllong: - from pypy.objspace.std.smalllongobject import W_SmallLongObject + w_value = w_x # 'x' is the keyword argument name in CPython + if w_inttype is space.w_int: + return _new_baseint(space, w_x, w_base) else: - W_SmallLongObject = None + w_tmp = _new_baseint(space, w_x, w_base) + return _as_subint(space, w_inttype, w_tmp) - w_longval = None - w_value = w_x # 'x' is the keyword argument name in CPython - value = 0 +def _new_baseint(space, w_value, w_base=None): if w_base is None: - #import pdb; pdb.set_trace() - # check for easy cases - if type(w_value) is W_IntObject: - if space.is_w(w_inttype, space.w_int): - return w_value - value = w_value.intval - w_obj = space.allocate_instance(W_IntObject, w_inttype) - W_IntObject.__init__(w_obj, value) - 
return w_obj - elif type(w_value) is W_LongObject: - if space.is_w(w_inttype, space.w_int): - return w_value - return newbigint(space, w_inttype, w_value.num) - elif W_SmallLongObject and type(w_value) is W_SmallLongObject: - if space.is_w(w_inttype, space.w_int): - return w_value - return newbigint(space, w_inttype, space.bigint_w(w_value)) + if space.is_w(space.type(w_value), space.w_int): + assert isinstance(w_value, W_AbstractIntObject) + return w_value elif space.lookup(w_value, '__int__') is not None: w_intvalue = space.int(w_value) - if isinstance(w_intvalue, W_IntObject): - if type(w_intvalue) is not W_IntObject: - w_intvalue = wrapint(space, w_intvalue.intval) - return _new_int(space, w_inttype, w_intvalue) - elif isinstance(w_intvalue, W_AbstractLongObject): - if type(w_intvalue) is not W_LongObject: - w_intvalue = newlong(space, w_intvalue.asbigint()) - return _new_int(space, w_inttype, w_intvalue) - else: - # shouldn't happen - raise oefmt(space.w_RuntimeError, - "internal error in int.__new__()") + return _ensure_baseint(space, w_intvalue) elif space.lookup(w_value, '__trunc__') is not None: w_obj = space.trunc(w_value) - if not space.is_w(space.type(w_obj), space.w_int): + if not space.isinstance_w(w_obj, space.w_int): w_obj = space.int(w_obj) - return _from_intlike(space, w_inttype, w_obj) + assert isinstance(w_obj, W_AbstractIntObject) + return _ensure_baseint(space, w_obj) elif space.isinstance_w(w_value, space.w_unicode): from pypy.objspace.std.unicodeobject import unicode_to_decimal_w try: @@ -933,10 +902,10 @@ raise oefmt(space.w_ValueError, 'invalid literal for int() with base 10: %R', w_value) - return _string_to_int_or_long(space, w_inttype, w_value, b) + return _string_to_int_or_long(space, w_value, b) elif (space.isinstance_w(w_value, space.w_bytearray) or space.isinstance_w(w_value, space.w_bytes)): - return _string_to_int_or_long(space, w_inttype, w_value, + return _string_to_int_or_long(space, w_value, space.charbuf_w(w_value)) else: # If object supports the buffer interface @@ -949,7 +918,7 @@ "int() argument must be a string, a bytes-like " "object or a number, not '%T'", w_value) else: - return _string_to_int_or_long(space, w_inttype, w_value, buf) + return _string_to_int_or_long(space, w_value, buf) else: try: base = space.getindex_w(w_base, None) @@ -973,14 +942,40 @@ raise oefmt(space.w_TypeError, "int() can't convert non-string with explicit base") - return _string_to_int_or_long(space, w_inttype, w_value, s, base) + return _string_to_int_or_long(space, w_value, s, base) + at enforceargs(None, None, W_AbstractIntObject, typecheck=False) +def _as_subint(space, w_inttype, w_value): + from pypy.objspace.std.longobject import W_LongObject, newbigint + if space.config.objspace.std.withsmalllong: + from pypy.objspace.std.smalllongobject import W_SmallLongObject + else: + W_SmallLongObject = None + if type(w_value) is W_IntObject: + w_obj = space.allocate_instance(W_IntObject, w_inttype) + W_IntObject.__init__(w_obj, w_value.intval) + return w_obj + elif type(w_value) is W_LongObject: + return newbigint(space, w_inttype, w_value.num) + elif W_SmallLongObject and type(w_value) is W_SmallLongObject: + return newbigint(space, w_inttype, space.bigint_w(w_value)) -def _from_intlike(space, w_inttype, w_intlike): - if space.is_w(w_inttype, space.w_int): - return w_intlike - from pypy.objspace.std.longobject import newbigint - return newbigint(space, w_inttype, space.bigint_w(w_intlike)) +#@enforceargs(None, W_AbstractIntObject, typecheck=False) +def 
_ensure_baseint(space, w_intvalue): + from pypy.objspace.std.longobject import ( + W_LongObject, W_AbstractLongObject, newlong) + if isinstance(w_intvalue, W_IntObject): + if type(w_intvalue) is not W_IntObject: + w_intvalue = wrapint(space, w_intvalue.intval) + return w_intvalue + elif isinstance(w_intvalue, W_AbstractLongObject): + if type(w_intvalue) is not W_LongObject: + w_intvalue = newlong(space, w_intvalue.asbigint()) + return w_intvalue + else: + # shouldn't happen + raise oefmt(space.w_RuntimeError, + "internal error in int.__new__()") W_AbstractIntObject.typedef = TypeDef("int", diff --git a/pypy/objspace/std/test/test_intobject.py b/pypy/objspace/std/test/test_intobject.py --- a/pypy/objspace/std/test/test_intobject.py +++ b/pypy/objspace/std/test/test_intobject.py @@ -533,6 +533,19 @@ assert n == 1 assert type(n) is int + def test_trunc_returns_int_subclass_2(self): + class BadInt: + def __int__(self): + return True + + class TruncReturnsBadInt: + def __trunc__(self): + return BadInt() + bad_int = TruncReturnsBadInt() + n = int(bad_int) + assert n == 1 + assert type(n) is int + def test_int_before_string(self): class Integral(str): def __int__(self): From pypy.commits at gmail.com Thu Aug 22 12:31:25 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 22 Aug 2019 09:31:25 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Fix annotations related to _new_int() Message-ID: <5d5ec35d.1c69fb81.5bd33.00e0@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97245:1114b8c0be6d Date: 2019-08-22 17:30 +0100 http://bitbucket.org/pypy/pypy/changeset/1114b8c0be6d/ Log: Fix annotations related to _new_int() diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -296,14 +296,17 @@ "expected %s, got %T object", expected, self) def int(self, space): + from pypy.objspace.std.intobject import W_AbstractIntObject w_impl = space.lookup(self, '__int__') if w_impl is None: self._typed_unwrap_error(space, "integer") w_result = space.get_and_call_function(w_impl, self) if space.is_w(space.type(w_result), space.w_int): + assert isinstance(w_result, W_AbstractIntObject) return w_result if space.isinstance_w(w_result, space.w_int): + assert isinstance(w_result, W_AbstractIntObject) tp = space.type(w_result).name space.warn(space.newtext( "__int__ returned non-int (type %s). 
" @@ -815,7 +818,7 @@ return self.w_None return w_obj - @signature(types.any(), types.bool(), returns=types.instance(W_Root)) + @signature(types.any(), types.bool(), returns=types.any()) def newbool(self, b): if b: return self.w_True diff --git a/pypy/objspace/std/intobject.py b/pypy/objspace/std/intobject.py --- a/pypy/objspace/std/intobject.py +++ b/pypy/objspace/std/intobject.py @@ -960,7 +960,7 @@ elif W_SmallLongObject and type(w_value) is W_SmallLongObject: return newbigint(space, w_inttype, space.bigint_w(w_value)) -#@enforceargs(None, W_AbstractIntObject, typecheck=False) + at enforceargs(None, W_AbstractIntObject, typecheck=False) def _ensure_baseint(space, w_intvalue): from pypy.objspace.std.longobject import ( W_LongObject, W_AbstractLongObject, newlong) From pypy.commits at gmail.com Thu Aug 22 12:46:16 2019 From: pypy.commits at gmail.com (rlamy) Date: Thu, 22 Aug 2019 09:46:16 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: CPython compatibility: IOBase.readlines() should rely on the iterator protocol instead of calling readline() directly Message-ID: <5d5ec6d8.1c69fb81.941b5.0487@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97246:7b9b2790269b Date: 2019-08-22 17:45 +0100 http://bitbucket.org/pypy/pypy/changeset/7b9b2790269b/ Log: CPython compatibility: IOBase.readlines() should rely on the iterator protocol instead of calling readline() directly diff --git a/pypy/module/_io/interp_iobase.py b/pypy/module/_io/interp_iobase.py --- a/pypy/module/_io/interp_iobase.py +++ b/pypy/module/_io/interp_iobase.py @@ -266,24 +266,16 @@ def readlines_w(self, space, w_hint=None): hint = convert_size(space, w_hint) - if hint <= 0: return space.newlist(space.unpackiterable(self)) + length = 0 lines_w = [] - length = 0 - while True: - w_line = space.call_method(self, "readline") - line_length = space.len_w(w_line) - if line_length == 0: # done - break - + for w_line in space.iteriterable(self): lines_w.append(w_line) - - length += line_length + length += space.len_w(w_line) if length > hint: break - return space.newlist(lines_w) def writelines_w(self, space, w_lines): From pypy.commits at gmail.com Fri Aug 23 08:36:10 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 23 Aug 2019 05:36:10 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Avoid import-time exception in test.test_asyncio.test_futures Message-ID: <5d5fddba.1c69fb81.f1489.8e68@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97247:d7477dd3fb65 Date: 2019-08-23 13:35 +0100 http://bitbucket.org/pypy/pypy/changeset/d7477dd3fb65/ Log: Avoid import-time exception in test.test_asyncio.test_futures diff --git a/lib-python/3/test/test_asyncio/test_futures.py b/lib-python/3/test/test_asyncio/test_futures.py --- a/lib-python/3/test/test_asyncio/test_futures.py +++ b/lib-python/3/test/test_asyncio/test_futures.py @@ -534,7 +534,7 @@ @unittest.skipUnless(hasattr(futures, '_CFuture'), 'requires the C _asyncio module') class CFutureTests(BaseFutureTests, test_utils.TestCase): - cls = getattr(futures, '_CFuture') + cls = getattr(futures, '_CFuture', None) class PyFutureTests(BaseFutureTests, test_utils.TestCase): From pypy.commits at gmail.com Fri Aug 23 10:12:47 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 23 Aug 2019 07:12:47 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Don't inherit IS_ABSTRACT flag Message-ID: <5d5ff45f.1c69fb81.a0645.23aa@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97248:486a01a9aafe Date: 2019-08-23 15:11 +0100 http://bitbucket.org/pypy/pypy/changeset/486a01a9aafe/ 
Log: Don't inherit IS_ABSTRACT flag diff --git a/pypy/objspace/std/test/test_typeobject.py b/pypy/objspace/std/test/test_typeobject.py --- a/pypy/objspace/std/test/test_typeobject.py +++ b/pypy/objspace/std/test/test_typeobject.py @@ -71,6 +71,22 @@ raises(AttributeError, getattr, type, "__abstractmethods__") raises(TypeError, "int.__abstractmethods__ = ('abc', )") + def test_is_abstract_flag(self): + # IS_ABSTRACT flag should always be in sync with + # cls.__dict__['__abstractmethods__'] + FLAG_IS_ABSTRACT = 1 << 20 + + class Base: + pass + Base.__abstractmethods__ = {'x'} + assert Base.__flags__ & FLAG_IS_ABSTRACT + + class Derived(Base): + pass + assert not (Derived.__flags__ & FLAG_IS_ABSTRACT) + Derived.__abstractmethods__ = {'x'} + assert Derived.__flags__ & FLAG_IS_ABSTRACT + def test_attribute_error(self): class X(object): __module__ = 'test' diff --git a/pypy/objspace/std/typeobject.py b/pypy/objspace/std/typeobject.py --- a/pypy/objspace/std/typeobject.py +++ b/pypy/objspace/std/typeobject.py @@ -816,7 +816,7 @@ return space.call_function(newfunc, w_winner, w_name, w_bases, w_dict) w_typetype = w_winner - name = space.text_w(w_name) + name = space.text_w(w_name) if '\x00' in name: raise oefmt(space.w_ValueError, "type name must not contain null characters") pos = surrogate_in_utf8(name) @@ -1339,7 +1339,6 @@ if not isinstance(w_base, W_TypeObject): continue w_self.flag_cpytype |= w_base.flag_cpytype - w_self.flag_abstract |= w_base.flag_abstract if w_self.flag_map_or_seq == '?': w_self.flag_map_or_seq = w_base.flag_map_or_seq From pypy.commits at gmail.com Fri Aug 23 11:51:08 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 23 Aug 2019 08:51:08 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Check return value of decoder.getstate() Message-ID: <5d600b6c.1c69fb81.1ed1.ba6f@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97249:c795f015ed36 Date: 2019-08-23 16:50 +0100 http://bitbucket.org/pypy/pypy/changeset/c795f015ed36/ Log: Check return value of decoder.getstate() diff --git a/pypy/module/_io/interp_textio.py b/pypy/module/_io/interp_textio.py --- a/pypy/module/_io/interp_textio.py +++ b/pypy/module/_io/interp_textio.py @@ -663,12 +663,15 @@ # To prepare for tell(), we need to snapshot a point in the file # where the decoder's input buffer is empty. w_state = space.call_method(self.w_decoder, "getstate") + if (not space.isinstance_w(w_state, space.w_tuple) + or space.len_w(w_state) != 2): + raise oefmt(space.w_TypeError, "illegal decoder state") # Given this, we know there was a valid snapshot point # len(dec_buffer) bytes ago with decoder state (b'', dec_flags). 
w_dec_buffer, w_dec_flags = space.unpackiterable(w_state, 2) if not space.isinstance_w(w_dec_buffer, space.w_bytes): - msg = "decoder getstate() should have returned a bytes " \ - "object not '%T'" + msg = ("illegal decoder state: the first value should be a " + "bytes object not '%T'") raise oefmt(space.w_TypeError, msg, w_dec_buffer) dec_buffer = space.bytes_w(w_dec_buffer) dec_flags = space.int_w(w_dec_flags) From pypy.commits at gmail.com Fri Aug 23 12:43:56 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 23 Aug 2019 09:43:56 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: Reset raw_pos after unwinding the raw stream (bpo-32228) Message-ID: <5d6017cc.1c69fb81.c0252.d50b@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97250:bad11dff1f59 Date: 2019-08-23 17:43 +0100 http://bitbucket.org/pypy/pypy/changeset/bad11dff1f59/ Log: Reset raw_pos after unwinding the raw stream (bpo-32228) diff --git a/pypy/module/_io/interp_bufferedio.py b/pypy/module/_io/interp_bufferedio.py --- a/pypy/module/_io/interp_bufferedio.py +++ b/pypy/module/_io/interp_bufferedio.py @@ -319,7 +319,6 @@ with self.lock: if self.writable: self._writer_flush_unlocked(space) - self._writer_reset_buf() if whence == 1: pos -= self._raw_offset() @@ -371,6 +370,7 @@ def _writer_flush_unlocked(self, space): if self.write_end == -1 or self.write_pos == self.write_end: + self._writer_reset_buf() return # First, rewind rewind = self._raw_offset() + (self.pos - self.write_pos) diff --git a/pypy/module/_io/test/test_bufferedio.py b/pypy/module/_io/test/test_bufferedio.py --- a/pypy/module/_io/test/test_bufferedio.py +++ b/pypy/module/_io/test/test_bufferedio.py @@ -513,6 +513,17 @@ assert b.truncate() == 8 assert b.tell() == 8 + def test_truncate_after_write(self): + import _io + raw = _io.FileIO(self.tmpfile, 'rb+') + raw.write(b'\x00' * 50) + raw.seek(0) + b = _io.BufferedRandom(raw, 10) + b.write(b'\x00' * 11) + b.read(1) + b.truncate() + assert b.tell() == 12 + def test_write_non_blocking(self): import _io, io class MockNonBlockWriterIO(io.RawIOBase): From pypy.commits at gmail.com Fri Aug 23 13:32:41 2019 From: pypy.commits at gmail.com (rlamy) Date: Fri, 23 Aug 2019 10:32:41 -0700 (PDT) Subject: [pypy-commit] pypy py3.6: fix test for minor implementation difference Message-ID: <5d602339.1c69fb81.7a9e7.0a55@mx.google.com> Author: Ronan Lamy Branch: py3.6 Changeset: r97251:5aa182e30c62 Date: 2019-08-23 18:32 +0100 http://bitbucket.org/pypy/pypy/changeset/5aa182e30c62/ Log: fix test for minor implementation difference diff --git a/lib-python/3/test/list_tests.py b/lib-python/3/test/list_tests.py --- a/lib-python/3/test/list_tests.py +++ b/lib-python/3/test/list_tests.py @@ -546,7 +546,7 @@ u += "eggs" self.assertEqual(u, self.type2test("spameggs")) - self.assertRaises(TypeError, u.__iadd__, None) + self.assertRaises(TypeError, "u += None") # PyPy change def test_imul(self): u = self.type2test([0, 1]) From pypy.commits at gmail.com Fri Aug 23 14:25:11 2019 From: pypy.commits at gmail.com (andrewjlawrence) Date: Fri, 23 Aug 2019 11:25:11 -0700 (PDT) Subject: [pypy-commit] pypy winconsoleio: A bit more implementation Message-ID: <5d602f87.1c69fb81.8d74b.c4b0@mx.google.com> Author: andrewjlawrence Branch: winconsoleio Changeset: r97252:22748ac954cb Date: 2019-08-19 08:17 +0100 http://bitbucket.org/pypy/pypy/changeset/22748ac954cb/ Log: A bit more implementation diff --git a/pypy/module/_io/interp_win32consoleio.py b/pypy/module/_io/interp_win32consoleio.py --- a/pypy/module/_io/interp_win32consoleio.py +++ 
b/pypy/module/_io/interp_win32consoleio.py @@ -15,11 +15,22 @@ import unicodedata SMALLBUF = 4 +BUFMAX = (32*1024*1024) def err_closed(space): raise oefmt(space.w_ValueError, "I/O operation on closed file") +def err_mode(space, state): + # TODO sort out the state + raise oefmt(space.w_ValueError, + "I/O operation on closed file") + + +def read_console_w(handle, maxlen, readlen): + #TODO implement me + pass + def _get_console_type(handle): mode = lltype.malloc(rwin32.LPDWORD.TO,0,flavor='raw') peek_count = lltype.malloc(rwin32.LPDWORD.TO,0,flavor='raw') @@ -115,6 +126,10 @@ # def _internal_close(self, space): # pass + def _copyfrombuf(self, buf, len): + # TODO implement me. + pass + @unwrap_spec(w_mode=WrappedDefault("r"), w_closefd=WrappedDefault(True), w_opener=WrappedDefault(None)) def descr_init(self, space, w_nameobj, w_mode, w_closefd, w_opener): return None @@ -262,10 +277,50 @@ return err_mode("fileno") return space.newint(self.fd) - def readinto_w(self, space): + def readinto_w(self, space, w_buffer): + rwbuffer = space.writebuf_w(w_buffer) + length = rwbuffer.getlength() + if self.handle == rwin32.INVALID_HANDLE_VALUE: return err_closed(space) + if not self.readable: + err_mode(space, "reading") + + if not length: + return space.newint(0) + + if length > BUFMAX: + raise oefmt(space.w_ValueError, + "cannot read more than %d bytes", BUFMAX) + + wlen = rffi.cast(rffi.DWORD, length / 4) + if not wlen: + wlen = 1 + + read_len = _copyfrombuf(self, buf, rffi.cast(rffi.DWORD, length)) + if read_len: + buf.setslice(read_len, length) + length = length - read_len + wlen = wlen - 1 + + if length == read_len or not wlen: + return read_len + + with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as n: + wbuf = read_console_w(self.handle, wlen , n) + + if not wbuf: + return -1 + + if n == 0: + return read_len + + u8n = 0 + + ##if len < 4: + + def get_blksize(self,space): return space.newint(self.blksize) diff --git a/rpython/rlib/rwin32.py b/rpython/rlib/rwin32.py --- a/rpython/rlib/rwin32.py +++ b/rpython/rlib/rwin32.py @@ -575,3 +575,8 @@ GetNumberOfConsoleInputEvents = winexternal( 'GetNumberOfConsoleInputEvents', [HANDLE, LPDWORD], BOOL) + + WideCharToMultiByte = winexternal( + 'WideCharToMultiByte', [rffi.UINT, DWORD, rffi.CWCHARP, rffi.INT, + LPSTR, rffi.INT, rffi.CCHARP, LPBOOL], rffi.INT, + save_err=rffi.RFFI_SAVE_LASTERROR) From pypy.commits at gmail.com Fri Aug 23 14:25:13 2019 From: pypy.commits at gmail.com (andrewjlawrence) Date: Fri, 23 Aug 2019 11:25:13 -0700 (PDT) Subject: [pypy-commit] pypy winconsoleio: work in progress Message-ID: <5d602f89.1c69fb81.aa2f2.9490@mx.google.com> Author: andrewjlawrence Branch: winconsoleio Changeset: r97253:2626fc18dc10 Date: 2019-08-23 19:22 +0100 http://bitbucket.org/pypy/pypy/changeset/2626fc18dc10/ Log: work in progress diff --git a/pypy/module/_io/interp_win32consoleio.py b/pypy/module/_io/interp_win32consoleio.py --- a/pypy/module/_io/interp_win32consoleio.py +++ b/pypy/module/_io/interp_win32consoleio.py @@ -28,8 +28,22 @@ def read_console_w(handle, maxlen, readlen): - #TODO implement me - pass + err = 0 + sig = 0 + buf = lltype.malloc(rwin32.CWCHARP, maxlen, flavor='raw') + try: + if not buf: + return None + + off = 0 + while off < maxlen: + n = rffi.cast(rwin32.DWORD, -1) + len = m + finally: + lltype.free(buf, flavor='raw') + + + def _get_console_type(handle): mode = lltype.malloc(rwin32.LPDWORD.TO,0,flavor='raw') @@ -294,32 +308,63 @@ raise oefmt(space.w_ValueError, "cannot read more than %d bytes", BUFMAX) - wlen = rffi.cast(rffi.DWORD, 
length / 4) + wlen = rffi.cast(rwin32.DWORD, length / 4) if not wlen: wlen = 1 - read_len = _copyfrombuf(self, buf, rffi.cast(rffi.DWORD, length)) + read_len = self._copyfrombuf(rwbuffer, rffi.cast(rwin32.DWORD, length)) if read_len: - buf.setslice(read_len, length) + rwbuffer.setslice(read_len, length) length = length - read_len wlen = wlen - 1 if length == read_len or not wlen: - return read_len + return space.newint(read_len) with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as n: wbuf = read_console_w(self.handle, wlen , n) if not wbuf: - return -1 + return space.newint(-1) if n == 0: - return read_len + return space.newint(read_len) u8n = 0 - ##if len < 4: + if len < 4: + if rwin32.WideCharToMultiByte(rwin32.CP_UTF8, + 0, wbuf, n, self.buf, + rffi.sizeof(self.buf)/ rffi.sizeof(self.buf[0]), + rffi.NULL, rffi.NULL): + u8n = self._copyfrombuf(rwbuffer, len) + else: + u8n = rwin32.WideCharToMultiByte(rwin32.CP_UTF8, + 0, wbuf, n, buf, len, + rffi.NULL, rffi.NULL) + + if u8n: + read_len += u8n + u8n = 0 + else: + err = rwin32.GetLastError_saved() + if err == rwin32.ERROR_INSUFFICIENT_BUFFER: + u8n = rwin32.WideCharToMultiByte(rwin32.CP_UTF8, 0, wbuf, + n, rffi.NULL, 0, rffi.NULL, rffi.NULL) + if u8n: + raise oefmt(space.w_ValueError, + "Buffer had room for %d bytes but %d bytes required", + len, u8n) + + if err: + raise oefmt(space.w_WindowsError, + err) + + if len < 0: + return None + + return space.newint(read_len) def get_blksize(self,space): diff --git a/rpython/rlib/rwin32.py b/rpython/rlib/rwin32.py --- a/rpython/rlib/rwin32.py +++ b/rpython/rlib/rwin32.py @@ -576,6 +576,8 @@ GetNumberOfConsoleInputEvents = winexternal( 'GetNumberOfConsoleInputEvents', [HANDLE, LPDWORD], BOOL) + ERROR_INSUFFICIENT_BUFFER = 122 + CP_UTF8 = 65001 WideCharToMultiByte = winexternal( 'WideCharToMultiByte', [rffi.UINT, DWORD, rffi.CWCHARP, rffi.INT, LPSTR, rffi.INT, rffi.CCHARP, LPBOOL], rffi.INT, From pypy.commits at gmail.com Sat Aug 24 09:26:58 2019 From: pypy.commits at gmail.com (arigo) Date: Sat, 24 Aug 2019 06:26:58 -0700 (PDT) Subject: [pypy-commit] pypy py3.6-sandbox-2: Remove these two modules after all Message-ID: <5d613b22.1c69fb81.35e02.5a18@mx.google.com> Author: Armin Rigo Branch: py3.6-sandbox-2 Changeset: r97254:e0a4fa746e3f Date: 2019-08-24 15:26 +0200 http://bitbucket.org/pypy/pypy/changeset/e0a4fa746e3f/ Log: Remove these two modules after all diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -47,7 +47,7 @@ # --sandbox sandbox_modules = default_modules.copy() sandbox_modules.update([ - "array", "binascii", "_socket", "select", + "array", "binascii", ]) import rpython.rlib.rvmprof.cintf From pypy.commits at gmail.com Sat Aug 24 14:34:12 2019 From: pypy.commits at gmail.com (mattip) Date: Sat, 24 Aug 2019 11:34:12 -0700 (PDT) Subject: [pypy-commit] pypy.org extradoc: remove references to numpypy, numpy is now just a "normal" c-extension module Message-ID: <5d618324.1c69fb81.62b9.3b80@mx.google.com> Author: Matti Picus Branch: extradoc Changeset: r952:1d7194367707 Date: 2019-08-24 21:33 +0300 http://bitbucket.org/pypy/pypy.org/changeset/1d7194367707/ Log: remove references to numpypy, numpy is now just a "normal" c-extension module Maybe someday we will revive it ... diff --git a/download.html b/download.html --- a/download.html +++ b/download.html @@ -81,7 +81,6 @@
  • Installing (optional)
  • Installing more modules
- • Installing NumPy (optional)
  • Building from source
  • Packaging
  • Checksums
@@ -215,50 +214,6 @@
into a virtualenv. If you try to build a module and the build process
complains about “missing Python.h”, you may need to install the pypy-dev
package.

[Rendered HTML diff of the removed "Installing NumPy" section; the markup
and the per-line diff markers were lost when the message was converted to
text.  The deleted material covered the "1. Standard NumPy" and
"2. NumPyPy" subsections together with their pip/git installation
commands, so that "Building from source" now follows directly.  The same
removal appears in source form in the source/download.txt diff below.]

    Building from source

    (see more build instructions)

diff --git a/performance.html b/performance.html
--- a/performance.html
+++ b/performance.html
@@ -245,7 +245,9 @@
 the JIT cannot optimize out intermediate copies. This code is actually
 quadratic in the total size of the mylist strings due to
-repeated string copies of ever-larger prefix segments.
+repeated string copies of ever-larger prefix segments. (Such code
+is always fine for bytearrays, because in this case += is an
+in-place operation.)

    This:

     parts = []
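
(Not from the diff: a small self-contained sketch of the contrast this
paragraph describes; the names are illustrative only.)

    # Quadratic str += versus in-place bytearray += versus the join() idiom.
    mylist = ["some", "pieces", "of", "text"] * 1000

    s = ""
    for piece in mylist:
        s += piece                        # copies the ever-growing prefix each time

    buf = bytearray()
    for piece in mylist:
        buf += piece.encode("ascii")      # += mutates the bytearray in place

    parts = []
    for piece in mylist:
        parts.append(piece)
    s2 = "".join(parts)                   # one final copy, linear overall
    assert s == s2
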
    diff --git a/source/download.txt b/source/download.txt
    --- a/source/download.txt
    +++ b/source/download.txt
    @@ -35,7 +35,6 @@
     
      * `Installing`_ (optional)
      * `Installing more modules`_
    - * `Installing NumPy`_ (optional)
      * `Building from source`_
      * `Packaging`_
      * `Checksums`_
    @@ -250,63 +249,6 @@
     
     .. _installation documentation: http://doc.pypy.org/en/latest/install.html
     
    -
    -
    -Installing NumPy
    --------------------------------
    -
    -**There are two different versions of NumPy for PyPy.** For details see this
    -`FAQ question`_.
    -
    -1. Standard NumPy
    -+++++++++++++++++
    -
    -Installation works on any recent PyPy (the release_ above is fine).
    -For example, without using a virtualenv::
    -
    -    $ ./pypy-xxx/bin/pypy -m ensurepip
    -    $ ./pypy-xxx/bin/pip install cython numpy
    -
    -(See the general `installation documentation`_ for more.)
    -
    -
    -2. NumPyPy
    -++++++++++
    -
    -The "numpy" module can also be installed from `our own repository`__ rather
    -than from the official source.  This version uses our
    -built-in ``_numpypy`` multiarray replacement module, written in RPython.
    -This module is not complete, but if it works it should give correct answers.
    -Its performance is hard to predict exactly.  For regular NumPy
    -source code that handles large arrays, it is likely to be slower than
    -the standard NumPy.  It is faster on pure python code that loop over ndarrays
    -doing things on an element-by-element basis.
    -
    -.. __: https://bitbucket.org/pypy/numpy
    -
    -Installation (see the `installation documentation`_ for installing ``pip``)::
    -
    -    pypy -m pip install git+https://bitbucket.org/pypy/numpy.git
    -
    -Alternatively, the direct way::
    -
    -    git clone https://bitbucket.org/pypy/numpy.git
    -    cd numpy
    -    pypy setup.py install
    -
    -If you installed to a system directory, you need to also run this once::
    -
    -    sudo pypy -c 'import numpy'
    -
    -Note again that this version is incomplete: many things do
    -not work and those that do may not be any faster than NumPy on CPython.
    -For further instructions see `the pypy/numpy repository`__ and the
    -`FAQ question`_ about the difference between the two.
    -
    -.. __: https://bitbucket.org/pypy/numpy
    -.. _`FAQ question`: http://doc.pypy.org/en/latest/faq.html#should-i-install-numpy-or-numpypy
    -
    -
     .. _translate:
     
     Building from source
    
    From pypy.commits at gmail.com  Sun Aug 25 03:40:31 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Sun, 25 Aug 2019 00:40:31 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: cpython3 compatibility for raising when
     calling methods on abstract classes
    Message-ID: <5d623b6f.1c69fb81.4a738.b423@mx.google.com>
    
    Author: Matti Picus 
    Branch: py3.6
    Changeset: r97255:544f648a1d31
    Date: 2019-08-25 10:36 +0300
    http://bitbucket.org/pypy/pypy/changeset/544f648a1d31/
    
    Log:	cpython3 compatibility for raising when calling methods on abstract
    	classes
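
(Not from the changeset: a hedged sketch of the CPython behaviour this
commit tracks, assuming CPython's usual "abstract class" error message.
Methods such as from_buffer() on the abstract ctypes base classes are
expected to raise TypeError, while concrete types keep working.)

    import ctypes

    buf = bytearray(ctypes.sizeof(ctypes.c_int))
    ctypes.c_int.from_buffer(buf)      # concrete type: allowed
    try:
        ctypes.Array.from_buffer(buf)  # abstract base class: rejected
    except TypeError as e:
        assert "abstract class" in str(e)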
    
    diff --git a/lib_pypy/_ctypes/array.py b/lib_pypy/_ctypes/array.py
    --- a/lib_pypy/_ctypes/array.py
    +++ b/lib_pypy/_ctypes/array.py
    @@ -108,27 +108,29 @@
             # array accepts very strange parameters as part of structure
             # or function argument...
             from ctypes import c_char, c_wchar
    -        if issubclass(self._type_, c_char):
    -            if isinstance(value, bytes):
    -                if len(value) > self._length_:
    -                    raise ValueError("Invalid length")
    -                value = self(*value)
    -            elif not isinstance(value, self):
    -                raise TypeError("expected bytes, %s found"
    -                                % (value.__class__.__name__,))
    -        elif issubclass(self._type_, c_wchar):
    -            if isinstance(value, str):
    -                if len(value) > self._length_:
    -                    raise ValueError("Invalid length")
    -                value = self(*value)
    -            elif not isinstance(value, self):
    -                raise TypeError("expected unicode string, %s found"
    -                                % (value.__class__.__name__,))
    -        else:
    -            if isinstance(value, tuple):
    -                if len(value) > self._length_:
    -                    raise RuntimeError("Invalid length")
    -                value = self(*value)
    +        if isinstance(value, self):
    +            return value
    +        if hasattr(self, '_type_'):
    +            if issubclass(self._type_, c_char):
    +                if isinstance(value, bytes):
    +                    if len(value) > self._length_:
    +                        raise ValueError("Invalid length")
    +                    value = self(*value)
    +                elif not isinstance(value, self):
    +                    raise TypeError("expected bytes, %s found"
    +                                    % (value.__class__.__name__,))
    +            elif issubclass(self._type_, c_wchar):
    +                if isinstance(value, str):
    +                    if len(value) > self._length_:
    +                        raise ValueError("Invalid length")
    +                    value = self(*value)
    +                elif not isinstance(value, self):
    +                    raise TypeError("expected unicode string, %s found"
    +                                    % (value.__class__.__name__,))
    +        if isinstance(value, tuple):
    +            if len(value) > self._length_:
    +                raise RuntimeError("Invalid length")
    +            value = self(*value)
             return _CDataMeta.from_param(self, value)
     
         def _build_ffiargtype(self):
    diff --git a/lib_pypy/_ctypes/basics.py b/lib_pypy/_ctypes/basics.py
    --- a/lib_pypy/_ctypes/basics.py
    +++ b/lib_pypy/_ctypes/basics.py
    @@ -45,6 +45,9 @@
             self.details = details
     
     class _CDataMeta(type):
    +    def _is_abstract(self):
    +        return getattr(self, '_type_', 'abstract') == 'abstract'
    +
         def from_param(self, value):
             if isinstance(value, self):
                 return value
    @@ -95,6 +98,8 @@
             return self.from_address(dll.__pypy_dll__.getaddressindll(name))
     
         def from_buffer(self, obj, offset=0):
    +        if self._is_abstract():
    +            raise TypeError('abstract class')
             size = self._sizeofinstances()
             buf = memoryview(obj)
             if buf.nbytes < offset + size:
    @@ -111,6 +116,8 @@
             return result
     
         def from_buffer_copy(self, obj, offset=0):
    +        if self._is_abstract():
    +            raise TypeError('abstract class')
             size = self._sizeofinstances()
             buf = memoryview(obj)
             if buf.nbytes < offset + size:
    diff --git a/lib_pypy/_ctypes/pointer.py b/lib_pypy/_ctypes/pointer.py
    --- a/lib_pypy/_ctypes/pointer.py
    +++ b/lib_pypy/_ctypes/pointer.py
    @@ -40,14 +40,17 @@
         def from_param(self, value):
             if value is None:
                 return self(None)
    -        # If we expect POINTER(), but receive a  instance, accept
    -        # it by calling byref().
    -        if isinstance(value, self._type_):
    -            return byref(value)
    -        # Array instances are also pointers when the item types are the same.
    -        if isinstance(value, (_Pointer, Array)):
    -            if issubclass(type(value)._type_, self._type_):
    -                return value
    +        if isinstance(value, self):
    +            return value
    +        if hasattr(self, '_type_'):
    +            # If we expect POINTER(), but receive a  instance, accept
    +            # it by calling byref().
    +            if isinstance(value, self._type_):
    +                return byref(value)
    +            # Array instances are also pointers when the item types are the same.
    +            if isinstance(value, (_Pointer, Array)):
    +                if issubclass(type(value)._type_, self._type_):
    +                    return value
             return _CDataMeta.from_param(self, value)
     
         def _sizeofinstances(self):
    @@ -60,6 +63,8 @@
             return True
     
         def set_type(self, TP):
    +        if self._is_abstract():
    +            raise TypeError('abstract class')
             ffiarray = _rawffi.Array('P')
             def __init__(self, value=None):
                 if not hasattr(self, '_buffer'):
    @@ -179,6 +184,7 @@
             klass = type(_Pointer)("LP_%s" % cls,
                                    (_Pointer,),
                                    {})
    +        klass._type_ = 'P'
             _pointer_type_cache[id(klass)] = klass
             return klass
         else:
    diff --git a/lib_pypy/_ctypes/primitive.py b/lib_pypy/_ctypes/primitive.py
    --- a/lib_pypy/_ctypes/primitive.py
    +++ b/lib_pypy/_ctypes/primitive.py
    @@ -158,6 +158,8 @@
                         break
                 else:
                     raise AttributeError("cannot find _type_ attribute")
    +        if tp == 'abstract':
    +            tp = 'i'
             if (not isinstance(tp, str) or
                 not len(tp) == 1 or
                 tp not in SIMPLE_TYPE_CHARS):
    @@ -341,7 +343,8 @@
         def from_param(self, value):
             if isinstance(value, self):
                 return value
    -
    +        if self._type_ == 'abstract':
    +            raise TypeError('abstract class')
             from_param_f = FROM_PARAM_BY_TYPE.get(self._type_)
             if from_param_f:
                 res = from_param_f(self, value)
    @@ -371,7 +374,7 @@
             return self._type_ in "sPzUZXO"
     
     class _SimpleCData(_CData, metaclass=SimpleType):
    -    _type_ = 'i'
    +    _type_ = 'abstract'
     
         def __init__(self, value=DEFAULT_VALUE):
             if not hasattr(self, '_buffer'):
    diff --git a/lib_pypy/_ctypes/structure.py b/lib_pypy/_ctypes/structure.py
    --- a/lib_pypy/_ctypes/structure.py
    +++ b/lib_pypy/_ctypes/structure.py
    @@ -119,6 +119,8 @@
             if self.is_bitfield:
                 # bitfield member, use direct access
                 return obj._buffer.__getattr__(self.name)
    +        elif not isinstance(obj, _CData):
+            raise TypeError('not a ctype instance')
             else:
                 fieldtype = self.ctype
                 offset = self.num
    @@ -142,6 +144,8 @@
                 from ctypes import memmove
                 dest = obj._buffer.fieldaddress(self.name)
                 memmove(dest, arg, fieldtype._fficompositesize_)
    +        elif not isinstance(obj, _CData):
+            raise TypeError('not a ctype instance')
             else:
                 obj._buffer.__setattr__(self.name, arg)
     
    @@ -209,6 +213,9 @@
     
         __setattr__ = struct_setattr
     
    +    def _is_abstract(self):
    +        return False
    +
         def from_address(self, address):
             instance = StructOrUnion.__new__(self)
             if isinstance(address, _rawffi.StructureInstance):
    
    From pypy.commits at gmail.com  Sun Aug 25 13:11:12 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Sun, 25 Aug 2019 10:11:12 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: fix unicode.encode('hex'),
     bytes.decode('hex') to raise LookupError
    Message-ID: <5d62c130.1c69fb81.1dc94.ec78@mx.google.com>
    
    Author: Matti Picus 
    Branch: py3.6
    Changeset: r97256:064c9ee805b4
    Date: 2019-08-25 20:10 +0300
    http://bitbucket.org/pypy/pypy/changeset/064c9ee805b4/
    
    Log:	fix unicode.encode('hex'), bytes.decode('hex') to raise LookupError
    
    diff --git a/pypy/module/_codecs/interp_codecs.py b/pypy/module/_codecs/interp_codecs.py
    --- a/pypy/module/_codecs/interp_codecs.py
    +++ b/pypy/module/_codecs/interp_codecs.py
    @@ -618,10 +618,10 @@
     # ____________________________________________________________
     # Helpers for unicode.encode() and bytes.decode()
     def lookup_text_codec(space, action, encoding):
    -    codec_info = lookup_codec(space, encoding)
    +    w_codec_info = lookup_codec(space, encoding)
         try:
             is_text_encoding = space.is_true(
    -                space.getattr(codec_info, space.newtext('_is_text_encoding')))
    +                space.getattr(w_codec_info, space.newtext('_is_text_encoding')))
         except OperationError as e:
             if e.match(space, space.w_AttributeError):
                 is_text_encoding = True
    @@ -630,8 +630,8 @@
         if not is_text_encoding:
             raise oefmt(space.w_LookupError,
                         "'%s' is not a text encoding; "
    -                    "use %s to handle arbitrary codecs", encoding, action)
    -    return codec_info
    +                    "use codecs.%s() to handle arbitrary codecs", encoding, action)
    +    return w_codec_info
     
     # ____________________________________________________________
     
    diff --git a/pypy/objspace/std/test/test_unicodeobject.py b/pypy/objspace/std/test/test_unicodeobject.py
    --- a/pypy/objspace/std/test/test_unicodeobject.py
    +++ b/pypy/objspace/std/test/test_unicodeobject.py
    @@ -780,6 +780,11 @@
             raises(UnicodeError, b"\xc2".decode, "utf-8")
             assert b'\xe1\x80'.decode('utf-8', 'replace') == "\ufffd"
     
    +    def test_invalid_lookup(self):
    +
    +        raises(LookupError, u"abcd".encode, "hex")
    +        raises(LookupError, b"abcd".decode, "hex")
    +
         def test_repr_printable(self):
             # PEP 3138: __repr__ respects printable characters.
             x = '\u027d'
    diff --git a/pypy/objspace/std/unicodeobject.py b/pypy/objspace/std/unicodeobject.py
    --- a/pypy/objspace/std/unicodeobject.py
    +++ b/pypy/objspace/std/unicodeobject.py
    @@ -1236,7 +1236,7 @@
         return encoding, errors
     
     def encode_object(space, w_obj, encoding, errors):
    -    from pypy.module._codecs.interp_codecs import encode
    +    from pypy.module._codecs.interp_codecs import _call_codec, lookup_text_codec
         if errors is None or errors == 'strict':
             # fast paths
             utf8 = space.utf8_w(w_obj)
    @@ -1258,7 +1258,11 @@
                         a.pos, a.pos + 1)
                     assert False, "always raises"
                 return space.newbytes(utf8)
    -    w_retval = encode(space, w_obj, encoding, errors)
    +    if encoding is None:
    +        encoding = space.sys.defaultencoding
    +    w_codec_info = lookup_text_codec(space, 'encode', encoding)
    +    w_encfunc = space.getitem(w_codec_info, space.newint(0))
    +    w_retval = _call_codec(space, w_encfunc, w_obj, "encoding", encoding, errors)
         if not space.isinstance_w(w_retval, space.w_bytes):
             raise oefmt(space.w_TypeError,
                         "'%s' encoder returned '%T' instead of 'bytes'; "
    @@ -1269,6 +1273,7 @@
     
     
     def decode_object(space, w_obj, encoding, errors=None):
    +    from pypy.module._codecs.interp_codecs import _call_codec, lookup_text_codec
         if errors == 'strict' or errors is None:
             # fast paths
             if encoding == 'ascii':
    @@ -1279,8 +1284,11 @@
                 s = space.charbuf_w(w_obj)
                 lgt = unicodehelper.check_utf8_or_raise(space, s)
                 return space.newutf8(s, lgt)
    -    from pypy.module._codecs.interp_codecs import decode
    -    w_retval = decode(space, w_obj, encoding, errors)
    +    if encoding is None:
    +        encoding = space.sys.defaultencoding
    +    w_codec_info = lookup_text_codec(space, 'decode', encoding)
    +    w_encfunc = space.getitem(w_codec_info, space.newint(1))
    +    w_retval = _call_codec(space, w_encfunc, w_obj, "decoding", encoding, errors)
         if not isinstance(w_retval, W_UnicodeObject):
             raise oefmt(space.w_TypeError,
                         "'%s' decoder returned '%T' instead of 'str'; "
    
    From pypy.commits at gmail.com  Sun Aug 25 13:28:11 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Sun, 25 Aug 2019 10:28:11 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: cpython compatibility
    Message-ID: <5d62c52b.1c69fb81.aa7dc.4f8c@mx.google.com>
    
    Author: Matti Picus 
    Branch: py3.6
    Changeset: r97257:188c336c0f74
    Date: 2019-08-25 20:24 +0300
    http://bitbucket.org/pypy/pypy/changeset/188c336c0f74/
    
    Log:	cpython compatibility
    
    diff --git a/pypy/module/select/interp_epoll.py b/pypy/module/select/interp_epoll.py
    --- a/pypy/module/select/interp_epoll.py
    +++ b/pypy/module/select/interp_epoll.py
    @@ -87,10 +87,12 @@
             self.register_finalizer(space)
     
         @unwrap_spec(sizehint=int, flags=int)
    -    def descr__new__(space, w_subtype, sizehint=0, flags=0):
    -        if sizehint < 0:     # 'sizehint' is otherwise ignored
    +    def descr__new__(space, w_subtype, sizehint=-1, flags=0):
    +        if sizehint == -1:
    +            sizehint = FD_SETSIZE - 1
    +        elif sizehint <= 0:     # 'sizehint' is otherwise ignored
                 raise oefmt(space.w_ValueError,
    -                        "sizehint must be greater than zero, got %d", sizehint)
    +                        "sizehint must be positive or -1")
             epfd = epoll_create1(flags | EPOLL_CLOEXEC)
             if epfd < 0:
                 raise exception_from_saved_errno(space, space.w_IOError)
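
(Illustrative only, mirroring the argument handling added above; requires
Linux since select.epoll is Linux-specific.)

    import select

    ep = select.epoll()             # default sizehint of -1 is accepted
    ep.close()
    ep = select.epoll(sizehint=-1)  # explicit -1 is also fine
    ep.close()
    try:
        select.epoll(sizehint=0)    # 0 or any other non-positive value...
    except ValueError:
        pass                        # ...now raises ValueError, as on CPython
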
    
    From pypy.commits at gmail.com  Sun Aug 25 17:48:59 2019
    From: pypy.commits at gmail.com (cfbolz)
    Date: Sun, 25 Aug 2019 14:48:59 -0700 (PDT)
    Subject: [pypy-commit] pypy default: optimization for ascii case in
     unicode.(r)find and .(r)index:
    Message-ID: <5d63024b.1c69fb81.f300f.827a@mx.google.com>
    
    Author: Carl Friedrich Bolz-Tereick 
    Branch: 
    Changeset: r97258:e2fdef728670
    Date: 2019-08-25 23:42 +0200
    http://bitbucket.org/pypy/pypy/changeset/e2fdef728670/
    
    Log:	optimization for ascii case in unicode.(r)find and .(r)index:
    
    	no need to convert the result index back from bytes to codepoints if
    	the string is ascii.
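
	(Not part of the changeset: a short illustration of why the fast path
	is valid.  In an ASCII-only string every character is one UTF-8 byte,
	so a byte offset into the UTF-8 representation is already the
	codepoint index.)

	    s = "hello world"                  # ASCII only
	    utf8 = s.encode("utf-8")
	    assert utf8.find(b"world") == s.find("world")   # byte == codepoint index

	    t = "h\u00e9llo world"             # non-ASCII: the offsets diverge
	    utf8 = t.encode("utf-8")
	    assert utf8.find(b"world") != t.find("world")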
    
    diff --git a/pypy/objspace/std/unicodeobject.py b/pypy/objspace/std/unicodeobject.py
    --- a/pypy/objspace/std/unicodeobject.py
    +++ b/pypy/objspace/std/unicodeobject.py
    @@ -886,6 +886,11 @@
             return rutf8.codepoint_position_at_index(
                 self._utf8, self._get_index_storage(), index)
     
    +    def _codepoints_in_utf8(self, start, end):
    +        if self.is_ascii():
    +            return end - start
    +        return rutf8.codepoints_in_utf8(self._utf8, start, end)
    +
         @always_inline
         def _unwrap_and_search(self, space, w_sub, w_start, w_end, forward=True):
             w_sub = self.convert_arg_to_w_unicode(space, w_sub)
    @@ -907,7 +912,7 @@
                 res_index = self._utf8.find(w_sub._utf8, start_index, end_index)
                 if res_index < 0:
                     return None
    -            skip = rutf8.codepoints_in_utf8(self._utf8, start_index, res_index)
    +            skip = self._codepoints_in_utf8(start_index, res_index)
                 res = start + skip
                 assert res >= 0
                 return space.newint(res)
    @@ -915,7 +920,7 @@
                 res_index = self._utf8.rfind(w_sub._utf8, start_index, end_index)
                 if res_index < 0:
                     return None
    -            skip = rutf8.codepoints_in_utf8(self._utf8, res_index, end_index)
    +            skip = self._codepoints_in_utf8(res_index, end_index)
                 res = end - skip
                 assert res >= 0
                 return space.newint(res)
    
    From pypy.commits at gmail.com  Mon Aug 26 04:52:31 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 01:52:31 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: in-progress: enforce a review on
     functions manipulating directly
    Message-ID: <5d639dcf.1c69fb81.2ea77.8402@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97259:906b820ecdac
    Date: 2019-08-26 10:51 +0200
    http://bitbucket.org/pypy/pypy/changeset/906b820ecdac/
    
    Log:	in-progress: enforce a review on functions manipulating directly
    	low-level pointers or doing any other strange things
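
	(Illustrative use of the new decorator, based on its docstring below;
	the function names are made up for the sketch.  Graphs without a
	marking are what the new translation-time checker inspects.)

	    from rpython.rlib.objectmodel import sandbox_review

	    @sandbox_review(reviewed=True)      # audited; any RPython code may call it
	    def zero_raw_buffer(raw_buf, size):
	        for i in range(size):
	            raw_buf[i] = '\x00'

	    @sandbox_review(check_caller=True)  # fine by itself, but callers are checked too
	    def zero_raw_buffer_slowpath(raw_buf, size):
	        zero_raw_buffer(raw_buf, size)

	    @sandbox_review(abort=True)         # a sandboxed binary aborts if this is reached
	    def run_external_command(cmd):
	        pass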
    
    diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py
    --- a/rpython/rlib/objectmodel.py
    +++ b/rpython/rlib/objectmodel.py
    @@ -226,6 +226,38 @@
         func._not_rpython_ = True
         return func
     
    +def sandbox_review(reviewed=False, check_caller=False, abort=False):
    +    """Mark a function as reviewed for sandboxing purposes.
    +    This should not be necessary on any function written in "normal" RPython
    +    code, but only on functions using some lloperation that is not
    +    whitelisted in rpython.translator.sandbox.graphchecker.
    +
    +    Call this with one of the three flags set to True:
    +
    +      *reviewed*: This function is fine and any other code can call it.
    +      If the function contains external calls, they will still be replaced with
    +      stubs using I/O to communicate with the parent process (as long as they
    +      are not marked sandboxsafe themselves).
    +
    +      *check_caller*: This function is fine, but you should still check the
    +      callers; they must all have a sandbox_review() as well.
    +
    +      *abort*: An abort is prepended to the function's code, making the
    +      whole process abort if it is called at runtime.
    +
    +    """
    +    assert reviewed + check_caller + abort == 1
    +    def wrap(func):
    +        assert not hasattr(func, '_sandbox_review_')
    +        if reviewed:
    +            func._sandbox_review_ = 'reviewed'
    +        if check_caller:
    +            func._sandbox_review_ = 'check_caller'
    +        if abort:
    +            func._sandbox_review_ = 'abort'
    +        return func
    +    return wrap
    +
     
     # ____________________________________________________________
     
    diff --git a/rpython/rlib/rstack.py b/rpython/rlib/rstack.py
    --- a/rpython/rlib/rstack.py
    +++ b/rpython/rlib/rstack.py
    @@ -6,6 +6,7 @@
     import py
     
     from rpython.rlib.objectmodel import we_are_translated, fetch_translated_config
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rlib.rarithmetic import r_uint
     from rpython.rlib import rgc
     from rpython.rtyper.lltypesystem import lltype, rffi
    @@ -39,6 +40,7 @@
     _stack_criticalcode_stop = llexternal('LL_stack_criticalcode_stop', [],
                                           lltype.Void, lambda: None)
     
    + at sandbox_review(reviewed=True)
     def stack_check():
         if not we_are_translated():
             return
    @@ -64,6 +66,7 @@
     stack_check._always_inline_ = True
     stack_check._dont_insert_stackcheck_ = True
     
    + at sandbox_review(check_caller=True)
     @rgc.no_collect
     def stack_check_slowpath(current):
         if ord(_stack_too_big_slowpath(current)):
    @@ -72,6 +75,7 @@
     stack_check_slowpath._dont_inline_ = True
     stack_check_slowpath._dont_insert_stackcheck_ = True
     
    + at sandbox_review(reviewed=True)
     def stack_almost_full():
         """Return True if the stack is more than 15/16th full."""
         if not we_are_translated():
    diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py
    --- a/rpython/rtyper/lltypesystem/rffi.py
    +++ b/rpython/rtyper/lltypesystem/rffi.py
    @@ -9,6 +9,7 @@
     from rpython.tool.sourcetools import func_with_new_name
     from rpython.rlib.objectmodel import Symbolic, specialize, not_rpython
     from rpython.rlib.objectmodel import keepalive_until_here, enforceargs
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rlib import rarithmetic, rgc
     from rpython.rtyper.extregistry import ExtRegistryEntry
     from rpython.rlib.unroll import unrolling_iterable
    @@ -214,6 +215,8 @@
             #
             call_external_function = func_with_new_name(call_external_function,
                                                         'ccall_' + name)
    +        call_external_function = sandbox_review(check_caller=True)(
    +            call_external_function)
             # don't inline, as a hack to guarantee that no GC pointer is alive
             # anywhere in call_external_function
         else:
    @@ -251,6 +254,8 @@
                                                             'ccall_' + name)
                 call_external_function = jit.dont_look_inside(
                     call_external_function)
    +            call_external_function = sandbox_review(check_caller=True)(
    +                call_external_function)
     
         def _oops():
             raise AssertionError("can't pass (any more) a unicode string"
    @@ -329,8 +334,12 @@
         # for debugging, stick ll func ptr to that
         wrapper._ptr = funcptr
         wrapper = func_with_new_name(wrapper, name)
    +    wrapper = sandbox_review(reviewed=True)(wrapper)
         return wrapper
     
    +def sandbox_check_type(TYPE):
    +    return not isinstance(TYPE, lltype.Primitive) or TYPE == llmemory.Address
    +
     
     class CallbackHolder:
         def __init__(self):
    @@ -838,6 +847,7 @@
     
         # str -> (buf, llobj, flag)
         # Can't inline this because of the raw address manipulation.
    +    @sandbox_review(reviewed=True)
         @jit.dont_look_inside
         def get_nonmovingbuffer_ll(data):
             """
    @@ -891,6 +901,7 @@
         get_nonmovingbuffer_ll._annenforceargs_ = [strtype]
     
     
    +    @sandbox_review(reviewed=True)
         @jit.dont_look_inside
         def get_nonmovingbuffer_ll_final_null(data):
             tup = get_nonmovingbuffer_ll(data)
    @@ -902,6 +913,7 @@
     
         # args-from-tuple-returned-by-get_nonmoving_buffer() -> None
         # Can't inline this because of the raw address manipulation.
    +    @sandbox_review(reviewed=True)
         @jit.dont_look_inside
         def free_nonmovingbuffer_ll(buf, llobj, flag):
             """
    diff --git a/rpython/rtyper/lltypesystem/rstr.py b/rpython/rtyper/lltypesystem/rstr.py
    --- a/rpython/rtyper/lltypesystem/rstr.py
    +++ b/rpython/rtyper/lltypesystem/rstr.py
    @@ -3,7 +3,8 @@
     from rpython.annotator import model as annmodel
     from rpython.rlib import jit, types, objectmodel, rgc
     from rpython.rlib.objectmodel import (malloc_zero_filled, we_are_translated,
    -    ll_hash_string, keepalive_until_here, specialize, enforceargs, dont_inline)
    +    ll_hash_string, keepalive_until_here, specialize, enforceargs, dont_inline,
    +    sandbox_review)
     from rpython.rlib.signature import signature
     from rpython.rlib.rarithmetic import ovfcheck
     from rpython.rtyper.error import TyperError
    @@ -59,6 +60,7 @@
                     llmemory.itemoffsetof(TP.chars, 0) +
                     llmemory.sizeof(CHAR_TP) * item)
     
    +    @sandbox_review(check_caller=True)
         @signature(types.any(), types.any(), types.int(), returns=types.any())
         @specialize.arg(0)
         def _get_raw_buf(TP, src, ofs):
    @@ -75,6 +77,7 @@
         _get_raw_buf._always_inline_ = True
     
         @jit.oopspec('stroruni.copy_contents(src, dst, srcstart, dststart, length)')
    +    @sandbox_review(reviewed=True)
         @signature(types.any(), types.any(), types.int(), types.int(), types.int(), returns=types.none())
         def copy_string_contents(src, dst, srcstart, dststart, length):
             """Copies 'length' characters from the 'src' string to the 'dst'
    @@ -112,6 +115,7 @@
         copy_string_contents = func_with_new_name(copy_string_contents,
                                                   'copy_%s_contents' % name)
     
    +    @sandbox_review(reviewed=True)
         @jit.oopspec('stroruni.copy_string_to_raw(src, ptrdst, srcstart, length)')
         def copy_string_to_raw(src, ptrdst, srcstart, length):
             """
    @@ -141,6 +145,7 @@
         copy_string_to_raw._always_inline_ = True
         copy_string_to_raw = func_with_new_name(copy_string_to_raw, 'copy_%s_to_raw' % name)
     
    +    @sandbox_review(reviewed=True)
         @jit.dont_look_inside
         @signature(types.any(), types.any(), types.int(), types.int(),
                    returns=types.none())
    @@ -1258,6 +1263,7 @@
             return hop.gendirectcall(cls.ll_join_strs, size, vtemp)
     
         @staticmethod
    +    @sandbox_review(reviewed=True)
         @jit.dont_look_inside
         def ll_string2list(RESLIST, src):
             length = len(src.chars)
    diff --git a/rpython/translator/backendopt/all.py b/rpython/translator/backendopt/all.py
    --- a/rpython/translator/backendopt/all.py
    +++ b/rpython/translator/backendopt/all.py
    @@ -113,7 +113,7 @@
         if config.profile_based_inline and not secondary:
             threshold = config.profile_based_inline_threshold
             heuristic = get_function(config.profile_based_inline_heuristic)
    -        inline.instrument_inline_candidates(graphs, threshold)
    +        inline.instrument_inline_candidates(translator, graphs, threshold)
             counters = translator.driver_instrument_result(
                 config.profile_based_inline)
             n = len(counters)
    diff --git a/rpython/translator/backendopt/inline.py b/rpython/translator/backendopt/inline.py
    --- a/rpython/translator/backendopt/inline.py
    +++ b/rpython/translator/backendopt/inline.py
    @@ -548,7 +548,8 @@
         return (0.9999 * measure_median_execution_cost(graph) +
                 count), True       # may be NaN
     
    -def inlinable_static_callers(graphs, store_calls=False, ok_to_call=None):
    +def inlinable_static_callers(translator, graphs, store_calls=False,
    +                             ok_to_call=None):
         if ok_to_call is None:
             ok_to_call = set(graphs)
         result = []
    @@ -558,6 +559,7 @@
             else:
                 result.append((parentgraph, graph))
         #
    +    dont_inline = make_dont_inline_checker(translator)
         for parentgraph in graphs:
             for block in parentgraph.iterblocks():
                 for op in block.operations:
    @@ -565,13 +567,12 @@
                         funcobj = op.args[0].value._obj
                         graph = getattr(funcobj, 'graph', None)
                         if graph is not None and graph in ok_to_call:
    -                        if getattr(getattr(funcobj, '_callable', None),
    -                                   '_dont_inline_', False):
    +                        if dont_inline(funcobj):
                                 continue
                             add(parentgraph, block, op, graph)
         return result
     
    -def instrument_inline_candidates(graphs, threshold):
    +def instrument_inline_candidates(translator, graphs, threshold):
         cache = {None: False}
         def candidate(graph):
             try:
    @@ -581,6 +582,7 @@
                 cache[graph] = res
                 return res
         n = 0
    +    dont_inline = make_dont_inline_checker(translator)
         for parentgraph in graphs:
             for block in parentgraph.iterblocks():
                 ops = block.operations
    @@ -592,8 +594,7 @@
                         funcobj = op.args[0].value._obj
                         graph = getattr(funcobj, 'graph', None)
                         if graph is not None:
    -                        if getattr(getattr(funcobj, '_callable', None),
    -                                   '_dont_inline_', False):
    +                        if dont_inline(funcobj):
                                 continue
                         if candidate(graph):
                             tag = Constant('inline', Void)
    @@ -610,6 +611,18 @@
         return (hasattr(graph, 'func') and
                 getattr(graph.func, '_always_inline_', None))
     
    +def make_dont_inline_checker(translator):
    +    sandbox = translator.config.translation.sandbox
    +
    +    def dont_inline(funcobj):
    +        func = getattr(funcobj, '_callable', None)
    +        if sandbox:
    +            review = getattr(func, '_sandbox_review_', None)
    +            if review is not None and review != 'check_caller':
    +                return True
    +        return getattr(func, '_dont_inline_', False)
    +    return dont_inline
    +
     def auto_inlining(translator, threshold=None,
                       callgraph=None,
                       call_count_pred=None,
    @@ -621,7 +634,7 @@
         callers = {}     # {graph: {graphs-that-call-it}}
         callees = {}     # {graph: {graphs-that-it-calls}}
         if callgraph is None:
    -        callgraph = inlinable_static_callers(translator.graphs)
    +        callgraph = inlinable_static_callers(translator, translator.graphs)
         for graph1, graph2 in callgraph:
             callers.setdefault(graph2, {})[graph1] = True
             callees.setdefault(graph1, {})[graph2] = True
    @@ -727,7 +740,8 @@
                                     if not hasattr(graph, 'exceptiontransformed')])
         else:
             ok_to_call = None
    -    callgraph = inlinable_static_callers(graphs, ok_to_call=ok_to_call)
    +    callgraph = inlinable_static_callers(translator, graphs,
    +                                         ok_to_call=ok_to_call)
         count = auto_inlining(translator, threshold, callgraph=callgraph,
                               heuristic=heuristic,
                               call_count_pred=call_count_pred)
    diff --git a/rpython/translator/backendopt/test/test_inline.py b/rpython/translator/backendopt/test/test_inline.py
    --- a/rpython/translator/backendopt/test/test_inline.py
    +++ b/rpython/translator/backendopt/test/test_inline.py
    @@ -100,7 +100,7 @@
             call_count_pred = None
             if call_count_check:
                 call_count_pred = lambda lbl: True
    -            instrument_inline_candidates(t.graphs, threshold)
    +            instrument_inline_candidates(t, t.graphs, threshold)
     
             if remove_same_as:
                 for graph in t.graphs:
    diff --git a/rpython/translator/c/genc.py b/rpython/translator/c/genc.py
    --- a/rpython/translator/c/genc.py
    +++ b/rpython/translator/c/genc.py
    @@ -65,6 +65,11 @@
     
         def __init__(self, translator, entrypoint, config, gcpolicy=None,
                      gchooks=None, secondary_entrypoints=()):
    +        #
    +        if config.translation.sandbox:
    +            assert not config.translation.thread
    +            gchooks = None     # no custom gc hooks
    +        #
             self.translator = translator
             self.entrypoint = entrypoint
             self.entrypoint_name = getattr(self.entrypoint, 'func_name', None)
    diff --git a/rpython/translator/driver.py b/rpython/translator/driver.py
    --- a/rpython/translator/driver.py
    +++ b/rpython/translator/driver.py
    @@ -412,6 +412,10 @@
             if translator.annotator is not None:
                 translator.frozen = True
     
    +        if self.config.translation.sandbox:
    +            from rpython.translator.sandbox import graphchecker
    +            graphchecker.check_all_graphs(self.translator)
    +
             standalone = self.standalone
             get_gchooks = self.extra.get('get_gchooks', lambda: None)
             gchooks = get_gchooks()
    diff --git a/rpython/translator/sandbox/graphchecker.py b/rpython/translator/sandbox/graphchecker.py
    new file mode 100644
    --- /dev/null
    +++ b/rpython/translator/sandbox/graphchecker.py
    @@ -0,0 +1,112 @@
    +"""Logic to check the operations in all the user graphs.
    +This runs at the start of the database-c step, so it excludes the
    +graphs produced later, notably for the GC.  These are "low-level"
    +graphs that are assumed to be safe.
    +"""
    +
    +from rpython.flowspace.model import SpaceOperation, Constant
    +from rpython.rtyper.rmodel import inputconst
    +from rpython.rtyper.lltypesystem import lltype, llmemory, rstr
    +from rpython.rtyper.lltypesystem.rffi import sandbox_check_type
    +from rpython.rtyper.lltypesystem.lloperation import LL_OPERATIONS
    +from rpython.translator.unsimplify import varoftype
    +from rpython.tool.ansi_print import AnsiLogger
    +
    +class UnsafeException(Exception):
    +    pass
    +
    +log = AnsiLogger("sandbox")
    +
    +safe_operations = set([
    +    'keepalive', 'threadlocalref_get', 'threadlocalref_store',
    +    'malloc', 'malloc_varsize', 'free',
    +    'getfield', 'getarrayitem', 'getinteriorfield',
    +    'gc_thread_run',
    +    ])
    +gc_set_operations = set([
    +    'setfield', 'setarrayitem', 'setinteriorfield',
    +    ])
    +for opname, opdesc in LL_OPERATIONS.items():
    +    if opdesc.tryfold:
    +        safe_operations.add(opname)
    +
    +def graph_review(graph):
    +    return getattr(getattr(graph, 'func', None), '_sandbox_review_', None)
    +
    +def make_abort_graph(graph):
    +    ll_err = rstr.conststr("reached forbidden function %r" % (graph.name,))
    +    c_err = inputconst(lltype.typeOf(ll_err), ll_err)
    +    op = SpaceOperation('debug_fatalerror', [c_err], varoftype(lltype.Void))
    +    graph.startblock.operations.insert(0, op)
    +
    +
    +
    +class GraphChecker(object):
    +
    +    def __init__(self, translator):
    +        self.translator = translator
    +
    +    def graph_is_unsafe(self, graph):
    +        for block, op in graph.iterblockops():
    +            opname = op.opname
    +
    +            if opname in safe_operations:
    +                pass
    +
    +            elif opname in gc_set_operations:
    +                if op.args[0].concretetype.TO._gckind != 'gc':
    +                    return "non-GC memory write: %r" % (op,)
    +
    +            elif opname == 'direct_call':
    +                c_target = op.args[0]
    +                assert isinstance(c_target, Constant)
    +                TYPE = lltype.typeOf(c_target.value)
    +                assert isinstance(TYPE.TO, lltype.FuncType)
    +                obj = c_target.value._obj
    +                if hasattr(obj, 'graph'):
    +                    g2 = obj.graph
    +                    if graph_review(g2) == 'check_caller':
    +                        return "caller has not been checked: %r" % (op,)
    +                elif getattr(obj, 'sandboxsafe', False):
    +                    pass
    +                elif getattr(obj, 'external', None) is not None:
    +                    # either obj._safe_not_sandboxed is True, and then it's
    +                    # fine; or obj._safe_not_sandboxed is False, and then
    +                    # this will be transformed into a stdin/stdout stub
    +                    pass
    +                else:
    +                    return "direct_call to %r" % (obj,)
    +
    +            elif opname == 'force_cast':
    +                if sandbox_check_type(op.result.concretetype):
    +                    return "force_cast to pointer type: %r" % (op,)
    +                if sandbox_check_type(op.args[0].concretetype):
    +                    return "force_cast from pointer type: %r" % (op,)
    +            else:
    +                return "unsupported llop: %r" % (opname,)
    +
    +    def check(self):
    +        unsafe = {}
    +        for graph in self.translator.graphs:
    +            review = graph_review(graph)
    +            if review is not None:
    +                if review in ('reviewed', 'check_caller'):
    +                    continue
    +                elif review == 'abort':
    +                    make_abort_graph(graph)
    +                    continue
    +                else:
    +                    assert False, repr(review)
    +
    +            problem = self.graph_is_unsafe(graph)
    +            if problem is not None:
    +                unsafe[graph] = problem
    +        if unsafe:
    +            raise UnsafeException(
    +                '\n'.join('%r: %s' % kv for kv in unsafe.items()))
    +
    +
    +def check_all_graphs(translator):
    +    log("Checking the graphs for sandbox-unsafe operations")
    +    checker = GraphChecker(translator)
    +    checker.check()
    
    From pypy.commits at gmail.com  Mon Aug 26 05:12:13 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 02:12:13 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: in-progress
    Message-ID: <5d63a26d.1c69fb81.2ea77.8bcc@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97260:84f2711b0798
    Date: 2019-08-26 11:11 +0200
    http://bitbucket.org/pypy/pypy/changeset/84f2711b0798/
    
    Log:	in-progress
    
    diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py
    --- a/rpython/memory/gc/incminimark.py
    +++ b/rpython/memory/gc/incminimark.py
    @@ -1171,6 +1171,8 @@
     
     
         def unpin(self, obj):
    +        if self.safer_variant():
    +            out_of_memory("unpin() unexpected")
             ll_assert(self._is_pinned(obj),
                 "unpin: object is already not pinned")
             #
    @@ -1181,6 +1183,8 @@
             return (self.header(obj).tid & GCFLAG_PINNED) != 0
     
         def shrink_array(self, obj, smallerlength):
    +        if self.safer_variant():    # no shrinking in the safer variant
    +            return False       # (because the original 'obj' is kind of broken)
             #
             # Only objects in the nursery can be "resized".  Resizing them
             # means recording that they have a smaller size, so that when
    diff --git a/rpython/rlib/rgc.py b/rpython/rlib/rgc.py
    --- a/rpython/rlib/rgc.py
    +++ b/rpython/rlib/rgc.py
    @@ -6,6 +6,7 @@
     from rpython.rlib import jit
     from rpython.rlib.objectmodel import we_are_translated, enforceargs, specialize
     from rpython.rlib.objectmodel import CDefinedIntSymbolic, not_rpython
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rtyper.extregistry import ExtRegistryEntry
     from rpython.rtyper.lltypesystem import lltype, llmemory
     
    @@ -361,6 +362,7 @@
     
     @jit.oopspec('list.ll_arraycopy(source, dest, source_start, dest_start, length)')
     @enforceargs(None, None, int, int, int)
    + at sandbox_review(check_caller=True)
     @specialize.ll()
     def ll_arraycopy(source, dest, source_start, dest_start, length):
         from rpython.rtyper.lltypesystem.lloperation import llop
    @@ -415,6 +417,7 @@
     
     @jit.oopspec('rgc.ll_shrink_array(p, smallerlength)')
     @enforceargs(None, int)
    + at sandbox_review(reviewed=True)
     @specialize.ll()
     def ll_shrink_array(p, smallerlength):
         from rpython.rtyper.lltypesystem.lloperation import llop
    @@ -454,6 +457,7 @@
         return newp
     
     @jit.dont_look_inside
    + at sandbox_review(reviewed=True)
     @specialize.ll()
     def ll_arrayclear(p):
         # Equivalent to memset(array, 0).  Only for GcArray(primitive-type) for now.
    diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py
    --- a/rpython/rtyper/lltypesystem/rffi.py
    +++ b/rpython/rtyper/lltypesystem/rffi.py
    @@ -801,6 +801,7 @@
             lastchar = u'\x00'
     
         # str -> char*
    +    @sandbox_review(reviewed=True)
         def str2charp(s, track_allocation=True):
             """ str -> char*
             """
    @@ -815,6 +816,7 @@
             return array
         str2charp._annenforceargs_ = [strtype, bool]
     
    +    @sandbox_review(reviewed=True)
         def free_charp(cp, track_allocation=True):
             if track_allocation:
                 lltype.free(cp, flavor='raw', track_allocation=True)
    @@ -930,6 +932,7 @@
     
         # int -> (char*, str, int)
         # Can't inline this because of the raw address manipulation.
    +    @sandbox_review(reviewed=True)
         @jit.dont_look_inside
         def alloc_buffer(count):
             """
    diff --git a/rpython/rtyper/lltypesystem/rlist.py b/rpython/rtyper/lltypesystem/rlist.py
    --- a/rpython/rtyper/lltypesystem/rlist.py
    +++ b/rpython/rtyper/lltypesystem/rlist.py
    @@ -10,6 +10,7 @@
         ADTIList, ADTIFixedList, dum_nocheck)
     from rpython.rtyper.rmodel import Repr, inputconst, externalvsinternal
     from rpython.tool.pairtype import pairtype, pair
    +from rpython.rlib.objectmodel import sandbox_review
     
     
     # ____________________________________________________________
    @@ -196,6 +197,7 @@
     # adapted C code
     
     @jit.look_inside_iff(lambda l, newsize, overallocate: jit.isconstant(len(l.items)) and jit.isconstant(newsize))
    + at sandbox_review(reviewed=True)
     @signature(types.any(), types.int(), types.bool(), returns=types.none())
     def _ll_list_resize_hint_really(l, newsize, overallocate):
         """
    diff --git a/rpython/rtyper/lltypesystem/rstr.py b/rpython/rtyper/lltypesystem/rstr.py
    --- a/rpython/rtyper/lltypesystem/rstr.py
    +++ b/rpython/rtyper/lltypesystem/rstr.py
    @@ -115,7 +115,7 @@
         copy_string_contents = func_with_new_name(copy_string_contents,
                                                   'copy_%s_contents' % name)
     
    -    @sandbox_review(reviewed=True)
    +    @sandbox_review(check_caller=True)
         @jit.oopspec('stroruni.copy_string_to_raw(src, ptrdst, srcstart, length)')
         def copy_string_to_raw(src, ptrdst, srcstart, length):
             """
    diff --git a/rpython/translator/sandbox/graphchecker.py b/rpython/translator/sandbox/graphchecker.py
    --- a/rpython/translator/sandbox/graphchecker.py
    +++ b/rpython/translator/sandbox/graphchecker.py
    @@ -7,7 +7,6 @@
     from rpython.flowspace.model import SpaceOperation, Constant
     from rpython.rtyper.rmodel import inputconst
     from rpython.rtyper.lltypesystem import lltype, llmemory, rstr
    -from rpython.rtyper.lltypesystem.rffi import sandbox_check_type
     from rpython.rtyper.lltypesystem.lloperation import LL_OPERATIONS
     from rpython.translator.unsimplify import varoftype
     from rpython.tool.ansi_print import AnsiLogger
    @@ -22,6 +21,8 @@
         'malloc', 'malloc_varsize', 'free',
         'getfield', 'getarrayitem', 'getinteriorfield',
         'gc_thread_run',
    +    'shrink_array', 'gc_pin', 'gc_unpin', 'gc_can_move',
    +    'debug_fatalerror',
         ])
     gc_set_operations = set([
         'setfield', 'setarrayitem', 'setinteriorfield',
    @@ -39,6 +40,8 @@
         op = SpaceOperation('debug_fatalerror', [c_err], varoftype(lltype.Void))
         graph.startblock.operations.insert(0, op)
     
    +def is_gc_ptr(TYPE):
    +    return isinstance(TYPE, lltype.Ptr) and TYPE.TO._gckind == 'gc'
     
     
     class GraphChecker(object):
    @@ -77,11 +80,12 @@
                     else:
                         return "direct_call to %r" % (obj,)
     
    -            elif opname == 'force_cast':
    -                if sandbox_check_type(op.result.concretetype):
    -                    return "force_cast to pointer type: %r" % (op,)
    -                if sandbox_check_type(op.args[0].concretetype):
    -                    return "force_cast from pointer type: %r" % (op,)
    +            elif opname in ('cast_ptr_to_adr', 'force_cast'):
    +                if is_gc_ptr(op.args[0].concretetype):
    +                    return "argument is a GC ptr: %r" % (opname,)
    +                if is_gc_ptr(op.result.concretetype):
    +                    return "result is a GC ptr: %r" % (opname,)
    +
                 else:
                     return "unsupported llop: %r" % (opname,)
     
    
    From pypy.commits at gmail.com  Mon Aug 26 05:32:30 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 02:32:30 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: Direct tests
    Message-ID: <5d63a72e.1c69fb81.ace47.adb2@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97261:0328e9de2ba0
    Date: 2019-08-26 11:31 +0200
    http://bitbucket.org/pypy/pypy/changeset/0328e9de2ba0/
    
    Log:	Direct tests
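
The new tests drive the checker directly on freshly rtyped graphs.  A
condensed sketch of the pattern used below, assuming default
TranslationContext settings:

    from rpython.translator.translator import TranslationContext, graphof
    from rpython.translator.sandbox.graphchecker import GraphChecker

    def check(fn, signature=[]):
        # annotate and rtype fn, then ask the checker about its graph;
        # a result of None means the graph is considered safe
        t = TranslationContext()
        t.buildannotator().build_types(fn, signature)
        t.buildrtyper().specialize()
        return GraphChecker(t).graph_is_unsafe(graphof(t, fn))

    def harmless():
        return 42

    assert check(harmless) is None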
    
    diff --git a/rpython/translator/sandbox/graphchecker.py b/rpython/translator/sandbox/graphchecker.py
    --- a/rpython/translator/sandbox/graphchecker.py
    +++ b/rpython/translator/sandbox/graphchecker.py
    @@ -78,6 +78,7 @@
                         # this will be transformed into a stdin/stdout stub
                         pass
                     else:
    +                    # not 'external', but no 'graph' either?
                         return "direct_call to %r" % (obj,)
     
                 elif opname in ('cast_ptr_to_adr', 'force_cast'):
    diff --git a/rpython/translator/sandbox/test/test_graphchecker.py b/rpython/translator/sandbox/test/test_graphchecker.py
    new file mode 100644
    --- /dev/null
    +++ b/rpython/translator/sandbox/test/test_graphchecker.py
    @@ -0,0 +1,102 @@
    +from rpython.translator.translator import TranslationContext, graphof
    +from rpython.rtyper.lltypesystem import lltype, rffi
    +from rpython.rtyper.lltypesystem.lloperation import llop
    +from rpython.rlib.objectmodel import sandbox_review
    +
    +from rpython.translator.sandbox.graphchecker import GraphChecker
    +from rpython.translator.sandbox.graphchecker import make_abort_graph
    +
    +
    +class TestGraphIsUnsafe(object):
    +
    +    def graph_is_unsafe(self, fn, signature=[]):
    +        t = TranslationContext()
    +        self.t = t
    +        t.buildannotator().build_types(fn, signature)
    +        t.buildrtyper().specialize()
    +        graph = graphof(t, fn)
    +
    +        checker = GraphChecker(t)
    +        return checker.graph_is_unsafe(graph)
    +
    +    def check_safe(self, fn, signature=[]):
    +        result = self.graph_is_unsafe(fn, signature)
    +        assert result is None, repr(fn)
    +
    +    def check_unsafe(self, error_substring, fn, signature=[]):
    +        result = self.graph_is_unsafe(fn, signature)
    +        assert result is not None, repr(fn)
    +        assert error_substring in result
    +
    +    def test_simple(self):
    +        def f():
    +            pass
    +        self.check_safe(f)
    +
    +    def test_unsafe_setfield(self):
    +        S = lltype.Struct('S', ('x', lltype.Signed))
    +        s = lltype.malloc(S, flavor='raw', immortal=True)
    +        def f():
    +            s.x = 42
    +        self.check_unsafe("non-GC memory write", f)
    +
    +    def test_unsafe_operation(self):
    +        def f():
    +            llop.debug_forked(lltype.Void)
    +        self.check_unsafe("unsupported llop", f)
    +
    +    def test_force_cast(self):
    +        SRAW = lltype.Struct('SRAW', ('x', lltype.Signed))
    +        SGC = lltype.GcStruct('SGC', ('x', lltype.Signed))
    +        def f(x):
    +            return llop.force_cast(lltype.Signed, x)
    +        self.check_safe(f, [float])
    +        self.check_safe(f, [lltype.Ptr(SRAW)])
    +        self.check_unsafe("argument is a GC ptr", f, [lltype.Ptr(SGC)])
    +
    +    def test_direct_call_to_check_caller(self):
    +        @sandbox_review(check_caller=True)
    +        def g():
    +            pass
    +        def f():
    +            g()
    +        self.check_unsafe("caller has not been checked", f)
    +
    +    def test_direct_call_to_reviewed(self):
    +        @sandbox_review(reviewed=True)
    +        def g():
    +            pass
    +        def f():
    +            g()
    +        self.check_safe(f)
    +
    +    def test_direct_call_to_abort(self):
    +        @sandbox_review(abort=True)
    +        def g():
    +            pass
    +        def f():
    +            g()
    +        self.check_safe(f)
    +
    +    def test_direct_call_external(self):
    +        llfn1 = rffi.llexternal("foobar", [], lltype.Void, sandboxsafe=True,
    +                                _nowrapper=True)
    +        self.check_safe(lambda: llfn1)
    +        #
    +        llfn2 = rffi.llexternal("foobar", [], lltype.Void, sandboxsafe=False,
    +                                _nowrapper=True)
    +        self.check_safe(lambda: llfn2)   # will be turned into an I/O stub
    +        #
    +        llfn3 = rffi.llexternal("foobar", [], lltype.Void, sandboxsafe=True)
    +        self.check_safe(lambda: llfn3)
    +        #
    +        llfn4 = rffi.llexternal("foobar", [], lltype.Void, sandboxsafe=False)
    +        self.check_safe(lambda: llfn4)
    +
    +    def test_make_abort_graph(self):
    +        def dummy():
    +            pass
    +        self.check_safe(dummy)
    +        graph = graphof(self.t, dummy)
    +        make_abort_graph(graph)
    +        assert graph.startblock.operations[0].opname == 'debug_fatalerror'
    
    From pypy.commits at gmail.com  Mon Aug 26 08:11:47 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 05:11:47 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: in-progress
    Message-ID: <5d63cc83.1c69fb81.cb675.d088@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97262:9b6556323472
    Date: 2019-08-26 14:11 +0200
    http://bitbucket.org/pypy/pypy/changeset/9b6556323472/
    
    Log:	in-progress
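
Much of this commit consists of classifying RPython helpers with
@sandbox_review.  A minimal sketch of the three modes as they are used in the
diff below (the function bodies are placeholders):

    from rpython.rlib.objectmodel import sandbox_review

    @sandbox_review(reviewed=True)
    def raw_helper_audited():
        # raw-memory accesses here were checked by hand; callers need no check
        pass

    @sandbox_review(check_caller=True)
    def raw_helper_needs_checked_caller():
        # only safe if the caller validated its arguments, so every caller
        # must itself be reviewed or marked check_caller
        pass

    @sandbox_review(abort=True)
    def not_available_in_sandbox():
        # in a sandboxed build, reaching this function aborts the subprocess
        pass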
    
    diff --git a/pypy/module/__pypy__/interp_debug.py b/pypy/module/__pypy__/interp_debug.py
    --- a/pypy/module/__pypy__/interp_debug.py
    +++ b/pypy/module/__pypy__/interp_debug.py
    @@ -1,7 +1,13 @@
     from pypy.interpreter.gateway import unwrap_spec
     from rpython.rlib import debug, jit
     from rpython.rlib import rtimer
    +from rpython.rlib.objectmodel import sandbox_review
     
    +# In sandbox mode, the debug_start/debug_print functions are disabled,
    +# because they could allow the attacker to write arbitrary bytes to stderr
    +
    +
+@sandbox_review(abort=True)
     @jit.dont_look_inside
     @unwrap_spec(category='text', timestamp=bool)
     def debug_start(space, category, timestamp=False):
    @@ -10,11 +16,13 @@
             return space.newint(res)
         return space.w_None
     
+@sandbox_review(abort=True)
     @jit.dont_look_inside
     def debug_print(space, args_w):
         parts = [space.text_w(space.str(w_item)) for w_item in args_w]
         debug.debug_print(' '.join(parts))
     
+@sandbox_review(abort=True)
     @jit.dont_look_inside
     @unwrap_spec(category='text', timestamp=bool)
     def debug_stop(space, category, timestamp=False):
    @@ -23,6 +31,7 @@
             return space.newint(res)
         return space.w_None
     
+@sandbox_review(abort=True)
     @unwrap_spec(category='text')
     def debug_print_once(space, category, args_w):
         debug_start(space, category)
    @@ -34,9 +43,16 @@
     def debug_flush(space):
         debug.debug_flush()
     
    +
    +# In sandbox mode, these two helpers are disabled because they give unlimited
    +# access to the real time (if you enable them, note that they use lloperations
    +# that must also be white-listed in graphchecker.py)
    +
+@sandbox_review(abort=True)
     def debug_read_timestamp(space):
         return space.newint(rtimer.read_timestamp())
     
+@sandbox_review(abort=True)
     def debug_get_timestamp_unit(space):
         unit = rtimer.get_timestamp_unit()
         if unit == rtimer.UNIT_TSC:
    diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
    --- a/pypy/module/array/interp_array.py
    +++ b/pypy/module/array/interp_array.py
    @@ -1,6 +1,6 @@
     from rpython.rlib import jit, rgc, rutf8
     from rpython.rlib.buffer import RawBuffer
    -from rpython.rlib.objectmodel import keepalive_until_here
    +from rpython.rlib.objectmodel import keepalive_until_here, sandbox_review
     from rpython.rlib.rarithmetic import ovfcheck, widen, r_uint
     from rpython.rlib.unroll import unrolling_iterable
     from rpython.rtyper.annlowlevel import llstr
    @@ -159,6 +159,7 @@
             if self._buffer:
                 lltype.free(self._buffer, flavor='raw')
     
    +    @sandbox_review(reviewed=True)
         def setlen(self, size, zero=False, overallocate=True):
             if self._buffer:
                 delta_memory_pressure = -self.allocated * self.itemsize
    @@ -233,6 +234,7 @@
         def _charbuf_stop(self):
             keepalive_until_here(self)
     
    +    @sandbox_review(reviewed=True)
         def delitem(self, space, i, j):
             if i < 0:
                 i += self.len
    @@ -378,6 +380,7 @@
             self._charbuf_stop()
             return self.space.newbytes(s)
     
    +    @sandbox_review(reviewed=True)
         def descr_fromstring(self, space, w_s):
             """ fromstring(string)
     
    @@ -496,6 +499,7 @@
                 w_dict = space.w_None
             return space.newtuple([space.type(self), space.newtuple(args), w_dict])
     
    +    @sandbox_review(reviewed=True)
         def descr_copy(self, space):
             """ copy(array)
     
    @@ -510,6 +514,7 @@
             )
             return w_a
     
    +    @sandbox_review(reviewed=True)
         def descr_byteswap(self, space):
             """ byteswap()
     
    @@ -602,6 +607,7 @@
         def descr_iter(self, space):
             return space.newseqiter(self)
     
    +    @sandbox_review(reviewed=True)
         def descr_add(self, space, w_other):
             if (not isinstance(w_other, W_ArrayBase)
                     or w_other.typecode != self.typecode):
    @@ -625,6 +631,7 @@
             keepalive_until_here(a)
             return a
     
    +    @sandbox_review(reviewed=True)
         def descr_inplace_add(self, space, w_other):
             if (not isinstance(w_other, W_ArrayBase)
                     or w_other.typecode != self.typecode):
    @@ -643,6 +650,7 @@
             keepalive_until_here(w_other)
             return self
     
    +    @sandbox_review(reviewed=True)
         def _mul_helper(self, space, w_repeat, is_inplace):
             try:
                 repeat = space.getindex_w(w_repeat, space.w_OverflowError)
    @@ -965,6 +973,7 @@
                                              self.space.newtext(msg))
                 return result
     
    +        @sandbox_review(reviewed=True)
             def fromsequence(self, w_seq):
                 space = self.space
                 oldlen = self.len
    @@ -1013,6 +1022,7 @@
     
                 self._fromiterable(w_seq)
     
    +        @sandbox_review(reviewed=True)
             def extend(self, w_iterable, accept_different_array=False):
                 space = self.space
                 if isinstance(w_iterable, W_Array):
    @@ -1070,6 +1080,7 @@
     
             # interface
     
    +        @sandbox_review(reviewed=True)
             def descr_append(self, space, w_x):
                 x = self.item_w(w_x)
                 index = self.len
    @@ -1079,12 +1090,14 @@
     
             # List interface
     
    +        @sandbox_review(reviewed=True)
             def descr_reverse(self, space):
                 b = self.get_buffer()
                 for i in range(self.len / 2):
                     b[i], b[self.len - i - 1] = b[self.len - i - 1], b[i]
                 keepalive_until_here(self)
     
    +        @sandbox_review(reviewed=True)
             def descr_pop(self, space, i):
                 if i < 0:
                     i += self.len
    @@ -1099,6 +1112,7 @@
                 self.setlen(self.len - 1)
                 return w_val
     
    +        @sandbox_review(reviewed=True)
             def descr_insert(self, space, idx, w_val):
                 if idx < 0:
                     idx += self.len
    @@ -1117,6 +1131,7 @@
                 b[i] = val
                 keepalive_until_here(self)
     
    +        @sandbox_review(reviewed=True)
             def getitem_slice(self, space, w_idx):
                 start, stop, step, size = space.decode_index4(w_idx, self.len)
                 w_a = mytype.w_class(self.space)
    @@ -1132,6 +1147,7 @@
                 keepalive_until_here(w_a)
                 return w_a
     
    +        @sandbox_review(reviewed=True)
             def setitem(self, space, w_idx, w_item):
                 idx, stop, step = space.decode_index(w_idx, self.len)
                 if step != 0:
    @@ -1141,6 +1157,7 @@
                 self.get_buffer()[idx] = item
                 keepalive_until_here(self)
     
    +        @sandbox_review(reviewed=True)
             def setitem_slice(self, space, w_idx, w_item):
                 if not isinstance(w_item, W_Array):
                     raise oefmt(space.w_TypeError,
    @@ -1168,6 +1185,7 @@
                     keepalive_until_here(w_item)
                     keepalive_until_here(self)
     
    +        @sandbox_review(check_caller=True)
             def _repeat_single_item(self, a, start, repeat):
                 # 
                 assert isinstance(a, W_Array)
    diff --git a/pypy/module/gc/interp_gc.py b/pypy/module/gc/interp_gc.py
    --- a/pypy/module/gc/interp_gc.py
    +++ b/pypy/module/gc/interp_gc.py
    @@ -46,7 +46,8 @@
         If they were already enabled, no-op.
         If they were disabled even several times, enable them anyway.
         """
    -    rgc.enable()
    +    if not space.config.translation.sandbox:    # not available in sandbox
    +        rgc.enable()
         if not space.user_del_action.enabled_at_app_level:
             space.user_del_action.enabled_at_app_level = True
             enable_finalizers(space)
    @@ -55,7 +56,8 @@
         """Non-recursive version.  Disable major collections and finalizers.
         Multiple calls to this function are ignored.
         """
    -    rgc.disable()
    +    if not space.config.translation.sandbox:    # not available in sandbox
    +        rgc.disable()
         if space.user_del_action.enabled_at_app_level:
             space.user_del_action.enabled_at_app_level = False
             disable_finalizers(space)
    diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py
    --- a/pypy/module/time/interp_time.py
    +++ b/pypy/module/time/interp_time.py
    @@ -5,6 +5,7 @@
     from rpython.rtyper.lltypesystem import lltype
     from rpython.rlib.rarithmetic import intmask
     from rpython.rlib import rposix, rtime
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.translator.tool.cbuild import ExternalCompilationInfo
     import math
     import os
    @@ -224,6 +225,7 @@
             accept2dyear = 1
         _set_module_object(space, "accept2dyear", space.newint(accept2dyear))
     
+@sandbox_review(reviewed=True)
     def _init_timezone(space):
         timezone = daylight = altzone = 0
         tzname = ["", ""]
    @@ -413,6 +415,7 @@
         w_time_tuple = space.newtuple(time_tuple)
         return space.call_function(w_struct_time, w_time_tuple)
     
+@sandbox_review(reviewed=True)
     def _gettmarg(space, w_tup, allowNone=True):
         if space.is_none(w_tup):
             if not allowNone:
    @@ -507,6 +510,7 @@
     
         return space.newfloat(pytime.clock())
     
+@sandbox_review(reviewed=True)
     def ctime(space, w_seconds=None):
         """ctime([seconds]) -> string
     
    @@ -540,6 +544,7 @@
     
         return space.newtext(rffi.charp2str(p)[:-1]) # get rid of new line
     
+@sandbox_review(reviewed=True)
     def gmtime(space, w_seconds=None):
         """gmtime([seconds]) -> (tm_year, tm_mon, tm_day, tm_hour, tm_min,
                               tm_sec, tm_wday, tm_yday, tm_isdst)
    @@ -560,6 +565,7 @@
             raise OperationError(space.w_ValueError, space.newtext(_get_error_msg()))
         return _tm_to_tuple(space, p)
     
+@sandbox_review(reviewed=True)
     def localtime(space, w_seconds=None):
         """localtime([seconds]) -> (tm_year, tm_mon, tm_day, tm_hour, tm_min,
                                  tm_sec, tm_wday, tm_yday, tm_isdst)
    @@ -577,6 +583,7 @@
             raise OperationError(space.w_ValueError, space.newtext(_get_error_msg()))
         return _tm_to_tuple(space, p)
     
+@sandbox_review(reviewed=True)
     def mktime(space, w_tup):
         """mktime(tuple) -> floating point number
     
    diff --git a/rpython/memory/gc/inspector.py b/rpython/memory/gc/inspector.py
    --- a/rpython/memory/gc/inspector.py
    +++ b/rpython/memory/gc/inspector.py
    @@ -89,7 +89,7 @@
     raw_os_write = rffi.llexternal(rposix.UNDERSCORE_ON_WIN32 + 'write',
                                    [rffi.INT, llmemory.Address, rffi.SIZE_T],
                                    rffi.SIZE_T,
    -                               sandboxsafe=True, _nowrapper=True)
    +                               _nowrapper=True)
     
     AddressStack = get_address_stack()
     
    diff --git a/rpython/rlib/buffer.py b/rpython/rlib/buffer.py
    --- a/rpython/rlib/buffer.py
    +++ b/rpython/rlib/buffer.py
    @@ -7,6 +7,7 @@
     from rpython.rtyper.lltypesystem.rlist import LIST_OF
     from rpython.rtyper.annlowlevel import llstr
     from rpython.rlib.objectmodel import specialize, we_are_translated
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rlib import jit
     from rpython.rlib.rgc import (resizable_list_supporting_raw_ptr,
                                   nonmoving_raw_ptr_for_resizable_list,
    @@ -143,6 +144,7 @@
             ptr = self.get_raw_address()
             return llop.raw_load(TP, ptr, byte_offset)
     
    +    @sandbox_review(check_caller=True)
         @specialize.ll_and_arg(1)
         def typed_write(self, TP, byte_offset, value):
             """
    @@ -179,6 +181,7 @@
             base_ofs = targetcls._get_gc_data_offset()
             scale_factor = llmemory.sizeof(lltype.Char)
     
    +        @sandbox_review(check_caller=True)
             @specialize.ll_and_arg(1)
             def typed_read(self, TP, byte_offset):
                 if not is_alignment_correct(TP, byte_offset):
    @@ -188,6 +191,7 @@
                 return llop.gc_load_indexed(TP, lldata, byte_offset,
                                             scale_factor, base_ofs)
     
    +        @sandbox_review(check_caller=True)
             @specialize.ll_and_arg(1)
             def typed_write(self, TP, byte_offset, value):
                 if self.readonly or not is_alignment_correct(TP, byte_offset):
    @@ -362,10 +366,12 @@
             ptr = self.buffer.get_raw_address()
             return rffi.ptradd(ptr, self.offset)
     
    +    @sandbox_review(check_caller=True)
         @specialize.ll_and_arg(1)
         def typed_read(self, TP, byte_offset):
             return self.buffer.typed_read(TP, byte_offset + self.offset)
     
    +    @sandbox_review(check_caller=True)
         @specialize.ll_and_arg(1)
         def typed_write(self, TP, byte_offset, value):
             return self.buffer.typed_write(TP, byte_offset + self.offset, value)
    diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py
    --- a/rpython/rlib/objectmodel.py
    +++ b/rpython/rlib/objectmodel.py
    @@ -248,7 +248,7 @@
         """
         assert reviewed + check_caller + abort == 1
         def wrap(func):
    -        assert not hasattr(func, '_sandbox_review_')
    +        assert not hasattr(func, '_sandbox_review_') or abort
             if reviewed:
                 func._sandbox_review_ = 'reviewed'
             if check_caller:
    @@ -379,6 +379,10 @@
         # XXX this can be made more efficient in the future
         return bytearray(str(i))
     
    +def sandboxed_translation():
    +    config = fetch_translated_config()
    +    return config is not None and config.translation.sandbox
    +
     def fetch_translated_config():
         """Returns the config that is current when translating.
         Returns None if not translated.
    diff --git a/rpython/rlib/rgc.py b/rpython/rlib/rgc.py
    --- a/rpython/rlib/rgc.py
    +++ b/rpython/rlib/rgc.py
    @@ -6,7 +6,7 @@
     from rpython.rlib import jit
     from rpython.rlib.objectmodel import we_are_translated, enforceargs, specialize
     from rpython.rlib.objectmodel import CDefinedIntSymbolic, not_rpython
    -from rpython.rlib.objectmodel import sandbox_review
    +from rpython.rlib.objectmodel import sandbox_review, sandboxed_translation
     from rpython.rtyper.extregistry import ExtRegistryEntry
     from rpython.rtyper.lltypesystem import lltype, llmemory
     
    @@ -359,15 +359,23 @@
                 return True
         return False
     
+@not_rpython
    +def _ll_arraycopy_of_nongc_not_for_sandboxed():
    +    pass
     
     @jit.oopspec('list.ll_arraycopy(source, dest, source_start, dest_start, length)')
     @enforceargs(None, None, int, int, int)
-@sandbox_review(check_caller=True)
+@sandbox_review(reviewed=True)
     @specialize.ll()
     def ll_arraycopy(source, dest, source_start, dest_start, length):
         from rpython.rtyper.lltypesystem.lloperation import llop
         from rpython.rlib.objectmodel import keepalive_until_here
     
    +    TP = lltype.typeOf(source).TO
    +    assert TP == lltype.typeOf(dest).TO
    +    if not lltype_is_gc(TP) and sandboxed_translation():
    +        _ll_arraycopy_of_nongc_not_for_sandboxed()
    +
         # XXX: Hack to ensure that we get a proper effectinfo.write_descrs_arrays
         # and also, maybe, speed up very small cases
         if length <= 1:
    @@ -381,9 +389,6 @@
                 assert (source_start + length <= dest_start or
                         dest_start + length <= source_start)
     
    -    TP = lltype.typeOf(source).TO
    -    assert TP == lltype.typeOf(dest).TO
    -
         slowpath = False
         if must_split_gc_address_space():
             slowpath = True
    @@ -1094,6 +1099,7 @@
             hop.exception_cannot_occur()
             return hop.genop('gc_gcflag_extra', vlist, resulttype = hop.r_result)
     
+@specialize.memo()
     def lltype_is_gc(TP):
         return getattr(getattr(TP, "TO", None), "_gckind", "?") == 'gc'
     
    @@ -1417,7 +1423,7 @@
         return _ResizableListSupportingRawPtr(lst)
     
     def nonmoving_raw_ptr_for_resizable_list(lst):
    -    if must_split_gc_address_space():
    +    if must_split_gc_address_space() or sandboxed_translation():
             raise ValueError
         return _nonmoving_raw_ptr_for_resizable_list(lst)
     
    @@ -1499,6 +1505,7 @@
     
     
     @jit.dont_look_inside
+@sandbox_review(check_caller=True)
     def ll_nonmovable_raw_ptr_for_resizable_list(ll_list):
         """
         WARNING: dragons ahead.
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -10,7 +10,8 @@
         _CYGWIN, _MACRO_ON_POSIX, UNDERSCORE_ON_WIN32, _WIN32,
         _prefer_unicode, _preferred_traits, _preferred_traits2)
     from rpython.rlib.objectmodel import (
    -    specialize, enforceargs, register_replacement_for, NOT_CONSTANT)
    +    specialize, enforceargs, register_replacement_for, NOT_CONSTANT,
    +    sandbox_review)
     from rpython.rlib.rarithmetic import intmask, widen
     from rpython.rlib.signature import signature
     from rpython.tool.sourcetools import func_renamer
    @@ -988,6 +989,7 @@
                          [rffi.INTP, rffi.VOIDP, rffi.VOIDP, rffi.VOIDP],
                          rffi.PID_T, _nowrapper = True)
     
+@sandbox_review(abort=True)
     @replace_os_function('fork')
     @jit.dont_look_inside
     def fork():
    @@ -1017,6 +1019,7 @@
             lltype.free(master_p, flavor='raw')
             lltype.free(slave_p, flavor='raw')
     
+@sandbox_review(abort=True)
     @replace_os_function('forkpty')
     @jit.dont_look_inside
     def forkpty():
    @@ -1058,6 +1061,7 @@
                              [rffi.PID_T, rffi.INTP, rffi.INT], rffi.PID_T,
                              save_err=rffi.RFFI_SAVE_ERRNO)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('waitpid')
     def waitpid(pid, options):
         status_p = lltype.malloc(rffi.INTP.TO, 1, flavor='raw')
    @@ -1743,6 +1747,7 @@
         finally:
             lltype.free(groups, flavor='raw')
     
+@sandbox_review(reviewed=True)
     @replace_os_function('setgroups')
     def setgroups(gids):
         n = len(gids)
    diff --git a/rpython/rlib/rstack.py b/rpython/rlib/rstack.py
    --- a/rpython/rlib/rstack.py
    +++ b/rpython/rlib/rstack.py
    @@ -16,7 +16,7 @@
     
     def llexternal(name, args, res, _callable=None):
         return rffi.llexternal(name, args, res,
    -                           sandboxsafe=True, _nowrapper=True,
    +                           sandboxsafe='check_caller', _nowrapper=True,
                                _callable=_callable)
     
     _stack_get_end = llexternal('LL_stack_get_end', [], lltype.Signed,
    diff --git a/rpython/rlib/rstruct/standardfmttable.py b/rpython/rlib/rstruct/standardfmttable.py
    --- a/rpython/rlib/rstruct/standardfmttable.py
    +++ b/rpython/rlib/rstruct/standardfmttable.py
    @@ -7,7 +7,7 @@
     
     import struct
     
    -from rpython.rlib.objectmodel import specialize
    +from rpython.rlib.objectmodel import specialize, sandbox_review
     from rpython.rlib.rarithmetic import r_uint, r_longlong, r_ulonglong
     from rpython.rlib.rstruct import ieee
     from rpython.rlib.rstruct.error import StructError, StructOverflowError
    @@ -30,6 +30,7 @@
         Create a fast path packer for TYPE. The packer returns True is it succeded
         or False otherwise.
         """
    +    @sandbox_review(reviewed=True)
         @specialize.argtype(0)
         def do_pack_fastpath(fmtiter, value):
             size = rffi.sizeof(TYPE)
    @@ -39,6 +40,7 @@
                 raise CannotWrite
             #
             # typed_write() might raise CannotWrite
    +        # (note that we assume the write cannot overflow its buffer)
             fmtiter.wbuf.typed_write(TYPE, fmtiter.pos, value)
             if not ALLOW_FASTPATH:
                 # if we are here it means that typed_write did not raise, and thus
    @@ -211,6 +213,7 @@
     
     @specialize.memo()
     def unpack_fastpath(TYPE):
    +    @sandbox_review(reviewed=True)
         @specialize.argtype(0)
         def do_unpack_fastpath(fmtiter):
             size = rffi.sizeof(TYPE)
    @@ -289,9 +292,15 @@
                 # because of alignment issues. So we copy the slice into a new
                 # string, which is guaranteed to be properly aligned, and read the
                 # float/double from there
    -            input = fmtiter.read(size)
    -            val = StringBuffer(input).typed_read(TYPE, 0)
    +            val = read_slowpath(fmtiter)
             fmtiter.appendobj(float(val))
    +
    +    @sandbox_review(reviewed=True)
    +    def read_slowpath(fmtiter):
    +        size = rffi.sizeof(TYPE)
    +        input = fmtiter.read(size)
    +        return StringBuffer(input).typed_read(TYPE, 0)
    +
         return unpack_ieee
     
     @specialize.argtype(0)
    diff --git a/rpython/rlib/rthread.py b/rpython/rlib/rthread.py
    --- a/rpython/rlib/rthread.py
    +++ b/rpython/rlib/rthread.py
    @@ -6,6 +6,7 @@
     from rpython.rlib.debug import ll_assert
     from rpython.rlib.objectmodel import we_are_translated, specialize
     from rpython.rlib.objectmodel import CDefinedIntSymbolic, not_rpython
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rtyper.lltypesystem.lloperation import llop
     from rpython.rtyper.tool import rffi_platform
     from rpython.rtyper.extregistry import ExtRegistryEntry
    @@ -225,7 +226,7 @@
     
     get_stacksize = llexternal('RPyThreadGetStackSize', [], lltype.Signed)
     set_stacksize = llexternal('RPyThreadSetStackSize', [lltype.Signed],
    -                           lltype.Signed)
    +                           lltype.Signed, sandboxsafe='abort')
     
     # ____________________________________________________________
     #
    diff --git a/rpython/rlib/rtime.py b/rpython/rlib/rtime.py
    --- a/rpython/rlib/rtime.py
    +++ b/rpython/rlib/rtime.py
    @@ -8,7 +8,7 @@
     from rpython.translator.tool.cbuild import ExternalCompilationInfo
     from rpython.rtyper.tool import rffi_platform
     from rpython.rtyper.lltypesystem import rffi, lltype
    -from rpython.rlib.objectmodel import register_replacement_for
    +from rpython.rlib.objectmodel import register_replacement_for, sandbox_review
     from rpython.rlib.rarithmetic import intmask, UINT_MAX
     from rpython.rlib import rposix
     
    @@ -262,6 +262,7 @@
                                        lltype.Ptr(TIMEVAL)], rffi.INT,
                             save_err=rffi.RFFI_SAVE_ERRNO)
     
+@sandbox_review(reviewed=True)
     @replace_time_function('sleep')
     def sleep(secs):
         if _WIN32:
    diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py
    --- a/rpython/rtyper/lltypesystem/rffi.py
    +++ b/rpython/rtyper/lltypesystem/rffi.py
    @@ -98,6 +98,12 @@
                     don't bother releasing the GIL.  An explicit True or False
                     overrides this logic.
     
    +    sandboxsafe: if True, the process really calls the C function even if it
    +                 is sandboxed.  If False, it will turn into a stdin/stdout
    +                 communication with the parent process.  If "check_caller",
    +                 it is like True but we call @sandbox_review(check_caller=True)
    +                 which means that we need to also check the callers.
    +
         calling_conv: if 'unknown' or 'win', the C function is not directly seen
                       by the JIT.  If 'c', it can be seen (depending on
                       releasegil=False).  For tests only, or if _nowrapper,
    @@ -334,7 +340,13 @@
         # for debugging, stick ll func ptr to that
         wrapper._ptr = funcptr
         wrapper = func_with_new_name(wrapper, name)
    -    wrapper = sandbox_review(reviewed=True)(wrapper)
    +    if sandboxsafe == 'check_caller':
    +        wrapper = sandbox_review(check_caller=True)(wrapper)
    +    elif sandboxsafe == 'abort':
    +        wrapper = sandbox_review(abort=True)(wrapper)
    +    else:
    +        assert isinstance(sandboxsafe, bool)
    +        wrapper = sandbox_review(reviewed=True)(wrapper)
         return wrapper
     
     def sandbox_check_type(TYPE):
    @@ -1111,6 +1123,7 @@
     CCHARPP = lltype.Ptr(lltype.Array(CCHARP, hints={'nolength': True}))
     CWCHARPP = lltype.Ptr(lltype.Array(CWCHARP, hints={'nolength': True}))
     
+@sandbox_review(reviewed=True)
     def liststr2charpp(l):
         """ list[str] -> char**, NULL terminated
         """
    @@ -1256,6 +1269,7 @@
             return v_ptr
     
     
+@sandbox_review(check_caller=True)
     def structcopy(pdst, psrc):
         """Copy all the fields of the structure given by 'psrc'
         into the structure given by 'pdst'.
    @@ -1273,6 +1287,7 @@
                                                  if name not in padding]
             unrollfields = unrolling_iterable(fields)
     
    +        @sandbox_review(check_caller=True)
             def copyfn(pdst, psrc):
                 for name, TYPE in unrollfields:
                     if isinstance(TYPE, lltype.ContainerType):
    @@ -1286,6 +1301,7 @@
     _get_structcopy_fn._annspecialcase_ = 'specialize:memo'
     
     
+@sandbox_review(check_caller=True)
     def setintfield(pdst, fieldname, value):
         """Maybe temporary: a helper to set an integer field into a structure,
         transparently casting between the various integer types.
    @@ -1420,14 +1436,14 @@
                 lltype.Void,
                 releasegil=False,
                 calling_conv='c',
    -            sandboxsafe=True,
    +            sandboxsafe='check_caller',
             )
     c_memset = llexternal("memset",
                 [VOIDP, lltype.Signed, SIZE_T],
                 lltype.Void,
                 releasegil=False,
                 calling_conv='c',
    -            sandboxsafe=True,
    +            sandboxsafe='check_caller',
             )
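
For reference, the sandboxsafe values accepted by rffi.llexternal after this
change correspond to the review markers above.  A sketch of the combinations
exercised elsewhere in this branch (the C function name is made up):

    from rpython.rtyper.lltypesystem import rffi, lltype

    # really calls the C function even when sandboxed; treated as reviewed
    c_safe = rffi.llexternal("my_c_helper", [], lltype.Void,
                             sandboxsafe=True, _nowrapper=True)

    # really calls the C function, but every caller must also be reviewed
    c_check = rffi.llexternal("my_c_helper", [], lltype.Void,
                              sandboxsafe='check_caller', _nowrapper=True)

    # in a sandboxed build, calling this aborts the subprocess
    c_abort = rffi.llexternal("my_c_helper", [], lltype.Void,
                              sandboxsafe='abort')

    # default: turned into a stdin/stdout exchange with the parent process
    c_proxy = rffi.llexternal("my_c_helper", [], lltype.Void)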
     
     
    diff --git a/rpython/rtyper/lltypesystem/rlist.py b/rpython/rtyper/lltypesystem/rlist.py
    --- a/rpython/rtyper/lltypesystem/rlist.py
    +++ b/rpython/rtyper/lltypesystem/rlist.py
    @@ -10,7 +10,6 @@
         ADTIList, ADTIFixedList, dum_nocheck)
     from rpython.rtyper.rmodel import Repr, inputconst, externalvsinternal
     from rpython.tool.pairtype import pairtype, pair
    -from rpython.rlib.objectmodel import sandbox_review
     
     
     # ____________________________________________________________
    @@ -197,7 +196,6 @@
     # adapted C code
     
     @jit.look_inside_iff(lambda l, newsize, overallocate: jit.isconstant(len(l.items)) and jit.isconstant(newsize))
-@sandbox_review(reviewed=True)
     @signature(types.any(), types.int(), types.bool(), returns=types.none())
     def _ll_list_resize_hint_really(l, newsize, overallocate):
         """
    diff --git a/rpython/translator/backendopt/inline.py b/rpython/translator/backendopt/inline.py
    --- a/rpython/translator/backendopt/inline.py
    +++ b/rpython/translator/backendopt/inline.py
    @@ -617,8 +617,7 @@
         def dont_inline(funcobj):
             func = getattr(funcobj, '_callable', None)
             if sandbox:
    -            review = getattr(func, '_sandbox_review_', None)
    -            if review is not None and review != 'check_caller':
    +            if hasattr(func, '_sandbox_review_'):
                     return True
             return getattr(func, '_dont_inline_', False)
         return dont_inline
    diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py
    --- a/rpython/translator/c/node.py
    +++ b/rpython/translator/c/node.py
    @@ -882,9 +882,9 @@
     
     def new_funcnode(db, T, obj, forcename=None):
         from rpython.rtyper.rtyper import llinterp_backend
    -    if db.sandbox:
    -        if (getattr(obj, 'external', None) is not None and
    -                not obj._safe_not_sandboxed):
    +    if db.sandbox and getattr(obj, 'external', None) is not None:
    +        safe_flag = obj._safe_not_sandboxed
    +        if not (safe_flag is True or safe_flag == "check_caller"):
                 try:
                     sandbox_mapping = db.sandbox_mapping
                 except AttributeError:
    diff --git a/rpython/translator/sandbox/graphchecker.py b/rpython/translator/sandbox/graphchecker.py
    --- a/rpython/translator/sandbox/graphchecker.py
    +++ b/rpython/translator/sandbox/graphchecker.py
    @@ -19,10 +19,19 @@
     safe_operations = set([
         'keepalive', 'threadlocalref_get', 'threadlocalref_store',
         'malloc', 'malloc_varsize', 'free',
    -    'getfield', 'getarrayitem', 'getinteriorfield',
    -    'gc_thread_run',
    -    'shrink_array', 'gc_pin', 'gc_unpin', 'gc_can_move',
    -    'debug_fatalerror',
    +    'getfield', 'getarrayitem', 'getinteriorfield', 'raw_load',
    +    'cast_opaque_ptr', 'cast_ptr_to_int',
    +    'gc_thread_run', 'gc_stack_bottom', 'gc_thread_after_fork',
    +    'shrink_array', 'gc_pin', 'gc_unpin', 'gc_can_move', 'gc_id',
    +    'gc_identityhash', 'weakref_create', 'weakref_deref',
    +    'gc_fq_register', 'gc_fq_next_dead',
    +    'gc_set_max_heap_size', 'gc_ignore_finalizer', 'gc_add_memory_pressure',
    +    'gc_writebarrier', 'gc__collect',
    +    'length_of_simple_gcarray_from_opaque',
    +    'debug_fatalerror', 'debug_print_traceback', 'debug_flush',
    +    'hint', 'debug_start', 'debug_stop', 'debug_print', 'debug_offset',
    +    'jit_force_quasi_immutable', 'jit_force_virtual', 'jit_marker',
    +    'jit_is_virtual',
         ])
     gc_set_operations = set([
         'setfield', 'setarrayitem', 'setinteriorfield',
    @@ -69,9 +78,13 @@
                     if hasattr(obj, 'graph'):
                         g2 = obj.graph
                         if graph_review(g2) == 'check_caller':
    -                        return "caller has not been checked: %r" % (op,)
    -                elif getattr(obj, 'sandboxsafe', False):
    -                    pass
    +                        return ("direct_call to a graph with "
    +                                "check_caller=True: %r" % (op,))
    +                elif getattr(obj, '_safe_not_sandboxed', False) is not False:
    +                    ss = obj._safe_not_sandboxed
    +                    if ss is not True:
    +                        return ("direct_call to llfunc with "
    +                                "sandboxsafe=%r: %r" % (ss, obj))
                     elif getattr(obj, 'external', None) is not None:
                         # either obj._safe_not_sandboxed is True, and then it's
                         # fine; or obj._safe_not_sandboxed is False, and then
    @@ -81,7 +94,15 @@
                         # not 'external', but no 'graph' either?
                         return "direct_call to %r" % (obj,)
     
    -            elif opname in ('cast_ptr_to_adr', 'force_cast'):
    +            elif opname == 'indirect_call':
    +                graph_list = op.args[-1].value
    +                for g2 in graph_list:
    +                    if graph_review(g2) == 'check_caller':
    +                        return ("indirect_call that can go to at least one "
    +                                "graph with check_caller=True: %r" % (op,))
    +
    +            elif opname in ('cast_ptr_to_adr', 'force_cast',
    +                            'cast_int_to_ptr'):
                     if is_gc_ptr(op.args[0].concretetype):
                         return "argument is a GC ptr: %r" % (opname,)
                     if is_gc_ptr(op.result.concretetype):
    diff --git a/rpython/translator/sandbox/rsandbox.py b/rpython/translator/sandbox/rsandbox.py
    --- a/rpython/translator/sandbox/rsandbox.py
    +++ b/rpython/translator/sandbox/rsandbox.py
    @@ -6,7 +6,7 @@
     import py
     import sys
     
    -from rpython.rlib import types
    +from rpython.rlib import types, debug
     from rpython.rlib.objectmodel import specialize
     from rpython.rlib.signature import signature
     from rpython.rlib.unroll import unrolling_iterable
    @@ -20,6 +20,7 @@
     from rpython.rtyper.llannotation import lltype_to_annotation
     from rpython.rtyper.annlowlevel import MixLevelHelperAnnotator
     from rpython.tool.ansi_print import AnsiLogger
    +from rpython.translator.sandbox.graphchecker import make_abort_graph
     
     log = AnsiLogger("sandbox")
     
    @@ -99,34 +100,43 @@
              lltype.typeOf(rpy_sandbox_arg[arg_kind]).TO.ARGS[0])
             for arg_kind in arg_kinds])
     
    -    result_func = rpy_sandbox_res[result_kind]
    -    RESTYPE = FUNCTYPE.RESULT
    +    if fnobj._safe_not_sandboxed == 'abort':
     
    -    try:
    -        lst = rtyper._sandboxed_functions
    -    except AttributeError:
    -        lst = rtyper._sandboxed_functions = []
    -    name_and_sig = '%s(%s)%s' % (fnname, ''.join(arg_kinds), result_kind)
    -    lst.append(name_and_sig)
    -    log(name_and_sig)
    -    name_and_sig = rffi.str2charp(name_and_sig, track_allocation=False)
    +        msg = "sandboxed subprocess aborts on call to %r" % (fnname,)
    +        def execute(*args):
    +            debug.fatalerror(msg)
     
    -    def execute(*args):
    -        #
    -        # serialize the arguments
    -        i = 0
    -        for arg_kind, func, ARGTYPE in unroll_args:
    -            if arg_kind == 'v':
    -                continue
    -            func(rffi.cast(ARGTYPE, args[i]))
    -            i = i + 1
    -        #
    -        # send the function name and the arguments and wait for an answer
    -        result = result_func(name_and_sig)
    -        #
    -        # result the answer, if any
    -        if RESTYPE is not lltype.Void:
    -            return rffi.cast(RESTYPE, result)
    +    else:
    +
    +        result_func = rpy_sandbox_res[result_kind]
    +        RESTYPE = FUNCTYPE.RESULT
    +
    +        try:
    +            lst = rtyper._sandboxed_functions
    +        except AttributeError:
    +            lst = rtyper._sandboxed_functions = []
    +        name_and_sig = '%s(%s)%s' % (fnname, ''.join(arg_kinds), result_kind)
    +        lst.append(name_and_sig)
    +        log(name_and_sig)
    +        name_and_sig = rffi.str2charp(name_and_sig, track_allocation=False)
    +
    +        def execute(*args):
    +            #
    +            # serialize the arguments
    +            i = 0
    +            for arg_kind, func, ARGTYPE in unroll_args:
    +                if arg_kind == 'v':
    +                    continue
    +                func(rffi.cast(ARGTYPE, args[i]))
    +                i = i + 1
    +            #
    +            # send the function name and the arguments and wait for an answer
    +            result = result_func(name_and_sig)
    +            #
    +            # result the answer, if any
    +            if RESTYPE is not lltype.Void:
    +                return rffi.cast(RESTYPE, result)
    +    #
         execute.__name__ = 'sandboxed_%s' % (fnname,)
         #
         args_s, s_result = sig_ll(fnobj)
    diff --git a/rpython/translator/sandbox/test/test_graphchecker.py b/rpython/translator/sandbox/test/test_graphchecker.py
    --- a/rpython/translator/sandbox/test/test_graphchecker.py
    +++ b/rpython/translator/sandbox/test/test_graphchecker.py
    @@ -21,11 +21,11 @@
     
         def check_safe(self, fn, signature=[]):
             result = self.graph_is_unsafe(fn, signature)
    -        assert result is None, repr(fn)
    +        assert result is None
     
         def check_unsafe(self, error_substring, fn, signature=[]):
             result = self.graph_is_unsafe(fn, signature)
    -        assert result is not None, repr(fn)
    +        assert result is not None
             assert error_substring in result
     
         def test_simple(self):
    @@ -60,7 +60,7 @@
                 pass
             def f():
                 g()
    -        self.check_unsafe("caller has not been checked", f)
    +        self.check_unsafe("direct_call to a graph with check_caller=True", f)
     
         def test_direct_call_to_reviewed(self):
             @sandbox_review(reviewed=True)
    @@ -78,20 +78,46 @@
                 g()
             self.check_safe(f)
     
    +    def test_indirect_call_to_check_caller(self):
    +        class A:
    +            def meth(self, i):
    +                pass
    +        class B(A):
    +            def meth(self, i):
    +                pass
    +        class C(A):
    +            @sandbox_review(check_caller=True)
    +            def meth(self, i):
    +                pass
    +        def f(i):
    +            if i > 5:
    +                x = B()
    +            else:
    +                x = C()
    +            x.meth(i)
    +        self.check_unsafe("indirect_call that can go to at least one "
    +                          "graph with check_caller=True", f, [int])
    +
         def test_direct_call_external(self):
             llfn1 = rffi.llexternal("foobar", [], lltype.Void, sandboxsafe=True,
                                     _nowrapper=True)
    -        self.check_safe(lambda: llfn1)
    +        self.check_safe(lambda: llfn1())
             #
             llfn2 = rffi.llexternal("foobar", [], lltype.Void, sandboxsafe=False,
                                     _nowrapper=True)
    -        self.check_safe(lambda: llfn2)   # will be turned into an I/O stub
    +        self.check_safe(lambda: llfn2())   # will be turned into an I/O stub
    +        #
    +        llfn2b = rffi.llexternal("foobar", [], lltype.Void,
    +                                 sandboxsafe="check_caller",
    +                                 _nowrapper=True)
    +        self.check_unsafe("direct_call to llfunc with "
    +                          "sandboxsafe='check_caller'", lambda: llfn2b())
             #
             llfn3 = rffi.llexternal("foobar", [], lltype.Void, sandboxsafe=True)
    -        self.check_safe(lambda: llfn3)
    +        self.check_safe(lambda: llfn3())
             #
             llfn4 = rffi.llexternal("foobar", [], lltype.Void, sandboxsafe=False)
    -        self.check_safe(lambda: llfn4)
    +        self.check_safe(lambda: llfn4())
     
         def test_make_abort_graph(self):
             def dummy():
    
    From pypy.commits at gmail.com  Mon Aug 26 08:19:56 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 05:19:56 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: fix
    Message-ID: <5d63ce6c.1c69fb81.4c637.76aa@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97263:8bd962a47352
    Date: 2019-08-26 14:19 +0200
    http://bitbucket.org/pypy/pypy/changeset/8bd962a47352/
    
    Log:	fix
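
TP at this point is already the array type itself (lltype.typeOf(source).TO),
while lltype_is_gc() expects a pointer type and inspects its .TO attribute,
so the previous check could never fire; hence the direct _gckind test.  A
small illustration (the GcArray below is only an example):

    from rpython.rtyper.lltypesystem import lltype
    from rpython.rlib.rgc import lltype_is_gc

    GCARRAY = lltype.GcArray(lltype.Signed)

    assert lltype_is_gc(lltype.Ptr(GCARRAY))   # designed for Ptr types
    assert not lltype_is_gc(GCARRAY)           # the bare container says "no"
    assert GCARRAY._gckind == 'gc'             # what the fixed line checks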
    
    diff --git a/rpython/rlib/rgc.py b/rpython/rlib/rgc.py
    --- a/rpython/rlib/rgc.py
    +++ b/rpython/rlib/rgc.py
    @@ -373,7 +373,7 @@
     
         TP = lltype.typeOf(source).TO
         assert TP == lltype.typeOf(dest).TO
    -    if not lltype_is_gc(TP) and sandboxed_translation():
    +    if TP._gckind != 'gc' and sandboxed_translation():
             _ll_arraycopy_of_nongc_not_for_sandboxed()
     
         # XXX: Hack to ensure that we get a proper effectinfo.write_descrs_arrays
    
    From pypy.commits at gmail.com  Mon Aug 26 10:32:09 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 07:32:09 -0700 (PDT)
    Subject: [pypy-commit] pypy default: Check for a rare case of someone
     shrinking the buffer from another thread
    Message-ID: <5d63ed69.1c69fb81.4da51.39d2@mx.google.com>
    
    Author: Armin Rigo 
    Branch: 
    Changeset: r97264:215ea348ccb4
    Date: 2019-08-26 16:31 +0200
    http://bitbucket.org/pypy/pypy/changeset/215ea348ccb4/
    
    Log:	Check for a rare case of someone shrinking the buffer from another
    	thread while using it in a read() variant
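
The fix funnels every setslice() into a helper that re-checks the target
length first, because another thread may have resized the buffer between the
read() and the copy.  A standalone sketch of the check, using a toy buffer
object in place of the app-level rwbuffer:

    class ToyBuffer(object):
        def __init__(self, size):
            self.data = bytearray(size)
        def getlength(self):
            return len(self.data)
        def setslice(self, pos, chunk):
            self.data[pos:pos + len(chunk)] = chunk

    def output_slice(rwbuffer, target_pos, data):
        # refuse to write past the (possibly shrunken) end of the buffer
        if target_pos + len(data) > rwbuffer.getlength():
            raise RuntimeError("target buffer has shrunk during operation")
        rwbuffer.setslice(target_pos, data)

    buf = ToyBuffer(4)
    output_slice(buf, 0, b"data")        # fits
    try:
        output_slice(buf, 2, b"data")    # would overflow the buffer: rejected
    except RuntimeError:
        pass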
    
    diff --git a/pypy/module/_file/readinto.py b/pypy/module/_file/readinto.py
    --- a/pypy/module/_file/readinto.py
    +++ b/pypy/module/_file/readinto.py
    @@ -4,6 +4,14 @@
     from rpython.rlib.rposix import c_read
     from rpython.rtyper.lltypesystem import lltype, rffi
     from pypy.module._file.interp_file import is_wouldblock_error, signal_checker
    +from pypy.interpreter.error import oefmt
    +
    +
    +def output_slice(space, rwbuffer, target_pos, data):
    +    if target_pos + len(data) > rwbuffer.getlength():
    +        raise oefmt(space.w_RuntimeError,
    +                    "target buffer has shrunk during operation")
    +    rwbuffer.setslice(target_pos, data)
     
     
     def direct_readinto(self, w_rwbuffer):
    @@ -27,14 +35,14 @@
             MAX_PART = 1024 * 1024    # 1 MB
             while size > MAX_PART:
                 data = self.direct_read(MAX_PART)
    -            rwbuffer.setslice(target_pos, data)
    +            output_slice(self.space, rwbuffer, target_pos, data)
                 target_pos += len(data)
                 size -= len(data)
                 if len(data) != MAX_PART:
                     break
             else:
                 data = self.direct_read(size)
    -            rwbuffer.setslice(target_pos, data)
    +            output_slice(self.space, rwbuffer, target_pos, data)
                 target_pos += len(data)
     
         else:
    @@ -46,7 +54,7 @@
             initial_size = min(size, stream.count_buffered_bytes())
             if initial_size > 0:
                 data = stream.read(initial_size)
    -            rwbuffer.setslice(target_pos, data)
    +            output_slice(self.space, rwbuffer, target_pos, data)
                 target_pos += len(data)
                 size -= len(data)
     
    diff --git a/pypy/module/_io/interp_bufferedio.py b/pypy/module/_io/interp_bufferedio.py
    --- a/pypy/module/_io/interp_bufferedio.py
    +++ b/pypy/module/_io/interp_bufferedio.py
    @@ -101,7 +101,7 @@
                 raise oefmt(space.w_TypeError, "%s() should return bytes",
                             methodname)
             data = space.bytes_w(w_data)
    -        rwbuffer.setslice(0, data)
    +        self.output_slice(space, rwbuffer, 0, data)
             return space.newint(len(data))
     
     W_BufferedIOBase.typedef = TypeDef(
    @@ -555,7 +555,7 @@
             remaining = n
             written = 0
             if current_size:
    -            result_buffer.setslice(
    +            self.output_slice(space, result_buffer,
                     written, self.buffer[self.pos:self.pos + current_size])
                 remaining -= current_size
                 written += current_size
    @@ -600,7 +600,7 @@
                 if remaining > 0:
                     if size > remaining:
                         size = remaining
    -                result_buffer.setslice(
    +                self.output_slice(space, result_buffer,
                         written, self.buffer[self.pos:self.pos + size])
                     self.pos += size
                     written += size
    diff --git a/pypy/module/_io/interp_bytesio.py b/pypy/module/_io/interp_bytesio.py
    --- a/pypy/module/_io/interp_bytesio.py
    +++ b/pypy/module/_io/interp_bytesio.py
    @@ -48,7 +48,7 @@
             size = rwbuffer.getlength()
     
             output = self.read(size)
    -        rwbuffer.setslice(0, output)
    +        self.output_slice(space, rwbuffer, 0, output)
             return space.newint(len(output))
     
         def write_w(self, space, w_data):
    diff --git a/pypy/module/_io/interp_fileio.py b/pypy/module/_io/interp_fileio.py
    --- a/pypy/module/_io/interp_fileio.py
    +++ b/pypy/module/_io/interp_fileio.py
    @@ -389,7 +389,7 @@
                         return space.w_None
                     raise wrap_oserror(space, e,
                                        exception_name='w_IOError')
    -            rwbuffer.setslice(0, buf)
    +            self.output_slice(space, rwbuffer, 0, buf)
                 return space.newint(len(buf))
             else:
                 # optimized case: reading more than 64 bytes into a rwbuffer
    diff --git a/pypy/module/_io/interp_iobase.py b/pypy/module/_io/interp_iobase.py
    --- a/pypy/module/_io/interp_iobase.py
    +++ b/pypy/module/_io/interp_iobase.py
    @@ -275,6 +275,14 @@
                     else:
                         break
     
    +    @staticmethod
    +    def output_slice(space, rwbuffer, target_pos, data):
    +        if target_pos + len(data) > rwbuffer.getlength():
    +            raise oefmt(space.w_RuntimeError,
    +                        "target buffer has shrunk during operation")
    +        rwbuffer.setslice(target_pos, data)
    +
    +
     W_IOBase.typedef = TypeDef(
         '_io._IOBase',
         __new__ = generic_new_descr(W_IOBase),
    
    From pypy.commits at gmail.com  Mon Aug 26 11:00:38 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 08:00:38 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: Tweaks
    Message-ID: <5d63f416.1c69fb81.a604.96a1@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97266:7c1bcd64f114
    Date: 2019-08-26 16:59 +0200
    http://bitbucket.org/pypy/pypy/changeset/7c1bcd64f114/
    
    Log:	Tweaks
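
Besides marking more entry points as reviewed, this adds an explicit bounds
assertion before the raw character-buffer store in the array setitem path.  A
minimal sketch of that pattern, with made-up names and a raw buffer allocated
through lltype for illustration:

    from rpython.rtyper.lltypesystem import lltype, rffi

    def checked_char_store(raw_buf, length, index, char):
        # the assertion is what makes the raw store reviewable: a bad index
        # would otherwise write outside the allocation
        assert 0 <= index < length
        raw_buf[index] = char

    buf = lltype.malloc(rffi.CCHARP.TO, 8, flavor='raw')
    try:
        checked_char_store(buf, 8, 3, 'x')
    finally:
        lltype.free(buf, flavor='raw')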
    
    diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
    --- a/pypy/module/array/interp_array.py
    +++ b/pypy/module/array/interp_array.py
    @@ -866,9 +866,11 @@
             w_array._charbuf_stop()
             return char
     
    +    @sandbox_review(reviewed=True)
         def setitem(self, index, char):
             w_array = self.w_array
             data = w_array._charbuf_start()
    +        assert 0 <= index < w_array.len
             data[index] = char
             w_array._charbuf_stop()
     
    diff --git a/pypy/objspace/std/unicodeobject.py b/pypy/objspace/std/unicodeobject.py
    --- a/pypy/objspace/std/unicodeobject.py
    +++ b/pypy/objspace/std/unicodeobject.py
    @@ -4,7 +4,7 @@
     
     from rpython.rlib.objectmodel import (
         compute_hash, compute_unique_id, import_from_mixin, always_inline,
    -    enforceargs, newlist_hint, specialize, we_are_translated)
    +    enforceargs, newlist_hint, specialize, we_are_translated, sandbox_review)
     from rpython.rlib.buffer import StringBuffer
     from rpython.rlib.mutbuffer import MutableStringBuffer
     from rpython.rlib.rarithmetic import ovfcheck
    @@ -94,6 +94,7 @@
         def utf8_w(self, space):
             return self._utf8
     
    +    @sandbox_review(reviewed=True)
         def readbuf_w(self, space):
             # XXX for now
             from rpython.rlib.rstruct.unichar import pack_codepoint, UNICODE_SIZE
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -1482,6 +1482,7 @@
             calling_conv='win')
     
     
+@sandbox_review(reviewed=True)
     @replace_os_function('utime')
     @specialize.argtype(0, 1)
     def utime(path, times):
    @@ -1540,11 +1541,13 @@
                 lltype.free(atime, flavor='raw')
                 lltype.free(mtime, flavor='raw')
     
+@sandbox_review(check_caller=True)
     def times_to_timeval2p(times, l_timeval2p):
         actime, modtime = times
         _time_to_timeval(actime, l_timeval2p[0])
         _time_to_timeval(modtime, l_timeval2p[1])
     
+@sandbox_review(check_caller=True)
     def _time_to_timeval(t, l_timeval):
         import math
         fracpart, intpart = math.modf(t)
    @@ -2255,6 +2258,7 @@
             [rffi.CCHARP, TIMEVAL2P], rffi.INT,
             save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         @specialize.argtype(1)
         def lutimes(pathname, times):
             if times is None:
    @@ -2270,6 +2274,7 @@
             [rffi.INT, TIMEVAL2P], rffi.INT,
             save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         @specialize.argtype(1)
         def futimes(fd, times):
             if times is None:
    diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py
    --- a/rpython/rtyper/lltypesystem/rffi.py
    +++ b/rpython/rtyper/lltypesystem/rffi.py
    @@ -1450,6 +1450,7 @@
     # NOTE: This is not a weak key dictionary, thus keeping a lot of stuff alive.
     TEST_RAW_ADDR_KEEP_ALIVE = {}
     
+@sandbox_review(reviewed=True)
     @jit.dont_look_inside
     def get_raw_address_of_string(string):
         """Returns a 'char *' that is valid as long as the rpython string object is alive.
    diff --git a/rpython/translator/sandbox/graphchecker.py b/rpython/translator/sandbox/graphchecker.py
    --- a/rpython/translator/sandbox/graphchecker.py
    +++ b/rpython/translator/sandbox/graphchecker.py
    @@ -21,7 +21,8 @@
         'malloc', 'malloc_varsize', 'free',
         'getfield', 'getarrayitem', 'getinteriorfield', 'raw_load',
         'cast_opaque_ptr', 'cast_ptr_to_int',
    -    'gc_thread_run', 'gc_stack_bottom', 'gc_thread_after_fork',
    +    'gc_thread_run', 'gc_stack_bottom',
    +    'gc_thread_before_fork', 'gc_thread_after_fork',
         'shrink_array', 'gc_pin', 'gc_unpin', 'gc_can_move', 'gc_id',
         'gc_identityhash', 'weakref_create', 'weakref_deref',
         'gc_fq_register', 'gc_fq_next_dead',
    @@ -103,8 +104,6 @@
     
                 elif opname in ('cast_ptr_to_adr', 'force_cast',
                                 'cast_int_to_ptr'):
    -                if is_gc_ptr(op.args[0].concretetype):
    -                    return "argument is a GC ptr: %r" % (opname,)
                     if is_gc_ptr(op.result.concretetype):
                         return "result is a GC ptr: %r" % (opname,)
     
    
    From pypy.commits at gmail.com  Mon Aug 26 11:00:36 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 08:00:36 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: hg merge default
    Message-ID: <5d63f414.1c69fb81.0f26.f924@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97265:0d5f51e7a2c8
    Date: 2019-08-26 16:32 +0200
    http://bitbucket.org/pypy/pypy/changeset/0d5f51e7a2c8/
    
    Log:	hg merge default
    
    diff too long, truncating to 2000 out of 29422 lines
    
    diff --git a/.hgignore b/.hgignore
    --- a/.hgignore
    +++ b/.hgignore
    @@ -70,7 +70,9 @@
     ^lib_pypy/ctypes_config_cache/_.+_cache\.py$
     ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$
     ^lib_pypy/_libmpdec/.+.o$
    -^lib_pypy/.+.c$
    +^lib_pypy/.+_cffi.c$
    +^lib_pypy/_curses_cffi_check.c
    +^lib_pypy/_pypy_openssl.c
     ^lib_pypy/.+.o$
     ^lib_pypy/.+.so$
     ^lib_pypy/.+.pyd$
    diff --git a/extra_tests/cffi_tests/cffi1/test_recompiler.py b/extra_tests/cffi_tests/cffi1/test_recompiler.py
    --- a/extra_tests/cffi_tests/cffi1/test_recompiler.py
    +++ b/extra_tests/cffi_tests/cffi1/test_recompiler.py
    @@ -2414,6 +2414,18 @@
         assert ffi.sizeof(a[0]) == ffi.sizeof("unsigned")
         assert ffi.sizeof(b[0]) == ffi.sizeof(a[0])
     
    +def test_struct_with_func_with_struct_pointer_arg():
    +    ffi = FFI()
    +    ffi.cdef("""struct BinaryTree {
    +            int (* CompareKey)(struct BinaryTree *tree);
    +        };""")
    +    lib = verify(ffi, "test_struct_with_func_with_struct_pointer_arg", """
    +        struct BinaryTree {
    +            int (* CompareKey)(struct BinaryTree *tree);
    +        };
    +    """)
    +    ffi.new("struct BinaryTree *")
    +
     def test_struct_with_func_with_struct_arg():
         ffi = FFI()
         ffi.cdef("""struct BinaryTree {
    diff --git a/extra_tests/ctypes_tests/test_cast.py b/extra_tests/ctypes_tests/test_cast.py
    --- a/extra_tests/ctypes_tests/test_cast.py
    +++ b/extra_tests/ctypes_tests/test_cast.py
    @@ -28,3 +28,13 @@
         assert x.value is False
         x = c_bool(['yadda'])
         assert x.value is True
    +
    +def test_cast_array():
    +    import sys
    +    data = b'data'
    +    ubyte = c_ubyte * len(data)
    +    byteslike = ubyte.from_buffer_copy(data)
    +    m = memoryview(byteslike)
    +    if sys.version_info > (3, 3):
    +        b = m.cast('B')
    +        assert bytes(b) == data
    diff --git a/extra_tests/ctypes_tests/test_win32.py b/extra_tests/ctypes_tests/test_win32.py
    --- a/extra_tests/ctypes_tests/test_win32.py
    +++ b/extra_tests/ctypes_tests/test_win32.py
    @@ -5,7 +5,7 @@
     import pytest
     
     @pytest.mark.skipif("sys.platform != 'win32'")
    -def test_VARIANT(self):
    +def test_VARIANT():
         from ctypes import wintypes
         a = wintypes.VARIANT_BOOL()
         assert a.value is False
    diff --git a/extra_tests/test_datetime.py b/extra_tests/test_datetime.py
    --- a/extra_tests/test_datetime.py
    +++ b/extra_tests/test_datetime.py
    @@ -128,7 +128,7 @@
         import os
         import time
         if os.name == 'nt':
    -        skip("setting os.environ['TZ'] ineffective on windows")
    +        pytest.skip("setting os.environ['TZ'] ineffective on windows")
         try:
             prev_tz = os.environ.get("TZ")
             os.environ["TZ"] = "GMT"
    diff --git a/extra_tests/test_json.py b/extra_tests/test_json.py
    --- a/extra_tests/test_json.py
    +++ b/extra_tests/test_json.py
    @@ -31,3 +31,31 @@
     @given(jsondata)
     def test_roundtrip(d):
         assert json.loads(json.dumps(d)) == d
    +
    +def test_skipkeys():
    +    assert json.dumps({Ellipsis: 42}, skipkeys=True) == '{}'
    +    assert json.dumps({Ellipsis: 42, 3: 4}, skipkeys=True) == '{"3": 4}'
    +    assert json.dumps({3: 4, Ellipsis: 42}, skipkeys=True) == '{"3": 4}'
    +    assert json.dumps({Ellipsis: 42, NotImplemented: 43}, skipkeys=True) \
    +                 == '{}'
    +    assert json.dumps({3: 4, Ellipsis: 42, NotImplemented: 43}, skipkeys=True)\
    +                 == '{"3": 4}'
    +    assert json.dumps({Ellipsis: 42, 3: 4, NotImplemented: 43}, skipkeys=True)\
    +                 == '{"3": 4}'
    +    assert json.dumps({Ellipsis: 42, NotImplemented: 43, 3: 4}, skipkeys=True)\
    +                 == '{"3": 4}'
    +    assert json.dumps({3: 4, 5: 6, Ellipsis: 42}, skipkeys=True) \
    +                 == '{"3": 4, "5": 6}'
    +    assert json.dumps({3: 4, Ellipsis: 42, 5: 6}, skipkeys=True) \
    +                 == '{"3": 4, "5": 6}'
    +    assert json.dumps({Ellipsis: 42, 3: 4, 5: 6}, skipkeys=True) \
    +                 == '{"3": 4, "5": 6}'
    +
    +def test_boolean_as_dict_key():
    +    # In CPython 2.x, dumps({True:...}) gives {"True":...}.  It should be
    +    # "true" instead; it's a bug as far as I can tell.  In 3.x it was fixed.
    +    # BUT! if we call dumps() with sort_keys=True, then CPython (any version)
    +    # gives "true" instead of "True".  Surprize!
    +    # I don't want to understand why, let's just not attempt to reproduce that.
    +    assert json.dumps({True: 5}) == '{"true": 5}'
    +    assert json.dumps({False: 5}) == '{"false": 5}'
    diff --git a/lib-python/2.7/json/encoder.py b/lib-python/2.7/json/encoder.py
    --- a/lib-python/2.7/json/encoder.py
    +++ b/lib-python/2.7/json/encoder.py
    @@ -294,10 +294,6 @@
                 items = d.iteritems()
     
             for key, v in items:
    -            if first:
    -                first = False
    -            else:
    -                builder.append(separator)
                 if isinstance(key, basestring):
                     pass
                 # JavaScript is weakly typed for these, so it makes sense to
    @@ -316,6 +312,10 @@
                     continue
                 else:
                     raise TypeError("key " + repr(key) + " is not a string")
    +            if first:
    +                first = False
    +            else:
    +                builder.append(separator)
                 builder.append('"')
                 builder.append(self.__encoder(key))
                 builder.append('"')
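
The encoder.py hunks above move the "append the item separator" step to after the
key-type/skipkeys checks, so that a key skipped via skipkeys=True no longer leaves
a stray separator in the output; the new test_skipkeys cases in
extra_tests/test_json.py exercise exactly this. A minimal standalone illustration
of the same ordering (plain Python, not the PyPy builder code):

    def join_kept(pairs, skipkeys=True):
        # Emit the separator only between keys that are actually kept,
        # mirroring the reordering in the hunk above.
        parts = []
        first = True
        for key, value in pairs:
            if not isinstance(key, str):
                if skipkeys:
                    continue        # skipped key: emit no separator either
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                parts.append(", ")
            parts.append('"%s": %s' % (key, value))
        return "{" + "".join(parts) + "}"

    assert join_kept([(Ellipsis, 42), ("3", 4)]) == '{"3": 4}'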
    diff --git a/lib-python/2.7/test/test_dictviews.py b/lib-python/2.7/test/test_dictviews.py
    --- a/lib-python/2.7/test/test_dictviews.py
    +++ b/lib-python/2.7/test/test_dictviews.py
    @@ -1,5 +1,6 @@
     import copy
     import pickle
    +import sys
     import unittest
     import collections
     from test import test_support
    @@ -169,6 +170,20 @@
         def test_recursive_repr(self):
             d = {}
             d[42] = d.viewvalues()
    +        r = repr(d)
    +        # Cannot perform a stronger test, as the contents of the repr
    +        # are implementation-dependent.  All we can say is that we
    +        # want a str result, not an exception of any sort.
    +        self.assertIsInstance(r, str)
    +        d[42] = d.viewitems()
    +        r = repr(d)
    +        # Again.
    +        self.assertIsInstance(r, str)
    +
    +    def test_deeply_nested_repr(self):
    +        d = {}
    +        for i in range(sys.getrecursionlimit() + 100):
    +            d = {42: d.viewvalues()}
             self.assertRaises(RuntimeError, repr, d)
     
         def test_abc_registry(self):
    diff --git a/lib-python/3/_osx_support.py b/lib-python/3/_osx_support.py
    deleted file mode 100644
    --- a/lib-python/3/_osx_support.py
    +++ /dev/null
    @@ -1,488 +0,0 @@
    -"""Shared OS X support functions."""
    -
    -import os
    -import re
    -import sys
    -
    -__all__ = [
    -    'compiler_fixup',
    -    'customize_config_vars',
    -    'customize_compiler',
    -    'get_platform_osx',
    -]
    -
    -# configuration variables that may contain universal build flags,
    -# like "-arch" or "-isdkroot", that may need customization for
    -# the user environment
    -_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
    -                            'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
    -                            'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
    -                            'PY_CORE_CFLAGS')
    -
    -# configuration variables that may contain compiler calls
    -_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')
    -
    -# prefix added to original configuration variable names
    -_INITPRE = '_OSX_SUPPORT_INITIAL_'
    -
    -
    -def _find_executable(executable, path=None):
    -    """Tries to find 'executable' in the directories listed in 'path'.
    -
    -    A string listing directories separated by 'os.pathsep'; defaults to
    -    os.environ['PATH'].  Returns the complete filename or None if not found.
    -    """
    -    if path is None:
    -        path = os.environ['PATH']
    -
    -    paths = path.split(os.pathsep)
    -    base, ext = os.path.splitext(executable)
    -
    -    if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
    -        executable = executable + '.exe'
    -
    -    if not os.path.isfile(executable):
    -        for p in paths:
    -            f = os.path.join(p, executable)
    -            if os.path.isfile(f):
    -                # the file exists, we have a shot at spawn working
    -                return f
    -        return None
    -    else:
    -        return executable
    -
    -
    -def _read_output(commandstring):
    -    """Output from succesful command execution or None"""
    -    # Similar to os.popen(commandstring, "r").read(),
    -    # but without actually using os.popen because that
    -    # function is not usable during python bootstrap.
    -    # tempfile is also not available then.
    -    import contextlib
    -    try:
    -        import tempfile
    -        fp = tempfile.NamedTemporaryFile()
    -    except ImportError:
    -        fp = open("/tmp/_osx_support.%s"%(
    -            os.getpid(),), "w+b")
    -
    -    with contextlib.closing(fp) as fp:
    -        cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
    -        return fp.read().decode('utf-8').strip() if not os.system(cmd) else None
    -
    -
    -def _find_build_tool(toolname):
    -    """Find a build tool on current path or using xcrun"""
    -    return (_find_executable(toolname)
    -                or _read_output("/usr/bin/xcrun -find %s" % (toolname,))
    -                or ''
    -            )
    -
    -_SYSTEM_VERSION = None
    -
    -def _get_system_version():
    -    """Return the OS X system version as a string"""
    -    # Reading this plist is a documented way to get the system
    -    # version (see the documentation for the Gestalt Manager)
    -    # We avoid using platform.mac_ver to avoid possible bootstrap issues during
    -    # the build of Python itself (distutils is used to build standard library
    -    # extensions).
    -
    -    global _SYSTEM_VERSION
    -
    -    if _SYSTEM_VERSION is None:
    -        _SYSTEM_VERSION = ''
    -        try:
    -            f = open('/System/Library/CoreServices/SystemVersion.plist')
    -        except IOError:
    -            # We're on a plain darwin box, fall back to the default
    -            # behaviour.
    -            pass
    -        else:
    -            try:
-                m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
-                              r'<string>(.*?)</string>', f.read())
    -            finally:
    -                f.close()
    -            if m is not None:
    -                _SYSTEM_VERSION = '.'.join(m.group(1).split('.')[:2])
    -            # else: fall back to the default behaviour
    -
    -    return _SYSTEM_VERSION
    -
    -def _remove_original_values(_config_vars):
    -    """Remove original unmodified values for testing"""
    -    # This is needed for higher-level cross-platform tests of get_platform.
    -    for k in list(_config_vars):
    -        if k.startswith(_INITPRE):
    -            del _config_vars[k]
    -
    -def _save_modified_value(_config_vars, cv, newvalue):
    -    """Save modified and original unmodified value of configuration var"""
    -
    -    oldvalue = _config_vars.get(cv, '')
    -    if (oldvalue != newvalue) and (_INITPRE + cv not in _config_vars):
    -        _config_vars[_INITPRE + cv] = oldvalue
    -    _config_vars[cv] = newvalue
    -
    -def _supports_universal_builds():
    -    """Returns True if universal builds are supported on this system"""
    -    # As an approximation, we assume that if we are running on 10.4 or above,
    -    # then we are running with an Xcode environment that supports universal
    -    # builds, in particular -isysroot and -arch arguments to the compiler. This
    -    # is in support of allowing 10.4 universal builds to run on 10.3.x systems.
    -
    -    osx_version = _get_system_version()
    -    if osx_version:
    -        try:
    -            osx_version = tuple(int(i) for i in osx_version.split('.'))
    -        except ValueError:
    -            osx_version = ''
    -    return bool(osx_version >= (10, 4)) if osx_version else False
    -
    -
    -def _find_appropriate_compiler(_config_vars):
    -    """Find appropriate C compiler for extension module builds"""
    -
    -    # Issue #13590:
    -    #    The OSX location for the compiler varies between OSX
    -    #    (or rather Xcode) releases.  With older releases (up-to 10.5)
    -    #    the compiler is in /usr/bin, with newer releases the compiler
    -    #    can only be found inside Xcode.app if the "Command Line Tools"
    -    #    are not installed.
    -    #
    -    #    Futhermore, the compiler that can be used varies between
    -    #    Xcode releases. Upto Xcode 4 it was possible to use 'gcc-4.2'
    -    #    as the compiler, after that 'clang' should be used because
    -    #    gcc-4.2 is either not present, or a copy of 'llvm-gcc' that
    -    #    miscompiles Python.
    -
    -    # skip checks if the compiler was overriden with a CC env variable
    -    if 'CC' in os.environ:
    -        return _config_vars
    -
    -    # The CC config var might contain additional arguments.
    -    # Ignore them while searching.
    -    cc = oldcc = _config_vars['CC'].split()[0]
    -    if not _find_executable(cc):
    -        # Compiler is not found on the shell search PATH.
    -        # Now search for clang, first on PATH (if the Command LIne
    -        # Tools have been installed in / or if the user has provided
    -        # another location via CC).  If not found, try using xcrun
    -        # to find an uninstalled clang (within a selected Xcode).
    -
    -        # NOTE: Cannot use subprocess here because of bootstrap
    -        # issues when building Python itself (and os.popen is
    -        # implemented on top of subprocess and is therefore not
    -        # usable as well)
    -
    -        cc = _find_build_tool('clang')
    -
    -    elif os.path.basename(cc).startswith('gcc'):
    -        # Compiler is GCC, check if it is LLVM-GCC
    -        data = _read_output("'%s' --version"
    -                             % (cc.replace("'", "'\"'\"'"),))
    -        if 'llvm-gcc' in data:
    -            # Found LLVM-GCC, fall back to clang
    -            cc = _find_build_tool('clang')
    -
    -    if not cc:
    -        raise SystemError(
    -               "Cannot locate working compiler")
    -
    -    if cc != oldcc:
    -        # Found a replacement compiler.
    -        # Modify config vars using new compiler, if not already explictly
    -        # overriden by an env variable, preserving additional arguments.
    -        for cv in _COMPILER_CONFIG_VARS:
    -            if cv in _config_vars and cv not in os.environ:
    -                cv_split = _config_vars[cv].split()
    -                cv_split[0] = cc if cv != 'CXX' else cc + '++'
    -                _save_modified_value(_config_vars, cv, ' '.join(cv_split))
    -
    -    return _config_vars
    -
    -
    -def _remove_universal_flags(_config_vars):
    -    """Remove all universal build arguments from config vars"""
    -
    -    for cv in _UNIVERSAL_CONFIG_VARS:
    -        # Do not alter a config var explicitly overriden by env var
    -        if cv in _config_vars and cv not in os.environ:
    -            flags = _config_vars[cv]
    -            flags = re.sub('-arch\s+\w+\s', ' ', flags, re.ASCII)
    -            flags = re.sub('-isysroot [^ \t]*', ' ', flags)
    -            _save_modified_value(_config_vars, cv, flags)
    -
    -    return _config_vars
    -
    -
    -def _remove_unsupported_archs(_config_vars):
    -    """Remove any unsupported archs from config vars"""
    -    # Different Xcode releases support different sets for '-arch'
    -    # flags. In particular, Xcode 4.x no longer supports the
    -    # PPC architectures.
    -    #
    -    # This code automatically removes '-arch ppc' and '-arch ppc64'
    -    # when these are not supported. That makes it possible to
    -    # build extensions on OSX 10.7 and later with the prebuilt
    -    # 32-bit installer on the python.org website.
    -
    -    # skip checks if the compiler was overriden with a CC env variable
    -    if 'CC' in os.environ:
    -        return _config_vars
    -
    -    if re.search('-arch\s+ppc', _config_vars['CFLAGS']) is not None:
    -        # NOTE: Cannot use subprocess here because of bootstrap
    -        # issues when building Python itself
    -        status = os.system("'%s' -arch ppc -x c /dev/null 2>/dev/null"%(
    -            _config_vars['CC'].replace("'", "'\"'\"'"),))
    -        # The Apple compiler drivers return status 255 if no PPC
    -        if (status >> 8) == 255:
    -            # Compiler doesn't support PPC, remove the related
    -            # '-arch' flags if not explicitly overridden by an
    -            # environment variable
    -            for cv in _UNIVERSAL_CONFIG_VARS:
    -                if cv in _config_vars and cv not in os.environ:
    -                    flags = _config_vars[cv]
    -                    flags = re.sub('-arch\s+ppc\w*\s', ' ', flags)
    -                    _save_modified_value(_config_vars, cv, flags)
    -
    -    return _config_vars
    -
    -
    -def _override_all_archs(_config_vars):
    -    """Allow override of all archs with ARCHFLAGS env var"""
    -    # NOTE: This name was introduced by Apple in OSX 10.5 and
    -    # is used by several scripting languages distributed with
    -    # that OS release.
    -    if 'ARCHFLAGS' in os.environ:
    -        arch = os.environ['ARCHFLAGS']
    -        for cv in _UNIVERSAL_CONFIG_VARS:
    -            if cv in _config_vars and '-arch' in _config_vars[cv]:
    -                flags = _config_vars[cv]
    -                flags = re.sub('-arch\s+\w+\s', ' ', flags)
    -                flags = flags + ' ' + arch
    -                _save_modified_value(_config_vars, cv, flags)
    -
    -    return _config_vars
    -
    -
    -def _check_for_unavailable_sdk(_config_vars):
    -    """Remove references to any SDKs not available"""
    -    # If we're on OSX 10.5 or later and the user tries to
    -    # compile an extension using an SDK that is not present
    -    # on the current machine it is better to not use an SDK
    -    # than to fail.  This is particularly important with
    -    # the standalong Command Line Tools alternative to a
    -    # full-blown Xcode install since the CLT packages do not
    -    # provide SDKs.  If the SDK is not present, it is assumed
    -    # that the header files and dev libs have been installed
    -    # to /usr and /System/Library by either a standalone CLT
    -    # package or the CLT component within Xcode.
    -    cflags = _config_vars.get('CFLAGS', '')
    -    m = re.search(r'-isysroot\s+(\S+)', cflags)
    -    if m is not None:
    -        sdk = m.group(1)
    -        if not os.path.exists(sdk):
    -            for cv in _UNIVERSAL_CONFIG_VARS:
    -                # Do not alter a config var explicitly overriden by env var
    -                if cv in _config_vars and cv not in os.environ:
    -                    flags = _config_vars[cv]
    -                    flags = re.sub(r'-isysroot\s+\S+(?:\s|$)', ' ', flags)
    -                    _save_modified_value(_config_vars, cv, flags)
    -
    -    return _config_vars
    -
    -
    -def compiler_fixup(compiler_so, cc_args):
    -    """
    -    This function will strip '-isysroot PATH' and '-arch ARCH' from the
    -    compile flags if the user has specified one them in extra_compile_flags.
    -
    -    This is needed because '-arch ARCH' adds another architecture to the
    -    build, without a way to remove an architecture. Furthermore GCC will
    -    barf if multiple '-isysroot' arguments are present.
    -    """
    -    stripArch = stripSysroot = False
    -
    -    compiler_so = list(compiler_so)
    -
    -    if not _supports_universal_builds():
    -        # OSX before 10.4.0, these don't support -arch and -isysroot at
    -        # all.
    -        stripArch = stripSysroot = True
    -    else:
    -        stripArch = '-arch' in cc_args
    -        stripSysroot = '-isysroot' in cc_args
    -
    -    if stripArch or 'ARCHFLAGS' in os.environ:
    -        while True:
    -            try:
    -                index = compiler_so.index('-arch')
    -                # Strip this argument and the next one:
    -                del compiler_so[index:index+2]
    -            except ValueError:
    -                break
    -
    -    if 'ARCHFLAGS' in os.environ and not stripArch:
    -        # User specified different -arch flags in the environ,
    -        # see also distutils.sysconfig
    -        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
    -
    -    if stripSysroot:
    -        while True:
    -            try:
    -                index = compiler_so.index('-isysroot')
    -                # Strip this argument and the next one:
    -                del compiler_so[index:index+2]
    -            except ValueError:
    -                break
    -
    -    # Check if the SDK that is used during compilation actually exists,
    -    # the universal build requires the usage of a universal SDK and not all
    -    # users have that installed by default.
    -    sysroot = None
    -    if '-isysroot' in cc_args:
    -        idx = cc_args.index('-isysroot')
    -        sysroot = cc_args[idx+1]
    -    elif '-isysroot' in compiler_so:
    -        idx = compiler_so.index('-isysroot')
    -        sysroot = compiler_so[idx+1]
    -
    -    if sysroot and not os.path.isdir(sysroot):
    -        from distutils import log
    -        log.warn("Compiling with an SDK that doesn't seem to exist: %s",
    -                sysroot)
    -        log.warn("Please check your Xcode installation")
    -
    -    return compiler_so
    -
    -
    -def customize_config_vars(_config_vars):
    -    """Customize Python build configuration variables.
    -
    -    Called internally from sysconfig with a mutable mapping
    -    containing name/value pairs parsed from the configured
    -    makefile used to build this interpreter.  Returns
    -    the mapping updated as needed to reflect the environment
    -    in which the interpreter is running; in the case of
    -    a Python from a binary installer, the installed
    -    environment may be very different from the build
    -    environment, i.e. different OS levels, different
    -    built tools, different available CPU architectures.
    -
    -    This customization is performed whenever
    -    distutils.sysconfig.get_config_vars() is first
    -    called.  It may be used in environments where no
    -    compilers are present, i.e. when installing pure
    -    Python dists.  Customization of compiler paths
    -    and detection of unavailable archs is deferred
    -    until the first extention module build is
    -    requested (in distutils.sysconfig.customize_compiler).
    -
    -    Currently called from distutils.sysconfig
    -    """
    -
    -    if not _supports_universal_builds():
    -        # On Mac OS X before 10.4, check if -arch and -isysroot
    -        # are in CFLAGS or LDFLAGS and remove them if they are.
    -        # This is needed when building extensions on a 10.3 system
    -        # using a universal build of python.
    -        _remove_universal_flags(_config_vars)
    -
    -    # Allow user to override all archs with ARCHFLAGS env var
    -    _override_all_archs(_config_vars)
    -
    -    # Remove references to sdks that are not found
    -    _check_for_unavailable_sdk(_config_vars)
    -
    -    return _config_vars
    -
    -
    -def customize_compiler(_config_vars):
    -    """Customize compiler path and configuration variables.
    -
    -    This customization is performed when the first
    -    extension module build is requested
    -    in distutils.sysconfig.customize_compiler).
    -    """
    -
    -    # Find a compiler to use for extension module builds
    -    _find_appropriate_compiler(_config_vars)
    -
    -    # Remove ppc arch flags if not supported here
    -    _remove_unsupported_archs(_config_vars)
    -
    -    # Allow user to override all archs with ARCHFLAGS env var
    -    _override_all_archs(_config_vars)
    -
    -    return _config_vars
    -
    -
    -def get_platform_osx(_config_vars, osname, release, machine):
    -    """Filter values for get_platform()"""
    -    # called from get_platform() in sysconfig and distutils.util
    -    #
    -    # For our purposes, we'll assume that the system version from
    -    # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
    -    # to. This makes the compatibility story a bit more sane because the
    -    # machine is going to compile and link as if it were
    -    # MACOSX_DEPLOYMENT_TARGET.
    -
    -    macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '')
    -    macrelease = _get_system_version() or macver
    -    macver = macver or macrelease
    -
    -    if macver:
    -        release = macver
    -        osname = "macosx"
    -
    -        # Use the original CFLAGS value, if available, so that we
    -        # return the same machine type for the platform string.
    -        # Otherwise, distutils may consider this a cross-compiling
    -        # case and disallow installs.
    -        cflags = _config_vars.get(_INITPRE+'CFLAGS',
    -                                    _config_vars.get('CFLAGS', ''))
    -        if ((macrelease + '.') >= '10.4.' and
    -            '-arch' in cflags.strip()):
    -            # The universal build will build fat binaries, but not on
    -            # systems before 10.4
    -
    -            machine = 'fat'
    -
    -            archs = re.findall('-arch\s+(\S+)', cflags)
    -            archs = tuple(sorted(set(archs)))
    -
    -            if len(archs) == 1:
    -                machine = archs[0]
    -            elif archs == ('i386', 'ppc'):
    -                machine = 'fat'
    -            elif archs == ('i386', 'x86_64'):
    -                machine = 'intel'
    -            elif archs == ('i386', 'ppc', 'x86_64'):
    -                machine = 'fat3'
    -            elif archs == ('ppc64', 'x86_64'):
    -                machine = 'fat64'
    -            elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
    -                machine = 'universal'
    -            else:
    -                raise ValueError(
    -                   "Don't know machine value for archs=%r" % (archs,))
    -
    -        elif machine == 'i386':
    -            # On OSX the machine type returned by uname is always the
    -            # 32-bit variant, even if the executable architecture is
    -            # the 64-bit variant
    -            if sys.maxsize >= 2**32:
    -                machine = 'x86_64'
    -
    -        elif machine in ('PowerPC', 'Power_Macintosh'):
    -            # Pick a sane name for the PPC architecture.
    -            # See 'i386' case
    -            if sys.maxsize >= 2**32:
    -                machine = 'ppc64'
    -            else:
    -                machine = 'ppc'
    -
    -    return (osname, release, machine)
    diff --git a/lib-python/3/test/crashers/trace_at_recursion_limit.py b/lib-python/3/test/crashers/trace_at_recursion_limit.py
    deleted file mode 100644
    --- a/lib-python/3/test/crashers/trace_at_recursion_limit.py
    +++ /dev/null
    @@ -1,27 +0,0 @@
    -"""
    -From http://bugs.python.org/issue6717
    -
    -A misbehaving trace hook can trigger a segfault by exceeding the recursion
    -limit.
    -"""
    -import sys
    -
    -
    -def x():
    -    pass
    -
    -def g(*args):
    -    if True: # change to True to crash interpreter
    -        try:
    -            x()
    -        except:
    -            pass
    -    return g
    -
    -def f():
    -    print(sys.getrecursionlimit())
    -    f()
    -
    -sys.settrace(g)
    -
    -f()
    diff --git a/lib-python/3/test/json_tests/test_tool.py b/lib-python/3/test/json_tests/test_tool.py
    deleted file mode 100644
    --- a/lib-python/3/test/json_tests/test_tool.py
    +++ /dev/null
    @@ -1,69 +0,0 @@
    -import os
    -import sys
    -import textwrap
    -import unittest
    -import subprocess
    -from test import support
    -from test.script_helper import assert_python_ok
    -
    -class TestTool(unittest.TestCase):
    -    data = """
    -
    -        [["blorpie"],[ "whoops" ] , [
    -                                 ],\t"d-shtaeou",\r"d-nthiouh",
    -        "i-vhbjkhnth", {"nifty":87}, {"morefield" :\tfalse,"field"
    -            :"yes"}  ]
    -           """
    -
    -    expect = textwrap.dedent("""\
    -    [
    -        [
    -            "blorpie"
    -        ],
    -        [
    -            "whoops"
    -        ],
    -        [],
    -        "d-shtaeou",
    -        "d-nthiouh",
    -        "i-vhbjkhnth",
    -        {
    -            "nifty": 87
    -        },
    -        {
    -            "field": "yes",
    -            "morefield": false
    -        }
    -    ]
    -    """)
    -
    -    def test_stdin_stdout(self):
    -        with subprocess.Popen(
    -                (sys.executable, '-m', 'json.tool'),
    -                stdin=subprocess.PIPE, stdout=subprocess.PIPE) as proc:
    -            out, err = proc.communicate(self.data.encode())
    -        self.assertEqual(out.splitlines(), self.expect.encode().splitlines())
    -        self.assertEqual(err, None)
    -
    -    def _create_infile(self):
    -        infile = support.TESTFN
    -        with open(infile, "w") as fp:
    -            self.addCleanup(os.remove, infile)
    -            fp.write(self.data)
    -        return infile
    -
    -    def test_infile_stdout(self):
    -        infile = self._create_infile()
    -        rc, out, err = assert_python_ok('-m', 'json.tool', infile)
    -        self.assertEqual(out.splitlines(), self.expect.encode().splitlines())
    -        self.assertEqual(err, b'')
    -
    -    def test_infile_outfile(self):
    -        infile = self._create_infile()
    -        outfile = support.TESTFN + '.out'
    -        rc, out, err = assert_python_ok('-m', 'json.tool', infile, outfile)
    -        self.addCleanup(os.remove, outfile)
    -        with open(outfile, "r") as fp:
    -            self.assertEqual(fp.read(), self.expect)
    -        self.assertEqual(out, b'')
    -        self.assertEqual(err, b'')
    diff --git a/lib-python/3/test/mp_fork_bomb.py b/lib-python/3/test/mp_fork_bomb.py
    deleted file mode 100644
    --- a/lib-python/3/test/mp_fork_bomb.py
    +++ /dev/null
    @@ -1,13 +0,0 @@
    -import multiprocessing, sys
    -
    -def foo():
    -    print("123")
    -
    -# Because "if __name__ == '__main__'" is missing this will not work
    -# correctly on Windows.  However, we should get a RuntimeError rather
    -# than the Windows equivalent of a fork bomb.
    -
    -p = multiprocessing.Process(target=foo)
    -p.start()
    -p.join()
    -sys.exit(p.exitcode)
    diff --git a/lib-python/3/test/sample_doctest_no_docstrings.py b/lib-python/3/test/sample_doctest_no_docstrings.py
    deleted file mode 100644
    --- a/lib-python/3/test/sample_doctest_no_docstrings.py
    +++ /dev/null
    @@ -1,12 +0,0 @@
    -# This is a sample module used for testing doctest.
    -#
    -# This module is for testing how doctest handles a module with no
    -# docstrings.
    -
    -
    -class Foo(object):
    -
    -    # A class with no docstring.
    -
    -    def __init__(self):
    -        pass
    diff --git a/lib-python/3/test/sample_doctest_no_doctests.py b/lib-python/3/test/sample_doctest_no_doctests.py
    deleted file mode 100644
    --- a/lib-python/3/test/sample_doctest_no_doctests.py
    +++ /dev/null
    @@ -1,15 +0,0 @@
    -"""This is a sample module used for testing doctest.
    -
    -This module is for testing how doctest handles a module with docstrings
    -but no doctest examples.
    -
    -"""
    -
    -
    -class Foo(object):
    -    """A docstring with no doctest examples.
    -
    -    """
    -
    -    def __init__(self):
    -        pass
    diff --git a/lib-python/3/test/test__osx_support.py b/lib-python/3/test/test__osx_support.py
    deleted file mode 100644
    --- a/lib-python/3/test/test__osx_support.py
    +++ /dev/null
    @@ -1,279 +0,0 @@
    -"""
    -Test suite for _osx_support: shared OS X support functions.
    -"""
    -
    -import os
    -import platform
    -import shutil
    -import stat
    -import sys
    -import unittest
    -
    -import test.support
    -
    -import _osx_support
    -
-@unittest.skipUnless(sys.platform.startswith("darwin"), "requires OS X")
    -class Test_OSXSupport(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.maxDiff = None
    -        self.prog_name = 'bogus_program_xxxx'
    -        self.temp_path_dir = os.path.abspath(os.getcwd())
    -        self.env = test.support.EnvironmentVarGuard()
    -        self.addCleanup(self.env.__exit__)
    -        for cv in ('CFLAGS', 'LDFLAGS', 'CPPFLAGS',
    -                            'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'CC',
    -                            'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
    -                            'PY_CORE_CFLAGS'):
    -            if cv in self.env:
    -                self.env.unset(cv)
    -
    -    def add_expected_saved_initial_values(self, config_vars, expected_vars):
    -        # Ensure that the initial values for all modified config vars
    -        # are also saved with modified keys.
    -        expected_vars.update(('_OSX_SUPPORT_INITIAL_'+ k,
    -                config_vars[k]) for k in config_vars
    -                    if config_vars[k] != expected_vars[k])
    -
    -    def test__find_executable(self):
    -        if self.env['PATH']:
    -            self.env['PATH'] = self.env['PATH'] + ':'
    -        self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir)
    -        test.support.unlink(self.prog_name)
    -        self.assertIsNone(_osx_support._find_executable(self.prog_name))
    -        self.addCleanup(test.support.unlink, self.prog_name)
    -        with open(self.prog_name, 'w') as f:
    -            f.write("#!/bin/sh\n/bin/echo OK\n")
    -        os.chmod(self.prog_name, stat.S_IRWXU)
    -        self.assertEqual(self.prog_name,
    -                            _osx_support._find_executable(self.prog_name))
    -
    -    def test__read_output(self):
    -        if self.env['PATH']:
    -            self.env['PATH'] = self.env['PATH'] + ':'
    -        self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir)
    -        test.support.unlink(self.prog_name)
    -        self.addCleanup(test.support.unlink, self.prog_name)
    -        with open(self.prog_name, 'w') as f:
    -            f.write("#!/bin/sh\n/bin/echo ExpectedOutput\n")
    -        os.chmod(self.prog_name, stat.S_IRWXU)
    -        self.assertEqual('ExpectedOutput',
    -                            _osx_support._read_output(self.prog_name))
    -
    -    def test__find_build_tool(self):
    -        out = _osx_support._find_build_tool('cc')
    -        self.assertTrue(os.path.isfile(out),
    -                            'cc not found - check xcode-select')
    -
    -    def test__get_system_version(self):
    -        self.assertTrue(platform.mac_ver()[0].startswith(
    -                                    _osx_support._get_system_version()))
    -
    -    def test__remove_original_values(self):
    -        config_vars = {
    -        'CC': 'gcc-test -pthreads',
    -        }
    -        expected_vars = {
    -        'CC': 'clang -pthreads',
    -        }
    -        cv = 'CC'
    -        newvalue = 'clang -pthreads'
    -        _osx_support._save_modified_value(config_vars, cv, newvalue)
    -        self.assertNotEqual(expected_vars, config_vars)
    -        _osx_support._remove_original_values(config_vars)
    -        self.assertEqual(expected_vars, config_vars)
    -
    -    def test__save_modified_value(self):
    -        config_vars = {
    -        'CC': 'gcc-test -pthreads',
    -        }
    -        expected_vars = {
    -        'CC': 'clang -pthreads',
    -        }
    -        self.add_expected_saved_initial_values(config_vars, expected_vars)
    -        cv = 'CC'
    -        newvalue = 'clang -pthreads'
    -        _osx_support._save_modified_value(config_vars, cv, newvalue)
    -        self.assertEqual(expected_vars, config_vars)
    -
    -    def test__save_modified_value_unchanged(self):
    -        config_vars = {
    -        'CC': 'gcc-test -pthreads',
    -        }
    -        expected_vars = config_vars.copy()
    -        cv = 'CC'
    -        newvalue = 'gcc-test -pthreads'
    -        _osx_support._save_modified_value(config_vars, cv, newvalue)
    -        self.assertEqual(expected_vars, config_vars)
    -
    -    def test__supports_universal_builds(self):
    -        import platform
    -        self.assertEqual(platform.mac_ver()[0].split('.') >= ['10', '4'],
    -                            _osx_support._supports_universal_builds())
    -
    -    def test__find_appropriate_compiler(self):
    -        compilers = (
    -                        ('gcc-test', 'i686-apple-darwin11-llvm-gcc-4.2'),
    -                        ('clang', 'clang version 3.1'),
    -                    )
    -        config_vars = {
    -        'CC': 'gcc-test -pthreads',
    -        'CXX': 'cc++-test',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'gcc-test -bundle -arch ppc -arch i386 -g',
    -        'LDSHARED': 'gcc-test -bundle -arch ppc -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    -        }
    -        expected_vars = {
    -        'CC': 'clang -pthreads',
    -        'CXX': 'clang++',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'clang -bundle -arch ppc -arch i386 -g',
    -        'LDSHARED': 'clang -bundle -arch ppc -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    -        }
    -        self.add_expected_saved_initial_values(config_vars, expected_vars)
    -
    -        suffix = (':' + self.env['PATH']) if self.env['PATH'] else ''
    -        self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix
    -        for c_name, c_output in compilers:
    -            test.support.unlink(c_name)
    -            self.addCleanup(test.support.unlink, c_name)
    -            with open(c_name, 'w') as f:
    -                f.write("#!/bin/sh\n/bin/echo " + c_output)
    -            os.chmod(c_name, stat.S_IRWXU)
    -        self.assertEqual(expected_vars,
    -                            _osx_support._find_appropriate_compiler(
    -                                    config_vars))
    -
    -    def test__remove_universal_flags(self):
    -        config_vars = {
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
    -        'LDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    -        }
    -        expected_vars = {
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3    ',
    -        'LDFLAGS': '    -g',
    -        'CPPFLAGS': '-I.  ',
    -        'BLDSHARED': 'gcc-4.0 -bundle    -g',
    -        'LDSHARED': 'gcc-4.0 -bundle      -g',
    -        }
    -        self.add_expected_saved_initial_values(config_vars, expected_vars)
    -
    -        self.assertEqual(expected_vars,
    -                            _osx_support._remove_universal_flags(
    -                                    config_vars))
    -
    -    def test__remove_unsupported_archs(self):
    -        config_vars = {
    -        'CC': 'clang',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
    -        'LDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    -        }
    -        expected_vars = {
    -        'CC': 'clang',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3  -arch i386  ',
    -        'LDFLAGS': ' -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'gcc-4.0 -bundle   -arch i386 -g',
    -        'LDSHARED': 'gcc-4.0 -bundle   -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    -        }
    -        self.add_expected_saved_initial_values(config_vars, expected_vars)
    -
    -        suffix = (':' + self.env['PATH']) if self.env['PATH'] else ''
    -        self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix
    -        c_name = 'clang'
    -        test.support.unlink(c_name)
    -        self.addCleanup(test.support.unlink, c_name)
    -        # exit status 255 means no PPC support in this compiler chain
    -        with open(c_name, 'w') as f:
    -            f.write("#!/bin/sh\nexit 255")
    -        os.chmod(c_name, stat.S_IRWXU)
    -        self.assertEqual(expected_vars,
    -                            _osx_support._remove_unsupported_archs(
    -                                    config_vars))
    -
    -    def test__override_all_archs(self):
    -        self.env['ARCHFLAGS'] = '-arch x86_64'
    -        config_vars = {
    -        'CC': 'clang',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
    -        'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    -        }
    -        expected_vars = {
    -        'CC': 'clang',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3     -arch x86_64',
    -        'LDFLAGS': '    -g -arch x86_64',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'gcc-4.0 -bundle    -g -arch x86_64',
    -        'LDSHARED': 'gcc-4.0 -bundle   -isysroot '
    -                        '/Developer/SDKs/MacOSX10.4u.sdk -g -arch x86_64',
    -        }
    -        self.add_expected_saved_initial_values(config_vars, expected_vars)
    -
    -        self.assertEqual(expected_vars,
    -                            _osx_support._override_all_archs(
    -                                    config_vars))
    -
    -    def test__check_for_unavailable_sdk(self):
    -        config_vars = {
    -        'CC': 'clang',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  '
    -                        '-isysroot /Developer/SDKs/MacOSX10.1.sdk',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.1.sdk',
    -        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
    -        'LDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.1.sdk -g',
    -        }
    -        expected_vars = {
    -        'CC': 'clang',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  '
    -                        ' ',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I.  ',
    -        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
    -        'LDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 '
    -                        ' -g',
    -        }
    -        self.add_expected_saved_initial_values(config_vars, expected_vars)
    -
    -        self.assertEqual(expected_vars,
    -                            _osx_support._check_for_unavailable_sdk(
    -                                    config_vars))
    -
    -    def test_get_platform_osx(self):
    -        # Note, get_platform_osx is currently tested more extensively
    -        # indirectly by test_sysconfig and test_distutils
    -        config_vars = {
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  '
    -                        '-isysroot /Developer/SDKs/MacOSX10.1.sdk',
    -        'MACOSX_DEPLOYMENT_TARGET': '10.6',
    -        }
    -        result = _osx_support.get_platform_osx(config_vars, ' ', ' ', ' ')
    -        self.assertEqual(('macosx', '10.6', 'fat'), result)
    -
    -def test_main():
    -    if sys.platform == 'darwin':
    -        test.support.run_unittest(Test_OSXSupport)
    -
    -if __name__ == "__main__":
    -    test_main()
    diff --git a/lib-python/3/test/test_file_eintr.py b/lib-python/3/test/test_file_eintr.py
    deleted file mode 100644
    --- a/lib-python/3/test/test_file_eintr.py
    +++ /dev/null
    @@ -1,236 +0,0 @@
    -# Written to test interrupted system calls interfering with our many buffered
    -# IO implementations.  http://bugs.python.org/issue12268
    -#
    -# It was suggested that this code could be merged into test_io and the tests
    -# made to work using the same method as the existing signal tests in test_io.
    -# I was unable to get single process tests using alarm or setitimer that way
    -# to reproduce the EINTR problems.  This process based test suite reproduces
    -# the problems prior to the issue12268 patch reliably on Linux and OSX.
    -#  - gregory.p.smith
    -
    -import os
    -import select
    -import signal
    -import subprocess
    -import sys
    -from test.support import run_unittest
    -import time
    -import unittest
    -
    -# Test import all of the things we're about to try testing up front.
    -from _io import FileIO
    -
    -
-@unittest.skipUnless(os.name == 'posix', 'tests requires a posix system.')
    -class TestFileIOSignalInterrupt(unittest.TestCase):
    -    def setUp(self):
    -        self._process = None
    -
    -    def tearDown(self):
    -        if self._process and self._process.poll() is None:
    -            try:
    -                self._process.kill()
    -            except OSError:
    -                pass
    -
    -    def _generate_infile_setup_code(self):
    -        """Returns the infile = ... line of code for the reader process.
    -
    -        subclasseses should override this to test different IO objects.
    -        """
    -        return ('import _io ;'
    -                'infile = _io.FileIO(sys.stdin.fileno(), "rb")')
    -
    -    def fail_with_process_info(self, why, stdout=b'', stderr=b'',
    -                               communicate=True):
    -        """A common way to cleanup and fail with useful debug output.
    -
    -        Kills the process if it is still running, collects remaining output
    -        and fails the test with an error message including the output.
    -
    -        Args:
    -            why: Text to go after "Error from IO process" in the message.
    -            stdout, stderr: standard output and error from the process so
    -                far to include in the error message.
    -            communicate: bool, when True we call communicate() on the process
    -                after killing it to gather additional output.
    -        """
    -        if self._process.poll() is None:
    -            time.sleep(0.1)  # give it time to finish printing the error.
    -            try:
    -                self._process.terminate()  # Ensure it dies.
    -            except OSError:
    -                pass
    -        if communicate:
    -            stdout_end, stderr_end = self._process.communicate()
    -            stdout += stdout_end
    -            stderr += stderr_end
    -        self.fail('Error from IO process %s:\nSTDOUT:\n%sSTDERR:\n%s\n' %
    -                  (why, stdout.decode(), stderr.decode()))
    -
    -    def _test_reading(self, data_to_write, read_and_verify_code):
    -        """Generic buffered read method test harness to validate EINTR behavior.
    -
    -        Also validates that Python signal handlers are run during the read.
    -
    -        Args:
    -            data_to_write: String to write to the child process for reading
    -                before sending it a signal, confirming the signal was handled,
    -                writing a final newline and closing the infile pipe.
    -            read_and_verify_code: Single "line" of code to read from a file
    -                object named 'infile' and validate the result.  This will be
    -                executed as part of a python subprocess fed data_to_write.
    -        """
    -        infile_setup_code = self._generate_infile_setup_code()
    -        # Total pipe IO in this function is smaller than the minimum posix OS
    -        # pipe buffer size of 512 bytes.  No writer should block.
    -        assert len(data_to_write) < 512, 'data_to_write must fit in pipe buf.'
    -
    -        # Start a subprocess to call our read method while handling a signal.
    -        self._process = subprocess.Popen(
    -                [sys.executable, '-u', '-c',
    -                 'import signal, sys ;'
    -                 'signal.signal(signal.SIGINT, '
    -                               'lambda s, f: sys.stderr.write("$\\n")) ;'
    -                 + infile_setup_code + ' ;' +
    -                 'sys.stderr.write("Worm Sign!\\n") ;'
    -                 + read_and_verify_code + ' ;' +
    -                 'infile.close()'
    -                ],
    -                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
    -                stderr=subprocess.PIPE)
    -
    -        # Wait for the signal handler to be installed.
    -        worm_sign = self._process.stderr.read(len(b'Worm Sign!\n'))
    -        if worm_sign != b'Worm Sign!\n':  # See also, Dune by Frank Herbert.
    -            self.fail_with_process_info('while awaiting a sign',
    -                                        stderr=worm_sign)
    -        self._process.stdin.write(data_to_write)
    -
    -        signals_sent = 0
    -        rlist = []
    -        # We don't know when the read_and_verify_code in our child is actually
    -        # executing within the read system call we want to interrupt.  This
    -        # loop waits for a bit before sending the first signal to increase
    -        # the likelihood of that.  Implementations without correct EINTR
    -        # and signal handling usually fail this test.
    -        while not rlist:
    -            rlist, _, _ = select.select([self._process.stderr], (), (), 0.05)
    -            self._process.send_signal(signal.SIGINT)
    -            signals_sent += 1
    -            if signals_sent > 200:
    -                self._process.kill()
    -                self.fail('reader process failed to handle our signals.')
    -        # This assumes anything unexpected that writes to stderr will also
    -        # write a newline.  That is true of the traceback printing code.
    -        signal_line = self._process.stderr.readline()
    -        if signal_line != b'$\n':
    -            self.fail_with_process_info('while awaiting signal',
    -                                        stderr=signal_line)
    -
    -        # We append a newline to our input so that a readline call can
    -        # end on its own before the EOF is seen and so that we're testing
    -        # the read call that was interrupted by a signal before the end of
    -        # the data stream has been reached.
    -        stdout, stderr = self._process.communicate(input=b'\n')
    -        if self._process.returncode:
    -            self.fail_with_process_info(
    -                    'exited rc=%d' % self._process.returncode,
    -                    stdout, stderr, communicate=False)
    -        # PASS!
    -
    -    # String format for the read_and_verify_code used by read methods.
    -    _READING_CODE_TEMPLATE = (
    -            'got = infile.{read_method_name}() ;'
    -            'expected = {expected!r} ;'
    -            'assert got == expected, ('
    -                    '"{read_method_name} returned wrong data.\\n"'
    -                    '"got data %r\\nexpected %r" % (got, expected))'
    -            )
    -
    -    def test_readline(self):
    -        """readline() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello, world!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='readline',
    -                        expected=b'hello, world!\n'))
    -
    -    def test_readlines(self):
    -        """readlines() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello\nworld!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='readlines',
    -                        expected=[b'hello\n', b'world!\n']))
    -
    -    def test_readall(self):
    -        """readall() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello\nworld!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='readall',
    -                        expected=b'hello\nworld!\n'))
    -        # read() is the same thing as readall().
    -        self._test_reading(
    -                data_to_write=b'hello\nworld!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='read',
    -                        expected=b'hello\nworld!\n'))
    -
    -
    -class TestBufferedIOSignalInterrupt(TestFileIOSignalInterrupt):
    -    def _generate_infile_setup_code(self):
    -        """Returns the infile = ... line of code to make a BufferedReader."""
    -        return ('infile = open(sys.stdin.fileno(), "rb") ;'
    -                'import _io ;assert isinstance(infile, _io.BufferedReader)')
    -
    -    def test_readall(self):
    -        """BufferedReader.read() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello\nworld!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='read',
    -                        expected=b'hello\nworld!\n'))
    -
    -
    -class TestTextIOSignalInterrupt(TestFileIOSignalInterrupt):
    -    def _generate_infile_setup_code(self):
    -        """Returns the infile = ... line of code to make a TextIOWrapper."""
    -        return ('infile = open(sys.stdin.fileno(), "rt", newline=None) ;'
    -                'import _io ;assert isinstance(infile, _io.TextIOWrapper)')
    -
    -    def test_readline(self):
    -        """readline() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello, world!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='readline',
    -                        expected='hello, world!\n'))
    -
    -    def test_readlines(self):
    -        """readlines() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello\r\nworld!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='readlines',
    -                        expected=['hello\n', 'world!\n']))
    -
    -    def test_readall(self):
    -        """read() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello\nworld!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='read',
    -                        expected="hello\nworld!\n"))
    -
    -
    -def test_main():
    -    test_cases = [
    -            tc for tc in globals().values()
    -            if isinstance(tc, type) and issubclass(tc, unittest.TestCase)]
    -    run_unittest(*test_cases)
    -
    -
    -if __name__ == '__main__':
    -    test_main()
    diff --git a/lib-python/3/test/test_tools.py b/lib-python/3/test/test_tools.py
    deleted file mode 100644
    --- a/lib-python/3/test/test_tools.py
    +++ /dev/null
    @@ -1,433 +0,0 @@
    -"""Tests for scripts in the Tools directory.
    -
    -This file contains regression tests for some of the scripts found in the
    -Tools directory of a Python checkout or tarball, such as reindent.py.
    -"""
    -
    -import os
    -import sys
    -import imp
    -import unittest
    -import shutil
    -import subprocess
    -import sysconfig
    -import tempfile
    -import textwrap
    -from test import support
    -from test.script_helper import assert_python_ok, temp_dir
    -
    -if not sysconfig.is_python_build():
    -    # XXX some installers do contain the tools, should we detect that
    -    # and run the tests in that case too?
    -    raise unittest.SkipTest('test irrelevant for an installed Python')
    -
    -basepath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
    -                        'Tools')
    -scriptsdir = os.path.join(basepath, 'scripts')
    -
    -
    -class ReindentTests(unittest.TestCase):
    -    script = os.path.join(scriptsdir, 'reindent.py')
    -
    -    def test_noargs(self):
    -        assert_python_ok(self.script)
    -
    -    def test_help(self):
    -        rc, out, err = assert_python_ok(self.script, '-h')
    -        self.assertEqual(out, b'')
    -        self.assertGreater(err, b'')
    -
    -
    -class PindentTests(unittest.TestCase):
    -    script = os.path.join(scriptsdir, 'pindent.py')
    -
    -    def assertFileEqual(self, fn1, fn2):
    -        with open(fn1) as f1, open(fn2) as f2:
    -            self.assertEqual(f1.readlines(), f2.readlines())
    -
    -    def pindent(self, source, *args):
    -        with subprocess.Popen(
    -                (sys.executable, self.script) + args,
    -                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
    -                universal_newlines=True) as proc:
    -            out, err = proc.communicate(source)
    -        self.assertIsNone(err)
    -        return out
    -
    -    def lstriplines(self, data):
    -        return '\n'.join(line.lstrip() for line in data.splitlines()) + '\n'
    -
    -    def test_selftest(self):
    -        self.maxDiff = None
    -        with temp_dir() as directory:
    -            data_path = os.path.join(directory, '_test.py')
    -            with open(self.script) as f:
    -                closed = f.read()
    -            with open(data_path, 'w') as f:
    -                f.write(closed)
    -
    -            rc, out, err = assert_python_ok(self.script, '-d', data_path)
    -            self.assertEqual(out, b'')
    -            self.assertEqual(err, b'')
    -            backup = data_path + '~'
    -            self.assertTrue(os.path.exists(backup))
    -            with open(backup) as f:
    -                self.assertEqual(f.read(), closed)
    -            with open(data_path) as f:
    -                clean = f.read()
    -            compile(clean, '_test.py', 'exec')
    -            self.assertEqual(self.pindent(clean, '-c'), closed)
    -            self.assertEqual(self.pindent(closed, '-d'), clean)
    -
    -            rc, out, err = assert_python_ok(self.script, '-c', data_path)
    -            self.assertEqual(out, b'')
    -            self.assertEqual(err, b'')
    -            with open(backup) as f:
    -                self.assertEqual(f.read(), clean)
    -            with open(data_path) as f:
    -                self.assertEqual(f.read(), closed)
    -
    -            broken = self.lstriplines(closed)
    -            with open(data_path, 'w') as f:
    -                f.write(broken)
    -            rc, out, err = assert_python_ok(self.script, '-r', data_path)
    -            self.assertEqual(out, b'')
    -            self.assertEqual(err, b'')
    -            with open(backup) as f:
    -                self.assertEqual(f.read(), broken)
    -            with open(data_path) as f:
    -                indented = f.read()
    -            compile(indented, '_test.py', 'exec')
    -            self.assertEqual(self.pindent(broken, '-r'), indented)
    -
    -    def pindent_test(self, clean, closed):
    -        self.assertEqual(self.pindent(clean, '-c'), closed)
    -        self.assertEqual(self.pindent(closed, '-d'), clean)
    -        broken = self.lstriplines(closed)
    -        self.assertEqual(self.pindent(broken, '-r', '-e', '-s', '4'), closed)
    -
    -    def test_statements(self):
    -        clean = textwrap.dedent("""\
    -            if a:
    -                pass
    -
    -            if a:
    -                pass
    -            else:
    -                pass
    -
    -            if a:
    -                pass
    -            elif:
    -                pass
    -            else:
    -                pass
    -
    -            while a:
    -                break
    -
    -            while a:
    -                break
    -            else:
    -                pass
    -
    -            for i in a:
    -                break
    -
    -            for i in a:
    -                break
    -            else:
    -                pass
    -
    -            try:
    -                pass
    -            finally:
    -                pass
    -
    -            try:
    -                pass
    -            except TypeError:
    -                pass
    -            except ValueError:
    -                pass
    -            else:
    -                pass
    -
    -            try:
    -                pass
    -            except TypeError:
    -                pass
    -            except ValueError:
    -                pass
    -            finally:
    -                pass
    -
    -            with a:
    -                pass
    -
    -            class A:
    -                pass
    -
    -            def f():
    -                pass
    -            """)
    -
    -        closed = textwrap.dedent("""\
    -            if a:
    -                pass
    -            # end if
    -
    -            if a:
    -                pass
    -            else:
    -                pass
    -            # end if
    -
    -            if a:
    -                pass
    -            elif:
    -                pass
    -            else:
    -                pass
    -            # end if
    -
    -            while a:
    -                break
    -            # end while
    -
    -            while a:
    -                break
    -            else:
    -                pass
    -            # end while
    -
    -            for i in a:
    -                break
    -            # end for
    -
    -            for i in a:
    -                break
    -            else:
    -                pass
    -            # end for
    -
    -            try:
    -                pass
    -            finally:
    -                pass
    -            # end try
    -
    -            try:
    -                pass
    -            except TypeError:
    -                pass
    -            except ValueError:
    -                pass
    -            else:
    -                pass
    -            # end try
    -
    -            try:
    -                pass
    -            except TypeError:
    -                pass
    -            except ValueError:
    -                pass
    -            finally:
    -                pass
    -            # end try
    -
    -            with a:
    -                pass
    -            # end with
    -
    -            class A:
    -                pass
    -            # end class A
    -
    -            def f():
    -                pass
    -            # end def f
    -            """)
    -        self.pindent_test(clean, closed)
    -
    -    def test_multilevel(self):
    -        clean = textwrap.dedent("""\
    -            def foobar(a, b):
    -                if a == b:
    -                    a = a+1
    -                elif a < b:
    -                    b = b-1
    -                    if b > a: a = a-1
    -                else:
    -                    print 'oops!'
    -            """)
    -        closed = textwrap.dedent("""\
    -            def foobar(a, b):
    -                if a == b:
    -                    a = a+1
    -                elif a < b:
    -                    b = b-1
    -                    if b > a: a = a-1
    -                    # end if
    -                else:
    -                    print 'oops!'
    -                # end if
    -            # end def foobar
    -            """)
    -        self.pindent_test(clean, closed)
    -
    -    def test_preserve_indents(self):
    -        clean = textwrap.dedent("""\
    -            if a:
    -                     if b:
    -                              pass
    -            """)
    -        closed = textwrap.dedent("""\
    -            if a:
    -                     if b:
    -                              pass
    -                     # end if
    -            # end if
    -            """)
    -        self.assertEqual(self.pindent(clean, '-c'), closed)
    -        self.assertEqual(self.pindent(closed, '-d'), clean)
    -        broken = self.lstriplines(closed)
    -        self.assertEqual(self.pindent(broken, '-r', '-e', '-s', '9'), closed)
    -        clean = textwrap.dedent("""\
    -            if a:
    -            \tif b:
    -            \t\tpass
    -            """)
    -        closed = textwrap.dedent("""\
    -            if a:
    -            \tif b:
    -            \t\tpass
    -            \t# end if
    -            # end if
    -            """)
    -        self.assertEqual(self.pindent(clean, '-c'), closed)
    -        self.assertEqual(self.pindent(closed, '-d'), clean)
    -        broken = self.lstriplines(closed)
    -        self.assertEqual(self.pindent(broken, '-r'), closed)
    -
    -    def test_escaped_newline(self):
    -        clean = textwrap.dedent("""\
    -            class\\
    -            \\
    -             A:
    -               def\
    -            \\
    -            f:
    -                  pass
    -            """)
    -        closed = textwrap.dedent("""\
    -            class\\
    -            \\
    -             A:
    -               def\
    -            \\
    -            f:
    -                  pass
    -               # end def f
    -            # end class A
    -            """)
    -        self.assertEqual(self.pindent(clean, '-c'), closed)
    -        self.assertEqual(self.pindent(closed, '-d'), clean)
    -
    -    def test_empty_line(self):
    -        clean = textwrap.dedent("""\
    -            if a:
    -
    -                pass
    -            """)
    -        closed = textwrap.dedent("""\
    -            if a:
    -
    -                pass
    -            # end if
    -            """)
    -        self.pindent_test(clean, closed)
    -
    -    def test_oneline(self):
    -        clean = textwrap.dedent("""\
    -            if a: pass
    -            """)
    -        closed = textwrap.dedent("""\
    -            if a: pass
    -            # end if
    -            """)
    -        self.pindent_test(clean, closed)
    -
    -
    -class TestSundryScripts(unittest.TestCase):
    -    # At least make sure the rest don't have syntax errors.  When tests are
    -    # added for a script it should be added to the whitelist below.
    -
    -    # scripts that have independent tests.
    -    whitelist = ['reindent.py']
    -    # scripts that can't be imported without running
    -    blacklist = ['make_ctype.py']
    -    # scripts that use windows-only modules
    -    windows_only = ['win_add2path.py']
    -    # blacklisted for other reasons
    -    other = ['analyze_dxp.py']
    -
    -    skiplist = blacklist + whitelist + windows_only + other
    -
    -    def setUp(self):
    -        cm = support.DirsOnSysPath(scriptsdir)
    -        cm.__enter__()
    -        self.addCleanup(cm.__exit__)
    -
    -    def test_sundry(self):
    -        for fn in os.listdir(scriptsdir):
    -            if fn.endswith('.py') and fn not in self.skiplist:
    -                __import__(fn[:-3])
    -
    -    @unittest.skipIf(sys.platform != "win32", "Windows-only test")
    -    def test_sundry_windows(self):
    -        for fn in self.windows_only:
    -            __import__(fn[:-3])
    -
    -    @unittest.skipIf(not support.threading, "test requires _thread module")
    -    def test_analyze_dxp_import(self):
    -        if hasattr(sys, 'getdxp'):
    -            import analyze_dxp
    -        else:
    -            with self.assertRaises(RuntimeError):
    -                import analyze_dxp
    -
    -
    -class PdepsTests(unittest.TestCase):
    -
    -    @classmethod
    -    def setUpClass(self):
    -        path = os.path.join(scriptsdir, 'pdeps.py')
    -        self.pdeps = imp.load_source('pdeps', path)
    -
    -    @classmethod
    -    def tearDownClass(self):
    -        if 'pdeps' in sys.modules:
    -            del sys.modules['pdeps']
    -
    -    def test_process_errors(self):
    -        # Issue #14492: m_import.match(line) can be None.
    -        with tempfile.TemporaryDirectory() as tmpdir:
    -            fn = os.path.join(tmpdir, 'foo')
    -            with open(fn, 'w') as stream:
    -                stream.write("#!/this/will/fail")
    -            self.pdeps.process(fn, {})
    -
    -    def test_inverse_attribute_error(self):
    -        # Issue #14492: this used to fail with an AttributeError.
    -        self.pdeps.inverse({'a': []})
    -
    -
    -def test_main():
    -    support.run_unittest(*[obj for obj in globals().values()
    -                               if isinstance(obj, type)])
    -
    -
    -if __name__ == '__main__':
    -    unittest.main()
    diff --git a/lib-python/3/tkinter/test/test_tkinter/test_misc.py b/lib-python/3/tkinter/test/test_tkinter/test_misc.py
    deleted file mode 100644
    --- a/lib-python/3/tkinter/test/test_tkinter/test_misc.py
    +++ /dev/null
    @@ -1,45 +0,0 @@
    -import unittest
    -import tkinter
    -from tkinter import ttk
    -from test import support
    -
    -support.requires('gui')
    -
    -class MiscTest(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.root = ttk.setup_master()
    -
    -    def test_tk_setPalette(self):
    -        root = self.root
    -        root.tk_setPalette('black')
    -        self.assertEqual(root['background'], 'black')
    -        root.tk_setPalette('white')
    -        self.assertEqual(root['background'], 'white')
    -        self.assertRaisesRegex(tkinter.TclError,
    -                '^unknown color name "spam"$',
    -                root.tk_setPalette, 'spam')
    -
    -        root.tk_setPalette(background='black')
    -        self.assertEqual(root['background'], 'black')
    -        root.tk_setPalette(background='blue', highlightColor='yellow')
    -        self.assertEqual(root['background'], 'blue')
    -        self.assertEqual(root['highlightcolor'], 'yellow')
    -        root.tk_setPalette(background='yellow', highlightColor='blue')
    -        self.assertEqual(root['background'], 'yellow')
    -        self.assertEqual(root['highlightcolor'], 'blue')
    -        self.assertRaisesRegex(tkinter.TclError,
    -                '^unknown color name "spam"$',
    -                root.tk_setPalette, background='spam')
    -        self.assertRaisesRegex(tkinter.TclError,
    -                '^must specify a background color$',
    -                root.tk_setPalette, spam='white')
    -        self.assertRaisesRegex(tkinter.TclError,
    -                '^must specify a background color$',
    -                root.tk_setPalette, highlightColor='blue')
    -
    -
    -tests_gui = (MiscTest, )
    -
    -if __name__ == "__main__":
    -    support.run_unittest(*tests_gui)
    diff --git a/lib_pypy/_cffi_ssl/README.md b/lib_pypy/_cffi_ssl/README.md
    --- a/lib_pypy/_cffi_ssl/README.md
    +++ b/lib_pypy/_cffi_ssl/README.md
    @@ -1,21 +1,22 @@
     # PyPy's SSL module
     
    -All of the CFFI code is copied from cryptography, wich patches contributed
    -back to cryptography. PyPy vendors it's own copy of the cffi backend thus
    -it renames the compiled shared object to _pypy_openssl.so (which means
    -that cryptography can ship their own cffi backend)
+All of the CFFI code is copied from cryptography. PyPy vendors its own copy of
+the cffi backend and thus renames the compiled shared object to _pypy_openssl.so
+(which means that cryptography can ship its own cffi backend)
     
    -NOTE: currently, we have the following changes:
    +# Modifications to cryptography 2.7
     
    -* ``_cffi_src/openssl/callbacks.py`` to not rely on the CPython C API
    -  (this change is now backported)
    -
    -* ``_cffi_src/utils.py`` for issue #2575 (29c9a89359e4)
    -
    -* ``_cffi_src/openssl/x509_vfy.py`` for issue #2605 (ca4d0c90f5a1)
    -
    -* ``_cffi_src/openssl/pypy_win32_extra.py`` for Win32-only functionality like ssl.enum_certificates()
    -
    +- `_cffi_src/openssl/asn1.py` : revert removal of `ASN1_TIME_print`,
    +  `ASN1_ITEM`, `ASN1_ITEM_EXP`, `ASN1_VALUE`, `ASN1_item_d2i`
    +- `_cffi_src/openssl/bio.py` : revert removal of `BIO_s_file`, `BIO_read_filename`
    +- `_cffi_src/openssl/evp.py` : revert removal of `EVP_MD_size`
    +- `_cffi_src/openssl/nid.py` : revert removal of `NID_ad_OCSP`,
    +  `NID_info_access`, `NID_ad_ca_issuers`, `NID_crl_distribution_points`
    +- `_cffi_src/openssl/pem.py` : revert removal of `PEM_read_bio_X509_AUX`
    +- `_cffi_src/openssl/x509.py` : revert removal of `X509_get_ext_by_NID`,
    +  `i2d_X509`
    +- `_cffi_src/openssl/x509v3.py` : revert removal of `X509V3_EXT_get`,
    +  `X509V3_EXT_METHOD`
     
     # Tests?
     
    @@ -25,11 +26,8 @@
     
     Copy over all the sources into the folder `lib_pypy/_cffi_ssl/*`. Updating the cffi backend can be simply done by the following command::
     
    -    $ cp -r /src/_cffi_src/* .
    -
    -NOTE: you need to keep our version of ``_cffi_src/openssl/callbacks.py``
    -for now!
    +    $ cp -r /src/* .
     
 # Cryptography version
     
    -Copied over release version `1.7.2`
    +Copied over release version `2.7`
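
A rough Python equivalent of the update step described in the README hunk above; the
cryptography checkout path below is a placeholder, not something taken from the diff
(and dirs_exist_ok needs Python 3.8+):

    import shutil

    CRYPTOGRAPHY_SRC = "/path/to/cryptography/src"   # hypothetical checkout location
    TARGET = "lib_pypy/_cffi_ssl"

    # copy the upstream src/ tree over the vendored copy, in place
    shutil.copytree(CRYPTOGRAPHY_SRC, TARGET, dirs_exist_ok=True)
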
    diff --git a/lib_pypy/_cffi_ssl/_cffi_src/build_commoncrypto.py b/lib_pypy/_cffi_ssl/_cffi_src/build_commoncrypto.py
    deleted file mode 100644
    --- a/lib_pypy/_cffi_ssl/_cffi_src/build_commoncrypto.py
    +++ /dev/null
    @@ -1,33 +0,0 @@
    -# This file is dual licensed under the terms of the Apache License, Version
    -# 2.0, and the BSD License. See the LICENSE file in the root of this repository
    -# for complete details.
    -
    -from __future__ import absolute_import, division, print_function
    -
    -from _cffi_src.utils import build_ffi_for_binding
    -
    -
    -ffi = build_ffi_for_binding(
    -    module_name="_commoncrypto",
    -    module_prefix="_cffi_src.commoncrypto.",
    -    modules=[
    -        "cf",
    -        "common_digest",
    -        "common_hmac",
    -        "common_key_derivation",
    -        "common_cryptor",
    -        "common_symmetric_key_wrap",
    -        "seccertificate",
    -        "secimport",
    -        "secitem",
    -        "seckey",
    -        "seckeychain",
    -        "secpolicy",
    -        "sectransform",
    -        "sectrust",
    -        "secure_transport",
    -    ],
    -    extra_link_args=[
    -        "-framework", "Security", "-framework", "CoreFoundation"
    -    ],
    -)
    diff --git a/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py b/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py
    --- a/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py
    +++ b/lib_pypy/_cffi_ssl/_cffi_src/build_openssl.py
    @@ -13,31 +13,43 @@
     
     
     def _get_openssl_libraries(platform):
    +    if os.environ.get("CRYPTOGRAPHY_SUPPRESS_LINK_FLAGS", None):
    +        return []
         # OpenSSL goes by a different library name on different operating systems.
    -    if platform == "darwin":
    -        return _osx_libraries(
    -            os.environ.get("CRYPTOGRAPHY_OSX_NO_LINK_FLAGS")
    +    if platform == "win32" and compiler_type() == "msvc":
    +        windows_link_legacy_openssl = os.environ.get(
    +            "CRYPTOGRAPHY_WINDOWS_LINK_LEGACY_OPENSSL", None
             )
    -    elif platform == "win32":
    -        if compiler_type() == "msvc":
    +        if 0 and windows_link_legacy_openssl is None:
    +            # Link against the 1.1.0 names
    +            libs = ["libssl", "libcrypto"]
    +        else:
    +            # Link against the 1.0.2 and lower names
                 libs = ["libeay32", "ssleay32"]
    -        else:
    -            libs = ["ssl", "crypto"]
             return libs + ["advapi32", "crypt32", "gdi32", "user32", "ws2_32"]
         else:
    +        # darwin, linux, mingw all use this path
             # In some circumstances, the order in which these libs are
             # specified on the linker command-line is significant;
             # libssl must come before libcrypto
    -        # (http://marc.info/?l=openssl-users&m=135361825921871)
    +        # (https://marc.info/?l=openssl-users&m=135361825921871)
             return ["ssl", "crypto"]
     
     
    -def _osx_libraries(build_static):
    -    # For building statically we don't want to pass the -lssl or -lcrypto flags
    -    if build_static == "1":
    +def _extra_compile_args(platform):
    +    """
    +    We set -Wconversion args here so that we only do Wconversion checks on the
    +    code we're compiling and not on cffi itself (as passing -Wconversion in
    +    CFLAGS would do). We set no error on sign conversion because some
    +    function signatures in OpenSSL have changed from long -> unsigned long
    +    in the past. Since that isn't a precision issue we don't care.
    +    When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_110 we can
    +    revisit this.
    +    """
    +    if platform not in ["win32", "hp-ux11", "sunos5"]:
    +        return ["-Wconversion", "-Wno-error=sign-conversion"]
    +    else:
             return []
    -    else:
    -        return ["ssl", "crypto"]
     
     
     ffi = build_ffi_for_binding(
    @@ -52,9 +64,9 @@
             "bignum",
             "bio",
             "cmac",
    -        "cms",
             "conf",
             "crypto",
    +        "ct",
             "dh",
             "dsa",
             "ec",
    @@ -63,6 +75,7 @@
             "engine",
    
    From pypy.commits at gmail.com  Mon Aug 26 11:11:57 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 08:11:57 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: hg merge default
    Message-ID: <5d63f6bd.1c69fb81.aee9c.0704@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6
    Changeset: r97267:e4eba87b1754
    Date: 2019-08-26 17:04 +0200
    http://bitbucket.org/pypy/pypy/changeset/e4eba87b1754/
    
    Log:	hg merge default
    
    diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py
    --- a/pypy/interpreter/app_main.py
    +++ b/pypy/interpreter/app_main.py
    @@ -222,6 +222,8 @@
         print('    turn off the JIT')
         print(' help')
         print('    print this page')
    +    print()
    +    print('The "pypyjit" module can be used to control the JIT from inside python')
     
     def print_version(*args):
         initstdio()
    diff --git a/pypy/module/_io/interp_bufferedio.py b/pypy/module/_io/interp_bufferedio.py
    --- a/pypy/module/_io/interp_bufferedio.py
    +++ b/pypy/module/_io/interp_bufferedio.py
    @@ -131,7 +131,7 @@
                             "%s() returned too much data: "
                             "%d bytes requested, %d returned",
                             methodname, length, len(data))
    -        rwbuffer.setslice(0, data)
    +        self.output_slice(space, rwbuffer, 0, data)
             return space.newint(len(data))
     
     W_BufferedIOBase.typedef = TypeDef(
    @@ -609,7 +609,7 @@
             remaining = n
             written = 0
             if current_size:
    -            result_buffer.setslice(
    +            self.output_slice(space, result_buffer,
                     written, self.buffer[self.pos:self.pos + current_size])
                 remaining -= current_size
                 written += current_size
    @@ -654,7 +654,7 @@
                 if remaining > 0:
                     if size > remaining:
                         size = remaining
    -                result_buffer.setslice(
    +                self.output_slice(space, result_buffer,
                         written, self.buffer[self.pos:self.pos + size])
                     self.pos += size
                     written += size
    diff --git a/pypy/module/_io/interp_bytesio.py b/pypy/module/_io/interp_bytesio.py
    --- a/pypy/module/_io/interp_bytesio.py
    +++ b/pypy/module/_io/interp_bytesio.py
    @@ -92,7 +92,7 @@
             size = rwbuffer.getlength()
     
             output = self.read(size)
    -        rwbuffer.setslice(0, output)
    +        self.output_slice(space, rwbuffer, 0, output)
             return space.newint(len(output))
     
         def write_w(self, space, w_data):
    diff --git a/pypy/module/_io/interp_fileio.py b/pypy/module/_io/interp_fileio.py
    --- a/pypy/module/_io/interp_fileio.py
    +++ b/pypy/module/_io/interp_fileio.py
    @@ -475,7 +475,7 @@
                             return space.w_None
                         wrap_oserror(space, e, exception_name='w_IOError',
                                      eintr_retry=True)
    -            rwbuffer.setslice(0, buf)
    +            self.output_slice(space, rwbuffer, 0, buf)
                 return space.newint(len(buf))
             else:
                 # optimized case: reading more than 64 bytes into a rwbuffer
    diff --git a/pypy/module/_io/interp_iobase.py b/pypy/module/_io/interp_iobase.py
    --- a/pypy/module/_io/interp_iobase.py
    +++ b/pypy/module/_io/interp_iobase.py
    @@ -308,6 +308,14 @@
                     else:
                         break
     
    +    @staticmethod
    +    def output_slice(space, rwbuffer, target_pos, data):
    +        if target_pos + len(data) > rwbuffer.getlength():
    +            raise oefmt(space.w_RuntimeError,
    +                        "target buffer has shrunk during operation")
    +        rwbuffer.setslice(target_pos, data)
    +
    +
     W_IOBase.typedef = TypeDef(
         '_io._IOBase',
         __new__ = generic_new_descr(W_IOBase),
    diff --git a/pypy/objspace/std/unicodeobject.py b/pypy/objspace/std/unicodeobject.py
    --- a/pypy/objspace/std/unicodeobject.py
    +++ b/pypy/objspace/std/unicodeobject.py
    @@ -1042,6 +1042,11 @@
             return rutf8.codepoint_position_at_index(
                 self._utf8, self._get_index_storage(), index)
     
    +    def _codepoints_in_utf8(self, start, end):
    +        if self.is_ascii():
    +            return end - start
    +        return rutf8.codepoints_in_utf8(self._utf8, start, end)
    +
         @always_inline
         def _unwrap_and_search(self, space, w_sub, w_start, w_end, forward=True):
             w_sub = self.convert_arg_to_w_unicode(space, w_sub)
    @@ -1063,7 +1068,7 @@
                 res_index = self._utf8.find(w_sub._utf8, start_index, end_index)
                 if res_index < 0:
                     return None
    -            skip = rutf8.codepoints_in_utf8(self._utf8, start_index, res_index)
    +            skip = self._codepoints_in_utf8(start_index, res_index)
                 res = start + skip
                 assert res >= 0
                 return space.newint(res)
    @@ -1071,7 +1076,7 @@
                 res_index = self._utf8.rfind(w_sub._utf8, start_index, end_index)
                 if res_index < 0:
                     return None
    -            skip = rutf8.codepoints_in_utf8(self._utf8, res_index, end_index)
    +            skip = self._codepoints_in_utf8(res_index, end_index)
                 res = end - skip
                 assert res >= 0
                 return space.newint(res)
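
A minimal pure-Python sketch of the fast path added above, assuming only that an ASCII
string has exactly one byte per code point, so converting a byte offset from find()/rfind()
back into a character index needs no counting pass (the helper name is illustrative):

    def codepoints_between(utf8_bytes, start, end, is_ascii):
        if is_ascii:
            return end - start                # 1 byte == 1 code point
        # slow path: count UTF-8 lead bytes (anything not of the form 0b10xxxxxx)
        return sum(1 for b in utf8_bytes[start:end] if (b & 0xC0) != 0x80)

    assert codepoints_between(b"hello", 1, 4, is_ascii=True) == 3
    assert codepoints_between("h\u00e9llo".encode("utf-8"), 0, 3, is_ascii=False) == 2
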
    
    From pypy.commits at gmail.com  Mon Aug 26 11:11:59 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 08:11:59 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: merge heads
    Message-ID: <5d63f6bf.1c69fb81.5b395.2dfe@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6
    Changeset: r97268:08f03166561f
    Date: 2019-08-26 17:06 +0200
    http://bitbucket.org/pypy/pypy/changeset/08f03166561f/
    
    Log:	merge heads
    
    diff --git a/lib-python/3/test/list_tests.py b/lib-python/3/test/list_tests.py
    --- a/lib-python/3/test/list_tests.py
    +++ b/lib-python/3/test/list_tests.py
    @@ -546,7 +546,7 @@
             u += "eggs"
             self.assertEqual(u, self.type2test("spameggs"))
     
    -        self.assertRaises(TypeError, u.__iadd__, None)
    +        self.assertRaises(TypeError, "u += None")  # PyPy change
     
         def test_imul(self):
             u = self.type2test([0, 1])
    diff --git a/lib-python/3/test/test_asyncio/test_futures.py b/lib-python/3/test/test_asyncio/test_futures.py
    --- a/lib-python/3/test/test_asyncio/test_futures.py
    +++ b/lib-python/3/test/test_asyncio/test_futures.py
    @@ -534,7 +534,7 @@
     @unittest.skipUnless(hasattr(futures, '_CFuture'),
                          'requires the C _asyncio module')
     class CFutureTests(BaseFutureTests, test_utils.TestCase):
    -    cls = getattr(futures, '_CFuture')
    +    cls = getattr(futures, '_CFuture', None)
     
     
     class PyFutureTests(BaseFutureTests, test_utils.TestCase):
    diff --git a/lib_pypy/_ctypes/array.py b/lib_pypy/_ctypes/array.py
    --- a/lib_pypy/_ctypes/array.py
    +++ b/lib_pypy/_ctypes/array.py
    @@ -108,27 +108,29 @@
             # array accepts very strange parameters as part of structure
             # or function argument...
             from ctypes import c_char, c_wchar
    -        if issubclass(self._type_, c_char):
    -            if isinstance(value, bytes):
    -                if len(value) > self._length_:
    -                    raise ValueError("Invalid length")
    -                value = self(*value)
    -            elif not isinstance(value, self):
    -                raise TypeError("expected bytes, %s found"
    -                                % (value.__class__.__name__,))
    -        elif issubclass(self._type_, c_wchar):
    -            if isinstance(value, str):
    -                if len(value) > self._length_:
    -                    raise ValueError("Invalid length")
    -                value = self(*value)
    -            elif not isinstance(value, self):
    -                raise TypeError("expected unicode string, %s found"
    -                                % (value.__class__.__name__,))
    -        else:
    -            if isinstance(value, tuple):
    -                if len(value) > self._length_:
    -                    raise RuntimeError("Invalid length")
    -                value = self(*value)
    +        if isinstance(value, self):
    +            return value
    +        if hasattr(self, '_type_'):
    +            if issubclass(self._type_, c_char):
    +                if isinstance(value, bytes):
    +                    if len(value) > self._length_:
    +                        raise ValueError("Invalid length")
    +                    value = self(*value)
    +                elif not isinstance(value, self):
    +                    raise TypeError("expected bytes, %s found"
    +                                    % (value.__class__.__name__,))
    +            elif issubclass(self._type_, c_wchar):
    +                if isinstance(value, str):
    +                    if len(value) > self._length_:
    +                        raise ValueError("Invalid length")
    +                    value = self(*value)
    +                elif not isinstance(value, self):
    +                    raise TypeError("expected unicode string, %s found"
    +                                    % (value.__class__.__name__,))
    +        if isinstance(value, tuple):
    +            if len(value) > self._length_:
    +                raise RuntimeError("Invalid length")
    +            value = self(*value)
             return _CDataMeta.from_param(self, value)
     
         def _build_ffiargtype(self):
    diff --git a/lib_pypy/_ctypes/basics.py b/lib_pypy/_ctypes/basics.py
    --- a/lib_pypy/_ctypes/basics.py
    +++ b/lib_pypy/_ctypes/basics.py
    @@ -45,6 +45,9 @@
             self.details = details
     
     class _CDataMeta(type):
    +    def _is_abstract(self):
    +        return getattr(self, '_type_', 'abstract') == 'abstract'
    +
         def from_param(self, value):
             if isinstance(value, self):
                 return value
    @@ -95,6 +98,8 @@
             return self.from_address(dll.__pypy_dll__.getaddressindll(name))
     
         def from_buffer(self, obj, offset=0):
    +        if self._is_abstract():
    +            raise TypeError('abstract class')
             size = self._sizeofinstances()
             buf = memoryview(obj)
             if buf.nbytes < offset + size:
    @@ -111,6 +116,8 @@
             return result
     
         def from_buffer_copy(self, obj, offset=0):
    +        if self._is_abstract():
    +            raise TypeError('abstract class')
             size = self._sizeofinstances()
             buf = memoryview(obj)
             if buf.nbytes < offset + size:
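
A quick way to see the effect of the check added above (a sketch; ctypes.Array counts as
abstract here because it has no _type_, and CPython rejects the same call with the same
message):

    import ctypes

    try:
        ctypes.Array.from_buffer(bytearray(8))     # abstract base: no _type_/_length_
    except TypeError as e:
        print(e)                                   # abstract class

    ArrayType = ctypes.c_int * 4                   # a concrete array type works as before
    arr = ArrayType.from_buffer(bytearray(ctypes.sizeof(ArrayType)))
    print(arr[:])                                  # [0, 0, 0, 0]
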
    diff --git a/lib_pypy/_ctypes/pointer.py b/lib_pypy/_ctypes/pointer.py
    --- a/lib_pypy/_ctypes/pointer.py
    +++ b/lib_pypy/_ctypes/pointer.py
    @@ -40,14 +40,17 @@
         def from_param(self, value):
             if value is None:
                 return self(None)
    -        # If we expect POINTER(), but receive a  instance, accept
    -        # it by calling byref().
    -        if isinstance(value, self._type_):
    -            return byref(value)
    -        # Array instances are also pointers when the item types are the same.
    -        if isinstance(value, (_Pointer, Array)):
    -            if issubclass(type(value)._type_, self._type_):
    -                return value
    +        if isinstance(value, self):
    +            return value
    +        if hasattr(self, '_type_'):
    +            # If we expect POINTER(), but receive a  instance, accept
    +            # it by calling byref().
    +            if isinstance(value, self._type_):
    +                return byref(value)
    +            # Array instances are also pointers when the item types are the same.
    +            if isinstance(value, (_Pointer, Array)):
    +                if issubclass(type(value)._type_, self._type_):
    +                    return value
             return _CDataMeta.from_param(self, value)
     
         def _sizeofinstances(self):
    @@ -60,6 +63,8 @@
             return True
     
         def set_type(self, TP):
    +        if self._is_abstract():
    +            raise TypeError('abstract class')
             ffiarray = _rawffi.Array('P')
             def __init__(self, value=None):
                 if not hasattr(self, '_buffer'):
    @@ -179,6 +184,7 @@
             klass = type(_Pointer)("LP_%s" % cls,
                                    (_Pointer,),
                                    {})
    +        klass._type_ = 'P'
             _pointer_type_cache[id(klass)] = klass
             return klass
         else:
    diff --git a/lib_pypy/_ctypes/primitive.py b/lib_pypy/_ctypes/primitive.py
    --- a/lib_pypy/_ctypes/primitive.py
    +++ b/lib_pypy/_ctypes/primitive.py
    @@ -158,6 +158,8 @@
                         break
                 else:
                     raise AttributeError("cannot find _type_ attribute")
    +        if tp == 'abstract':
    +            tp = 'i'
             if (not isinstance(tp, str) or
                 not len(tp) == 1 or
                 tp not in SIMPLE_TYPE_CHARS):
    @@ -341,7 +343,8 @@
         def from_param(self, value):
             if isinstance(value, self):
                 return value
    -
    +        if self._type_ == 'abstract':
    +            raise TypeError('abstract class')
             from_param_f = FROM_PARAM_BY_TYPE.get(self._type_)
             if from_param_f:
                 res = from_param_f(self, value)
    @@ -371,7 +374,7 @@
             return self._type_ in "sPzUZXO"
     
     class _SimpleCData(_CData, metaclass=SimpleType):
    -    _type_ = 'i'
    +    _type_ = 'abstract'
     
         def __init__(self, value=DEFAULT_VALUE):
             if not hasattr(self, '_buffer'):
    diff --git a/lib_pypy/_ctypes/structure.py b/lib_pypy/_ctypes/structure.py
    --- a/lib_pypy/_ctypes/structure.py
    +++ b/lib_pypy/_ctypes/structure.py
    @@ -119,6 +119,8 @@
             if self.is_bitfield:
                 # bitfield member, use direct access
                 return obj._buffer.__getattr__(self.name)
    +        elif not isinstance(obj, _CData):
+            raise TypeError('not a ctype instance')
             else:
                 fieldtype = self.ctype
                 offset = self.num
    @@ -142,6 +144,8 @@
                 from ctypes import memmove
                 dest = obj._buffer.fieldaddress(self.name)
                 memmove(dest, arg, fieldtype._fficompositesize_)
    +        elif not isinstance(obj, _CData):
+            raise TypeError('not a ctype instance')
             else:
                 obj._buffer.__setattr__(self.name, arg)
     
    @@ -209,6 +213,9 @@
     
         __setattr__ = struct_setattr
     
    +    def _is_abstract(self):
    +        return False
    +
         def from_address(self, address):
             instance = StructOrUnion.__new__(self)
             if isinstance(address, _rawffi.StructureInstance):
    diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
    --- a/pypy/interpreter/baseobjspace.py
    +++ b/pypy/interpreter/baseobjspace.py
    @@ -296,15 +296,17 @@
                         "expected %s, got %T object", expected, self)
     
         def int(self, space):
    -        from pypy.objspace.std.intobject import _new_int
    +        from pypy.objspace.std.intobject import W_AbstractIntObject
             w_impl = space.lookup(self, '__int__')
             if w_impl is None:
                 self._typed_unwrap_error(space, "integer")
             w_result = space.get_and_call_function(w_impl, self)
     
             if space.is_w(space.type(w_result), space.w_int):
    +            assert isinstance(w_result, W_AbstractIntObject)
                 return w_result
             if space.isinstance_w(w_result, space.w_int):
    +            assert isinstance(w_result, W_AbstractIntObject)
                 tp = space.type(w_result).name
                 space.warn(space.newtext(
                     "__int__ returned non-int (type %s).  "
    @@ -816,7 +818,7 @@
                 return self.w_None
             return w_obj
     
    -    @signature(types.any(), types.bool(), returns=types.instance(W_Root))
    +    @signature(types.any(), types.bool(), returns=types.any())
         def newbool(self, b):
             if b:
                 return self.w_True
    diff --git a/pypy/module/_codecs/interp_codecs.py b/pypy/module/_codecs/interp_codecs.py
    --- a/pypy/module/_codecs/interp_codecs.py
    +++ b/pypy/module/_codecs/interp_codecs.py
    @@ -618,10 +618,10 @@
     # ____________________________________________________________
     # Helpers for unicode.encode() and bytes.decode()
     def lookup_text_codec(space, action, encoding):
    -    codec_info = lookup_codec(space, encoding)
    +    w_codec_info = lookup_codec(space, encoding)
         try:
             is_text_encoding = space.is_true(
    -                space.getattr(codec_info, space.newtext('_is_text_encoding')))
    +                space.getattr(w_codec_info, space.newtext('_is_text_encoding')))
         except OperationError as e:
             if e.match(space, space.w_AttributeError):
                 is_text_encoding = True
    @@ -630,8 +630,8 @@
         if not is_text_encoding:
             raise oefmt(space.w_LookupError,
                         "'%s' is not a text encoding; "
    -                    "use %s to handle arbitrary codecs", encoding, action)
    -    return codec_info
    +                    "use codecs.%s() to handle arbitrary codecs", encoding, action)
    +    return w_codec_info
     
     # ____________________________________________________________
     
    diff --git a/pypy/module/_io/interp_bufferedio.py b/pypy/module/_io/interp_bufferedio.py
    --- a/pypy/module/_io/interp_bufferedio.py
    +++ b/pypy/module/_io/interp_bufferedio.py
    @@ -319,7 +319,6 @@
             with self.lock:
                 if self.writable:
                     self._writer_flush_unlocked(space)
    -                self._writer_reset_buf()
     
                 if whence == 1:
                     pos -= self._raw_offset()
    @@ -371,6 +370,7 @@
     
         def _writer_flush_unlocked(self, space):
             if self.write_end == -1 or self.write_pos == self.write_end:
    +            self._writer_reset_buf()
                 return
             # First, rewind
             rewind = self._raw_offset() + (self.pos - self.write_pos)
    diff --git a/pypy/module/_io/interp_iobase.py b/pypy/module/_io/interp_iobase.py
    --- a/pypy/module/_io/interp_iobase.py
    +++ b/pypy/module/_io/interp_iobase.py
    @@ -266,24 +266,16 @@
     
         def readlines_w(self, space, w_hint=None):
             hint = convert_size(space, w_hint)
    -
             if hint <= 0:
                 return space.newlist(space.unpackiterable(self))
     
    +        length = 0
             lines_w = []
    -        length = 0
    -        while True:
    -            w_line = space.call_method(self, "readline")
    -            line_length = space.len_w(w_line)
    -            if line_length == 0: # done
    -                break
    -
    +        for w_line in space.iteriterable(self):
                 lines_w.append(w_line)
    -
    -            length += line_length
    +            length += space.len_w(w_line)
                 if length > hint:
                     break
    -
             return space.newlist(lines_w)
     
         def writelines_w(self, space, w_lines):
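
The rewritten loop above keeps the documented readlines(hint) behaviour: lines are
collected until their combined length exceeds the hint. A small illustration:

    import io

    f = io.StringIO("one\ntwo\nthree\nfour\n")
    print(f.readlines(5))    # ['one\n', 'two\n'] -- 4 + 4 > 5, so it stops after two lines
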
    diff --git a/pypy/module/_io/interp_textio.py b/pypy/module/_io/interp_textio.py
    --- a/pypy/module/_io/interp_textio.py
    +++ b/pypy/module/_io/interp_textio.py
    @@ -663,12 +663,15 @@
                 # To prepare for tell(), we need to snapshot a point in the file
                 # where the decoder's input buffer is empty.
                 w_state = space.call_method(self.w_decoder, "getstate")
    +            if (not space.isinstance_w(w_state, space.w_tuple)
    +                    or space.len_w(w_state) != 2):
    +                raise oefmt(space.w_TypeError, "illegal decoder state")
                 # Given this, we know there was a valid snapshot point
                 # len(dec_buffer) bytes ago with decoder state (b'', dec_flags).
                 w_dec_buffer, w_dec_flags = space.unpackiterable(w_state, 2)
                 if not space.isinstance_w(w_dec_buffer, space.w_bytes):
    -                msg = "decoder getstate() should have returned a bytes " \
    -                      "object not '%T'"
    +                msg = ("illegal decoder state: the first value should be a "
    +                    "bytes object not '%T'")
                     raise oefmt(space.w_TypeError, msg, w_dec_buffer)
                 dec_buffer = space.bytes_w(w_dec_buffer)
                 dec_flags = space.int_w(w_dec_flags)
    diff --git a/pypy/module/_io/test/test_bufferedio.py b/pypy/module/_io/test/test_bufferedio.py
    --- a/pypy/module/_io/test/test_bufferedio.py
    +++ b/pypy/module/_io/test/test_bufferedio.py
    @@ -513,6 +513,17 @@
             assert b.truncate() == 8
             assert b.tell() == 8
     
    +    def test_truncate_after_write(self):
    +        import _io
    +        raw = _io.FileIO(self.tmpfile, 'rb+')
    +        raw.write(b'\x00' * 50)
    +        raw.seek(0)
    +        b = _io.BufferedRandom(raw, 10)
    +        b.write(b'\x00' * 11)
    +        b.read(1)
    +        b.truncate()
    +        assert b.tell() == 12
    +
         def test_write_non_blocking(self):
             import _io, io
             class MockNonBlockWriterIO(io.RawIOBase):
    diff --git a/pypy/module/select/interp_epoll.py b/pypy/module/select/interp_epoll.py
    --- a/pypy/module/select/interp_epoll.py
    +++ b/pypy/module/select/interp_epoll.py
    @@ -87,10 +87,12 @@
             self.register_finalizer(space)
     
         @unwrap_spec(sizehint=int, flags=int)
    -    def descr__new__(space, w_subtype, sizehint=0, flags=0):
    -        if sizehint < 0:     # 'sizehint' is otherwise ignored
    +    def descr__new__(space, w_subtype, sizehint=-1, flags=0):
    +        if sizehint == -1:
    +            sizehint = FD_SETSIZE - 1
    +        elif sizehint <= 0:     # 'sizehint' is otherwise ignored
                 raise oefmt(space.w_ValueError,
    -                        "sizehint must be greater than zero, got %d", sizehint)
    +                        "sizehint must be positive or -1")
             epfd = epoll_create1(flags | EPOLL_CLOEXEC)
             if epfd < 0:
                 raise exception_from_saved_errno(space, space.w_IOError)
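
What the new signature above means at the application level (Linux only; a sketch that
follows the diff rather than a test of any particular build):

    import select

    ep = select.epoll()               # default sizehint
    ep.close()
    ep = select.epoll(sizehint=-1)    # -1 explicitly asks for the default
    ep.close()
    try:
        select.epoll(sizehint=0)      # non-positive values other than -1 are rejected
    except ValueError as e:
        print(e)                      # sizehint must be positive or -1
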
    diff --git a/pypy/objspace/std/intobject.py b/pypy/objspace/std/intobject.py
    --- a/pypy/objspace/std/intobject.py
    +++ b/pypy/objspace/std/intobject.py
    @@ -9,7 +9,7 @@
     import sys
     
     from rpython.rlib import jit
    -from rpython.rlib.objectmodel import instantiate
    +from rpython.rlib.objectmodel import instantiate, enforceargs
     from rpython.rlib.rarithmetic import (
         LONG_BIT, intmask, is_valid_int, ovfcheck, r_longlong, r_uint,
         string_to_int)
    @@ -851,80 +851,49 @@
                 sys.maxint == 2147483647)
     
     
    -def _string_to_int_or_long(space, w_inttype, w_source, string, base=10):
    +def _string_to_int_or_long(space, w_source, string, base=10):
         try:
    -        value = string_to_int(string, base, allow_underscores=True, no_implicit_octal=True)
    +        value = string_to_int(
    +            string, base, allow_underscores=True, no_implicit_octal=True)
    +        return wrapint(space, value)
         except ParseStringError as e:
             raise wrap_parsestringerror(space, e, w_source)
         except ParseStringOverflowError as e:
    -        return _retry_to_w_long(space, e.parser, w_inttype, w_source)
    +        return _retry_to_w_long(space, e.parser, w_source)
     
    -    if space.is_w(w_inttype, space.w_int):
    -        w_result = wrapint(space, value)
    -    else:
    -        w_result = space.allocate_instance(W_IntObject, w_inttype)
    -        W_IntObject.__init__(w_result, value)
    -    return w_result
     
    -
    -def _retry_to_w_long(space, parser, w_inttype, w_source):
    +def _retry_to_w_long(space, parser, w_source):
         from pypy.objspace.std.longobject import newbigint
         parser.rewind()
         try:
             bigint = rbigint._from_numberstring_parser(parser)
         except ParseStringError as e:
             raise wrap_parsestringerror(space, e, w_source)
    -    return newbigint(space, w_inttype, bigint)
    +    return newbigint(space, space.w_int, bigint)
     
     
     def _new_int(space, w_inttype, w_x, w_base=None):
    -    from pypy.objspace.std.longobject import (
    -        W_AbstractLongObject, W_LongObject, newlong, newbigint)
    -    if space.config.objspace.std.withsmalllong:
    -        from pypy.objspace.std.smalllongobject import W_SmallLongObject
    +    w_value = w_x     # 'x' is the keyword argument name in CPython
    +    if w_inttype is space.w_int:
    +        return _new_baseint(space, w_x, w_base)
         else:
    -        W_SmallLongObject = None
    +        w_tmp = _new_baseint(space, w_x, w_base)
    +        return _as_subint(space, w_inttype, w_tmp)
     
    -    w_longval = None
    -    w_value = w_x     # 'x' is the keyword argument name in CPython
    -    value = 0
    +def _new_baseint(space, w_value, w_base=None):
         if w_base is None:
    -        #import pdb; pdb.set_trace()
    -        # check for easy cases
    -        if type(w_value) is W_IntObject:
    -            if space.is_w(w_inttype, space.w_int):
    -                return w_value
    -            value = w_value.intval
    -            w_obj = space.allocate_instance(W_IntObject, w_inttype)
    -            W_IntObject.__init__(w_obj, value)
    -            return w_obj
    -        elif type(w_value) is W_LongObject:
    -            if space.is_w(w_inttype, space.w_int):
    -                return w_value
    -            return newbigint(space, w_inttype, w_value.num)
    -        elif W_SmallLongObject and type(w_value) is W_SmallLongObject:
    -            if space.is_w(w_inttype, space.w_int):
    -                return w_value
    -            return newbigint(space, w_inttype, space.bigint_w(w_value))
    +        if space.is_w(space.type(w_value), space.w_int):
    +            assert isinstance(w_value, W_AbstractIntObject)
    +            return w_value
             elif space.lookup(w_value, '__int__') is not None:
                 w_intvalue = space.int(w_value)
    -            if isinstance(w_intvalue, W_IntObject):
    -                if type(w_intvalue) is not W_IntObject:
    -                    w_intvalue = wrapint(space, w_intvalue.intval)
    -                return _new_int(space, w_inttype, w_intvalue)
    -            elif isinstance(w_intvalue, W_AbstractLongObject):
    -                if type(w_intvalue) is not W_LongObject:
    -                    w_intvalue = newlong(space, w_intvalue.asbigint())
    -                return _new_int(space, w_inttype, w_intvalue)
    -            else:
    -                # shouldn't happen
    -                raise oefmt(space.w_RuntimeError,
    -                    "internal error in int.__new__()")
    +            return _ensure_baseint(space, w_intvalue)
             elif space.lookup(w_value, '__trunc__') is not None:
                 w_obj = space.trunc(w_value)
    -            if not space.is_w(space.type(w_obj), space.w_int):
    +            if not space.isinstance_w(w_obj, space.w_int):
                     w_obj = space.int(w_obj)
    -            return _from_intlike(space, w_inttype, w_obj)
    +            assert isinstance(w_obj, W_AbstractIntObject)
    +            return _ensure_baseint(space, w_obj)
             elif space.isinstance_w(w_value, space.w_unicode):
                 from pypy.objspace.std.unicodeobject import unicode_to_decimal_w
                 try:
    @@ -933,10 +902,10 @@
                     raise oefmt(space.w_ValueError,
                                 'invalid literal for int() with base 10: %R',
                                 w_value)
    -            return _string_to_int_or_long(space, w_inttype, w_value, b)
    +            return _string_to_int_or_long(space, w_value, b)
             elif (space.isinstance_w(w_value, space.w_bytearray) or
                   space.isinstance_w(w_value, space.w_bytes)):
    -            return _string_to_int_or_long(space, w_inttype, w_value,
    +            return _string_to_int_or_long(space, w_value,
                                               space.charbuf_w(w_value))
             else:
                 # If object supports the buffer interface
    @@ -949,7 +918,7 @@
                                 "int() argument must be a string, a bytes-like "
                                 "object or a number, not '%T'", w_value)
                 else:
    -                return _string_to_int_or_long(space, w_inttype, w_value, buf)
    +                return _string_to_int_or_long(space, w_value, buf)
         else:
             try:
                 base = space.getindex_w(w_base, None)
    @@ -973,14 +942,40 @@
                 raise oefmt(space.w_TypeError,
                             "int() can't convert non-string with explicit base")
     
    -        return _string_to_int_or_long(space, w_inttype, w_value, s, base)
    +        return _string_to_int_or_long(space, w_value, s, base)
     
    + at enforceargs(None, None, W_AbstractIntObject, typecheck=False)
    +def _as_subint(space, w_inttype, w_value):
    +    from pypy.objspace.std.longobject import W_LongObject, newbigint
    +    if space.config.objspace.std.withsmalllong:
    +        from pypy.objspace.std.smalllongobject import W_SmallLongObject
    +    else:
    +        W_SmallLongObject = None
    +    if type(w_value) is W_IntObject:
    +        w_obj = space.allocate_instance(W_IntObject, w_inttype)
    +        W_IntObject.__init__(w_obj, w_value.intval)
    +        return w_obj
    +    elif type(w_value) is W_LongObject:
    +        return newbigint(space, w_inttype, w_value.num)
    +    elif W_SmallLongObject and type(w_value) is W_SmallLongObject:
    +        return newbigint(space, w_inttype, space.bigint_w(w_value))
     
    -def _from_intlike(space, w_inttype, w_intlike):
    -    if space.is_w(w_inttype, space.w_int):
    -        return w_intlike
    -    from pypy.objspace.std.longobject import newbigint
    -    return newbigint(space, w_inttype, space.bigint_w(w_intlike))
    + at enforceargs(None, W_AbstractIntObject, typecheck=False)
    +def _ensure_baseint(space, w_intvalue):
    +    from pypy.objspace.std.longobject import (
    +        W_LongObject, W_AbstractLongObject, newlong)
    +    if isinstance(w_intvalue, W_IntObject):
    +        if type(w_intvalue) is not W_IntObject:
    +            w_intvalue = wrapint(space, w_intvalue.intval)
    +        return w_intvalue
    +    elif isinstance(w_intvalue, W_AbstractLongObject):
    +        if type(w_intvalue) is not W_LongObject:
    +            w_intvalue = newlong(space, w_intvalue.asbigint())
    +        return w_intvalue
    +    else:
    +        # shouldn't happen
    +        raise oefmt(space.w_RuntimeError,
    +            "internal error in int.__new__()")
     
     
     W_AbstractIntObject.typedef = TypeDef("int",
    diff --git a/pypy/objspace/std/test/test_intobject.py b/pypy/objspace/std/test/test_intobject.py
    --- a/pypy/objspace/std/test/test_intobject.py
    +++ b/pypy/objspace/std/test/test_intobject.py
    @@ -533,6 +533,19 @@
                 assert n == 1
                 assert type(n) is int
     
    +    def test_trunc_returns_int_subclass_2(self):
    +        class BadInt:
    +            def __int__(self):
    +                return True
    +
    +        class TruncReturnsBadInt:
    +            def __trunc__(self):
    +                return BadInt()
    +        bad_int = TruncReturnsBadInt()
    +        n = int(bad_int)
    +        assert n == 1
    +        assert type(n) is int
    +
         def test_int_before_string(self):
             class Integral(str):
                 def __int__(self):
    diff --git a/pypy/objspace/std/test/test_typeobject.py b/pypy/objspace/std/test/test_typeobject.py
    --- a/pypy/objspace/std/test/test_typeobject.py
    +++ b/pypy/objspace/std/test/test_typeobject.py
    @@ -71,6 +71,22 @@
             raises(AttributeError, getattr, type, "__abstractmethods__")
             raises(TypeError, "int.__abstractmethods__ = ('abc', )")
     
    +    def test_is_abstract_flag(self):
    +        # IS_ABSTRACT flag should always be in sync with
    +        # cls.__dict__['__abstractmethods__']
    +        FLAG_IS_ABSTRACT = 1 << 20
    +
    +        class Base:
    +            pass
    +        Base.__abstractmethods__ = {'x'}
    +        assert Base.__flags__ & FLAG_IS_ABSTRACT
    +
    +        class Derived(Base):
    +            pass
    +        assert not (Derived.__flags__ & FLAG_IS_ABSTRACT)
    +        Derived.__abstractmethods__ = {'x'}
    +        assert Derived.__flags__ & FLAG_IS_ABSTRACT
    +
         def test_attribute_error(self):
             class X(object):
                 __module__ = 'test'
    diff --git a/pypy/objspace/std/test/test_unicodeobject.py b/pypy/objspace/std/test/test_unicodeobject.py
    --- a/pypy/objspace/std/test/test_unicodeobject.py
    +++ b/pypy/objspace/std/test/test_unicodeobject.py
    @@ -780,6 +780,11 @@
             raises(UnicodeError, b"\xc2".decode, "utf-8")
             assert b'\xe1\x80'.decode('utf-8', 'replace') == "\ufffd"
     
    +    def test_invalid_lookup(self):
    +
    +        raises(LookupError, u"abcd".encode, "hex")
    +        raises(LookupError, b"abcd".decode, "hex")
    +
         def test_repr_printable(self):
             # PEP 3138: __repr__ respects printable characters.
             x = '\u027d'
    diff --git a/pypy/objspace/std/typeobject.py b/pypy/objspace/std/typeobject.py
    --- a/pypy/objspace/std/typeobject.py
    +++ b/pypy/objspace/std/typeobject.py
    @@ -816,7 +816,7 @@
                 return space.call_function(newfunc, w_winner, w_name, w_bases, w_dict)
             w_typetype = w_winner
     
    -    name = space.text_w(w_name) 
    +    name = space.text_w(w_name)
         if '\x00' in name:
             raise oefmt(space.w_ValueError, "type name must not contain null characters")
         pos = surrogate_in_utf8(name)
    @@ -1339,7 +1339,6 @@
             if not isinstance(w_base, W_TypeObject):
                 continue
             w_self.flag_cpytype |= w_base.flag_cpytype
    -        w_self.flag_abstract |= w_base.flag_abstract
             if w_self.flag_map_or_seq == '?':
                 w_self.flag_map_or_seq = w_base.flag_map_or_seq
     
    diff --git a/pypy/objspace/std/unicodeobject.py b/pypy/objspace/std/unicodeobject.py
    --- a/pypy/objspace/std/unicodeobject.py
    +++ b/pypy/objspace/std/unicodeobject.py
    @@ -1241,7 +1241,7 @@
         return encoding, errors
     
     def encode_object(space, w_obj, encoding, errors):
    -    from pypy.module._codecs.interp_codecs import encode
    +    from pypy.module._codecs.interp_codecs import _call_codec, lookup_text_codec
         if errors is None or errors == 'strict':
             # fast paths
             utf8 = space.utf8_w(w_obj)
    @@ -1263,7 +1263,11 @@
                         a.pos, a.pos + 1)
                     assert False, "always raises"
                 return space.newbytes(utf8)
    -    w_retval = encode(space, w_obj, encoding, errors)
    +    if encoding is None:
    +        encoding = space.sys.defaultencoding
    +    w_codec_info = lookup_text_codec(space, 'encode', encoding)
    +    w_encfunc = space.getitem(w_codec_info, space.newint(0))
    +    w_retval = _call_codec(space, w_encfunc, w_obj, "encoding", encoding, errors)
         if not space.isinstance_w(w_retval, space.w_bytes):
             raise oefmt(space.w_TypeError,
                         "'%s' encoder returned '%T' instead of 'bytes'; "
    @@ -1274,6 +1278,7 @@
     
     
     def decode_object(space, w_obj, encoding, errors=None):
    +    from pypy.module._codecs.interp_codecs import _call_codec, lookup_text_codec
         if errors == 'strict' or errors is None:
             # fast paths
             if encoding == 'ascii':
    @@ -1284,8 +1289,11 @@
                 s = space.charbuf_w(w_obj)
                 lgt = unicodehelper.check_utf8_or_raise(space, s)
                 return space.newutf8(s, lgt)
    -    from pypy.module._codecs.interp_codecs import decode
    -    w_retval = decode(space, w_obj, encoding, errors)
    +    if encoding is None:
    +        encoding = space.sys.defaultencoding
    +    w_codec_info = lookup_text_codec(space, 'decode', encoding)
    +    w_encfunc = space.getitem(w_codec_info, space.newint(1))
    +    w_retval = _call_codec(space, w_encfunc, w_obj, "decoding", encoding, errors)
         if not isinstance(w_retval, W_UnicodeObject):
             raise oefmt(space.w_TypeError,
                         "'%s' decoder returned '%T' instead of 'str'; "
    
    From pypy.commits at gmail.com  Mon Aug 26 11:12:01 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 08:12:01 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: Copy here the logic to check for shrunk
     buffers
    Message-ID: <5d63f6c1.1c69fb81.fb185.fa7e@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6
    Changeset: r97269:bfbf7d164327
    Date: 2019-08-26 17:11 +0200
    http://bitbucket.org/pypy/pypy/changeset/bfbf7d164327/
    
    Log:	Copy here the logic to check for shrunk buffers
    
    diff --git a/pypy/module/_io/interp_bufferedio.py b/pypy/module/_io/interp_bufferedio.py
    --- a/pypy/module/_io/interp_bufferedio.py
    +++ b/pypy/module/_io/interp_bufferedio.py
    @@ -887,12 +887,14 @@
             with self.lock:
                 have = self._readahead()
                 if have >= length:
    -                rwbuffer.setslice(0, self.buffer[self.pos:self.pos + length])
    +                self.output_slice(space, rwbuffer,
    +                    0, self.buffer[self.pos:self.pos + length])
                     self.pos += length
                     return space.newint(length)
                 written = 0
                 if have > 0:
    -                rwbuffer.setslice(0, self.buffer[self.pos:self.read_end])
    +                self.output_slice(space, rwbuffer,
    +                    0, self.buffer[self.pos:self.read_end])
                     written = have
     
                 while written < length:
    @@ -920,7 +922,8 @@
                             break
                         endpos = min(have, length - written)
                         assert endpos >= 0
    -                    rwbuffer.setslice(written, self.buffer[0:endpos])
    +                    self.output_slice(space, rwbuffer,
    +                        written, self.buffer[0:endpos])
                         written += endpos
                         self.pos = endpos
                     if read_once:
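
The output_slice() helper called above lives elsewhere in interp_bufferedio.py
and is not part of this diff.  A minimal sketch of the shrunk-buffer check the
log message refers to (the exact signature and error text are assumptions):

    from pypy.interpreter.error import oefmt

    def output_slice(space, rwbuffer, target_pos, data):
        # the app-level object behind 'rwbuffer' may have been resized,
        # e.g. by a reentrant call, since the read started, so re-check
        # its length before copying into it
        if target_pos + len(data) > rwbuffer.getlength():
            raise oefmt(space.w_ValueError,
                        "buffer was shrunk while reading into it")
        rwbuffer.setslice(target_pos, data)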
    
    From pypy.commits at gmail.com  Mon Aug 26 11:27:47 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 08:27:47 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6-sandbox-2: merge 3f34199b0d1a
    Message-ID: <5d63fa73.1c69fb81.fc37.3ee1@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6-sandbox-2
    Changeset: r97271:b577a806f857
    Date: 2019-08-26 17:18 +0200
    http://bitbucket.org/pypy/pypy/changeset/b577a806f857/
    
    Log:	merge 3f34199b0d1a
    
    
    From pypy.commits at gmail.com  Mon Aug 26 11:27:49 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 08:27:49 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6-sandbox-2: merge 8bd962a47352
    Message-ID: <5d63fa75.1c69fb81.a8e73.c339@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6-sandbox-2
    Changeset: r97272:3d2a0a3328e7
    Date: 2019-08-26 17:20 +0200
    http://bitbucket.org/pypy/pypy/changeset/3d2a0a3328e7/
    
    Log:	merge 8bd962a47352
    
    diff --git a/pypy/module/__pypy__/interp_debug.py b/pypy/module/__pypy__/interp_debug.py
    --- a/pypy/module/__pypy__/interp_debug.py
    +++ b/pypy/module/__pypy__/interp_debug.py
    @@ -1,7 +1,13 @@
     from pypy.interpreter.gateway import unwrap_spec
     from rpython.rlib import debug, jit
     from rpython.rlib import rtimer
    +from rpython.rlib.objectmodel import sandbox_review
     
    +# In sandbox mode, the debug_start/debug_print functions are disabled,
    +# because they could allow the attacker to write arbitrary bytes to stderr
    +
    +
+@sandbox_review(abort=True)
     @jit.dont_look_inside
     @unwrap_spec(category='text', timestamp=bool)
     def debug_start(space, category, timestamp=False):
    @@ -10,11 +16,13 @@
             return space.newint(res)
         return space.w_None
     
+@sandbox_review(abort=True)
     @jit.dont_look_inside
     def debug_print(space, args_w):
         parts = [space.text_w(space.str(w_item)) for w_item in args_w]
         debug.debug_print(' '.join(parts))
     
+@sandbox_review(abort=True)
     @jit.dont_look_inside
     @unwrap_spec(category='text', timestamp=bool)
     def debug_stop(space, category, timestamp=False):
    @@ -23,6 +31,7 @@
             return space.newint(res)
         return space.w_None
     
+@sandbox_review(abort=True)
     @unwrap_spec(category='text')
     def debug_print_once(space, category, args_w):
         debug_start(space, category)
    @@ -34,9 +43,16 @@
     def debug_flush(space):
         debug.debug_flush()
     
    +
    +# In sandbox mode, these two helpers are disabled because they give unlimited
    +# access to the real time (if you enable them, note that they use lloperations
    +# that must also be white-listed in graphchecker.py)
    +
+@sandbox_review(abort=True)
     def debug_read_timestamp(space):
         return space.newint(rtimer.read_timestamp())
     
+@sandbox_review(abort=True)
     def debug_get_timestamp_unit(space):
         unit = rtimer.get_timestamp_unit()
         if unit == rtimer.UNIT_TSC:
    diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
    --- a/pypy/module/array/interp_array.py
    +++ b/pypy/module/array/interp_array.py
    @@ -1,6 +1,6 @@
     from rpython.rlib import jit, rgc, rutf8
     from rpython.rlib.buffer import RawBuffer, SubBuffer
    -from rpython.rlib.objectmodel import keepalive_until_here
    +from rpython.rlib.objectmodel import keepalive_until_here, sandbox_review
     from rpython.rlib.rarithmetic import ovfcheck, widen, r_uint
     from rpython.rlib.unroll import unrolling_iterable
     from rpython.rtyper.annlowlevel import llstr
    @@ -179,6 +179,7 @@
             if self._buffer:
                 lltype.free(self._buffer, flavor='raw')
     
    +    @sandbox_review(reviewed=True)
         def setlen(self, size, zero=False, overallocate=True):
             if self._buffer:
                 delta_memory_pressure = -self.allocated * self.itemsize
    @@ -253,6 +254,7 @@
         def _charbuf_stop(self):
             keepalive_until_here(self)
     
    +    @sandbox_review(reviewed=True)
         def delitem(self, space, i, j):
             if i < 0:
                 i += self.len
    @@ -407,6 +409,7 @@
             self._charbuf_stop()
             return self.space.newbytes(s)
     
    +    @sandbox_review(reviewed=True)
         def descr_fromstring(self, space, w_s):
             """ fromstring(string)
     
    @@ -565,6 +568,7 @@
                                     w_bytes]),
                     w_dict])
     
    +    @sandbox_review(reviewed=True)
         def descr_copy(self, space):
             """ copy(array)
     
    @@ -579,6 +583,7 @@
             )
             return w_a
     
    +    @sandbox_review(reviewed=True)
         def descr_byteswap(self, space):
             """ byteswap()
     
    @@ -659,6 +664,7 @@
         def descr_iter(self, space):
             return space.newseqiter(self)
     
    +    @sandbox_review(reviewed=True)
         def descr_add(self, space, w_other):
             if (not isinstance(w_other, W_ArrayBase)
                     or w_other.typecode != self.typecode):
    @@ -682,6 +688,7 @@
             keepalive_until_here(a)
             return a
     
    +    @sandbox_review(reviewed=True)
         def descr_inplace_add(self, space, w_other):
             if (not isinstance(w_other, W_ArrayBase)
                     or w_other.typecode != self.typecode):
    @@ -700,6 +707,7 @@
             keepalive_until_here(w_other)
             return self
     
    +    @sandbox_review(reviewed=True)
         def _mul_helper(self, space, w_repeat, is_inplace):
             try:
                 repeat = space.getindex_w(w_repeat, space.w_OverflowError)
    @@ -1071,6 +1079,7 @@
                                              self.space.newtext(msg))
                 return result
     
    +        @sandbox_review(reviewed=True)
             def fromsequence(self, w_seq):
                 space = self.space
                 oldlen = self.len
    @@ -1119,6 +1128,7 @@
     
                 self._fromiterable(w_seq)
     
    +        @sandbox_review(reviewed=True)
             def extend(self, w_iterable, accept_different_array=False):
                 space = self.space
                 if isinstance(w_iterable, W_Array):
    @@ -1170,6 +1180,7 @@
     
             # interface
     
    +        @sandbox_review(reviewed=True)
             def descr_append(self, space, w_x):
                 x = self.item_w(w_x)
                 index = self.len
    @@ -1179,12 +1190,14 @@
     
             # List interface
     
    +        @sandbox_review(reviewed=True)
             def descr_reverse(self, space):
                 b = self.get_buffer()
                 for i in range(self.len / 2):
                     b[i], b[self.len - i - 1] = b[self.len - i - 1], b[i]
                 keepalive_until_here(self)
     
    +        @sandbox_review(reviewed=True)
             def descr_pop(self, space, i):
                 if i < 0:
                     i += self.len
    @@ -1199,6 +1212,7 @@
                 self.setlen(self.len - 1)
                 return w_val
     
    +        @sandbox_review(reviewed=True)
             def descr_insert(self, space, idx, w_val):
                 if idx < 0:
                     idx += self.len
    @@ -1217,6 +1231,7 @@
                 b[i] = val
                 keepalive_until_here(self)
     
    +        @sandbox_review(reviewed=True)
             def getitem_slice(self, space, w_idx):
                 start, stop, step, size = space.decode_index4(w_idx, self.len)
                 w_a = mytype.w_class(self.space)
    @@ -1232,6 +1247,7 @@
                 keepalive_until_here(w_a)
                 return w_a
     
    +        @sandbox_review(reviewed=True)
             def setitem(self, space, w_idx, w_item):
                 idx, stop, step = space.decode_index(w_idx, self.len)
                 if step != 0:
    @@ -1241,6 +1257,7 @@
                 self.get_buffer()[idx] = item
                 keepalive_until_here(self)
     
    +        @sandbox_review(reviewed=True)
             def setitem_slice(self, space, w_idx, w_item):
                 if not isinstance(w_item, W_Array):
                     raise oefmt(space.w_TypeError,
    @@ -1268,6 +1285,7 @@
                     keepalive_until_here(w_item)
                     keepalive_until_here(self)
     
    +        @sandbox_review(check_caller=True)
             def _repeat_single_item(self, a, start, repeat):
                 # 
                 assert isinstance(a, W_Array)
    diff --git a/pypy/module/gc/interp_gc.py b/pypy/module/gc/interp_gc.py
    --- a/pypy/module/gc/interp_gc.py
    +++ b/pypy/module/gc/interp_gc.py
    @@ -46,7 +46,8 @@
         If they were already enabled, no-op.
         If they were disabled even several times, enable them anyway.
         """
    -    rgc.enable()
    +    if not space.config.translation.sandbox:    # not available in sandbox
    +        rgc.enable()
         if not space.user_del_action.enabled_at_app_level:
             space.user_del_action.enabled_at_app_level = True
             enable_finalizers(space)
    @@ -55,7 +56,8 @@
         """Non-recursive version.  Disable major collections and finalizers.
         Multiple calls to this function are ignored.
         """
    -    rgc.disable()
    +    if not space.config.translation.sandbox:    # not available in sandbox
    +        rgc.disable()
         if space.user_del_action.enabled_at_app_level:
             space.user_del_action.enabled_at_app_level = False
             disable_finalizers(space)
    diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py
    --- a/pypy/module/time/interp_time.py
    +++ b/pypy/module/time/interp_time.py
    @@ -12,6 +12,7 @@
     from rpython.rlib.rtime import (GETTIMEOFDAY_NO_TZ, TIMEVAL,
                                     HAVE_GETTIMEOFDAY, HAVE_FTIME)
     from rpython.rlib import rposix, rtime
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.translator.tool.cbuild import ExternalCompilationInfo
     import math
     import os
    @@ -365,6 +366,7 @@
     c_strftime = external('strftime', [rffi.CCHARP, rffi.SIZE_T, rffi.CCHARP, TM_P],
                           rffi.SIZE_T, sandboxsafe=True)
     
+@sandbox_review(reviewed=True)
     def _init_timezone(space):
         timezone = daylight = altzone = 0
         tzname = ["", ""]
    @@ -564,6 +566,7 @@
         w_obj = space.call_function(w_struct_time, w_time_tuple)
         return w_obj
     
+@sandbox_review(reviewed=True)
     def _gettmarg(space, w_tup, allowNone=True):
         if space.is_none(w_tup):
             if not allowNone:
    @@ -679,6 +682,7 @@
                     return space.newfloat(_timespec_to_seconds(timespec))
         return gettimeofday(space, w_info)
     
+@sandbox_review(reviewed=True)
     def ctime(space, w_seconds=None):
         """ctime([seconds]) -> string
     
    @@ -725,6 +729,7 @@
         return space.mod(space.newtext("%.3s %.3s%3d %.2d:%.2d:%.2d %d"),
                          space.newtuple(args))
     
+@sandbox_review(reviewed=True)
     def gmtime(space, w_seconds=None):
         """gmtime([seconds]) -> (tm_year, tm_mon, tm_day, tm_hour, tm_min,
                               tm_sec, tm_wday, tm_yday, tm_isdst)
    @@ -746,6 +751,7 @@
                                  space.newtext(*_get_error_msg()))
         return _tm_to_tuple(space, p)
     
+@sandbox_review(reviewed=True)
     def localtime(space, w_seconds=None):
         """localtime([seconds]) -> (tm_year, tm_mon, tm_day, tm_hour, tm_min,
                                  tm_sec, tm_wday, tm_yday, tm_isdst)
    @@ -764,6 +770,7 @@
                                  space.newtext(*_get_error_msg()))
         return _tm_to_tuple(space, p)
     
+@sandbox_review(reviewed=True)
     def mktime(space, w_tup):
         """mktime(tuple) -> floating point number
     
    diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py
    --- a/rpython/memory/gc/incminimark.py
    +++ b/rpython/memory/gc/incminimark.py
    @@ -1176,6 +1176,8 @@
     
     
         def unpin(self, obj):
    +        if self.safer_variant():
    +            out_of_memory("unpin() unexpected")
             ll_assert(self._is_pinned(obj),
                 "unpin: object is already not pinned")
             #
    @@ -1186,6 +1188,8 @@
             return (self.header(obj).tid & GCFLAG_PINNED) != 0
     
         def shrink_array(self, obj, smallerlength):
    +        if self.safer_variant():    # no shrinking in the safer variant
    +            return False       # (because the original 'obj' is kind of broken)
             #
             # Only objects in the nursery can be "resized".  Resizing them
             # means recording that they have a smaller size, so that when
    diff --git a/rpython/memory/gc/inspector.py b/rpython/memory/gc/inspector.py
    --- a/rpython/memory/gc/inspector.py
    +++ b/rpython/memory/gc/inspector.py
    @@ -89,7 +89,7 @@
     raw_os_write = rffi.llexternal(rposix.UNDERSCORE_ON_WIN32 + 'write',
                                    [rffi.INT, llmemory.Address, rffi.SIZE_T],
                                    rffi.SIZE_T,
    -                               sandboxsafe=True, _nowrapper=True)
    +                               _nowrapper=True)
     
     AddressStack = get_address_stack()
     
    diff --git a/rpython/rlib/buffer.py b/rpython/rlib/buffer.py
    --- a/rpython/rlib/buffer.py
    +++ b/rpython/rlib/buffer.py
    @@ -7,6 +7,7 @@
     from rpython.rtyper.lltypesystem.rlist import LIST_OF
     from rpython.rtyper.annlowlevel import llstr
     from rpython.rlib.objectmodel import specialize, we_are_translated
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rlib import jit
     from rpython.rlib.rgc import (resizable_list_supporting_raw_ptr,
                                   nonmoving_raw_ptr_for_resizable_list,
    @@ -143,6 +144,7 @@
             ptr = self.get_raw_address()
             return llop.raw_load(TP, ptr, byte_offset)
     
    +    @sandbox_review(check_caller=True)
         @specialize.ll_and_arg(1)
         def typed_write(self, TP, byte_offset, value):
             """
    @@ -179,6 +181,7 @@
             base_ofs = targetcls._get_gc_data_offset()
             scale_factor = llmemory.sizeof(lltype.Char)
     
    +        @sandbox_review(check_caller=True)
             @specialize.ll_and_arg(1)
             def typed_read(self, TP, byte_offset):
                 if not is_alignment_correct(TP, byte_offset):
    @@ -188,6 +191,7 @@
                 return llop.gc_load_indexed(TP, lldata, byte_offset,
                                             scale_factor, base_ofs)
     
    +        @sandbox_review(check_caller=True)
             @specialize.ll_and_arg(1)
             def typed_write(self, TP, byte_offset, value):
                 if self.readonly or not is_alignment_correct(TP, byte_offset):
    @@ -362,10 +366,12 @@
             ptr = self.buffer.get_raw_address()
             return rffi.ptradd(ptr, self.offset)
     
    +    @sandbox_review(check_caller=True)
         @specialize.ll_and_arg(1)
         def typed_read(self, TP, byte_offset):
             return self.buffer.typed_read(TP, byte_offset + self.offset)
     
    +    @sandbox_review(check_caller=True)
         @specialize.ll_and_arg(1)
         def typed_write(self, TP, byte_offset, value):
             return self.buffer.typed_write(TP, byte_offset + self.offset, value)
    diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py
    --- a/rpython/rlib/objectmodel.py
    +++ b/rpython/rlib/objectmodel.py
    @@ -226,6 +226,38 @@
         func._not_rpython_ = True
         return func
     
    +def sandbox_review(reviewed=False, check_caller=False, abort=False):
    +    """Mark a function as reviewed for sandboxing purposes.
    +    This should not be necessary on any function written in "normal" RPython
    +    code, but only on functions using some lloperation that is not
    +    whitelisted in rpython.translator.sandbox.graphchecker.
    +
    +    Call this with one of the three flags set to True:
    +
    +      *reviewed*: This function is fine and any other code can call it.
    +      If the function contains external calls, they will still be replaced with
    +      stubs using I/O to communicate with the parent process (as long as they
    +      are not marked sandboxsafe themselves).
    +
    +      *check_caller*: This function is fine, but you should still check the
    +      callers; they must all have a sandbox_review() as well.
    +
    +      *abort*: An abort is prepended to the function's code, making the
    +      whole process abort if it is called at runtime.
    +
    +    """
    +    assert reviewed + check_caller + abort == 1
    +    def wrap(func):
    +        assert not hasattr(func, '_sandbox_review_') or abort
    +        if reviewed:
    +            func._sandbox_review_ = 'reviewed'
    +        if check_caller:
    +            func._sandbox_review_ = 'check_caller'
    +        if abort:
    +            func._sandbox_review_ = 'abort'
    +        return func
    +    return wrap
    +
     
     # ____________________________________________________________
     
    @@ -347,6 +379,10 @@
         # XXX this can be made more efficient in the future
         return bytearray(str(i))
     
    +def sandboxed_translation():
    +    config = fetch_translated_config()
    +    return config is not None and config.translation.sandbox
    +
     def fetch_translated_config():
         """Returns the config that is current when translating.
         Returns None if not translated.
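
A short usage sketch of the sandbox_review() decorator documented above (the
helper names here are hypothetical; exactly one flag may be set per call, and
the resulting func._sandbox_review_ attribute is read later by the sandbox
graph checker):

    from rpython.rlib.objectmodel import sandbox_review

    @sandbox_review(reviewed=True)        # fine, callable from anywhere
    def clear_raw_buffer(buf, size):
        pass

    @sandbox_review(check_caller=True)    # fine, but all callers need review too
    def _raw_write_helper(ptr, offset, value):
        pass

    @sandbox_review(abort=True)           # the process aborts if this is reached
    def spawn_external_process(args):
        pass
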
    diff --git a/rpython/rlib/rgc.py b/rpython/rlib/rgc.py
    --- a/rpython/rlib/rgc.py
    +++ b/rpython/rlib/rgc.py
    @@ -6,6 +6,7 @@
     from rpython.rlib import jit
     from rpython.rlib.objectmodel import we_are_translated, enforceargs, specialize
     from rpython.rlib.objectmodel import CDefinedIntSymbolic, not_rpython
    +from rpython.rlib.objectmodel import sandbox_review, sandboxed_translation
     from rpython.rtyper.extregistry import ExtRegistryEntry
     from rpython.rtyper.lltypesystem import lltype, llmemory
     
    @@ -358,14 +359,23 @@
                 return True
         return False
     
+@not_rpython
    +def _ll_arraycopy_of_nongc_not_for_sandboxed():
    +    pass
     
     @jit.oopspec('list.ll_arraycopy(source, dest, source_start, dest_start, length)')
     @enforceargs(None, None, int, int, int)
+@sandbox_review(reviewed=True)
     @specialize.ll()
     def ll_arraycopy(source, dest, source_start, dest_start, length):
         from rpython.rtyper.lltypesystem.lloperation import llop
         from rpython.rlib.objectmodel import keepalive_until_here
     
    +    TP = lltype.typeOf(source).TO
    +    assert TP == lltype.typeOf(dest).TO
    +    if TP._gckind != 'gc' and sandboxed_translation():
    +        _ll_arraycopy_of_nongc_not_for_sandboxed()
    +
         # XXX: Hack to ensure that we get a proper effectinfo.write_descrs_arrays
         # and also, maybe, speed up very small cases
         if length <= 1:
    @@ -379,9 +389,6 @@
                 assert (source_start + length <= dest_start or
                         dest_start + length <= source_start)
     
    -    TP = lltype.typeOf(source).TO
    -    assert TP == lltype.typeOf(dest).TO
    -
         slowpath = False
         if must_split_gc_address_space():
             slowpath = True
    @@ -415,6 +422,7 @@
     
     @jit.oopspec('rgc.ll_shrink_array(p, smallerlength)')
     @enforceargs(None, int)
+@sandbox_review(reviewed=True)
     @specialize.ll()
     def ll_shrink_array(p, smallerlength):
         from rpython.rtyper.lltypesystem.lloperation import llop
    @@ -454,6 +462,7 @@
         return newp
     
     @jit.dont_look_inside
+@sandbox_review(reviewed=True)
     @specialize.ll()
     def ll_arrayclear(p):
         # Equivalent to memset(array, 0).  Only for GcArray(primitive-type) for now.
    @@ -1096,6 +1105,7 @@
             hop.exception_cannot_occur()
             return hop.genop('gc_gcflag_extra', vlist, resulttype = hop.r_result)
     
+@specialize.memo()
     def lltype_is_gc(TP):
         return getattr(getattr(TP, "TO", None), "_gckind", "?") == 'gc'
     
    @@ -1419,7 +1429,7 @@
         return _ResizableListSupportingRawPtr(lst)
     
     def nonmoving_raw_ptr_for_resizable_list(lst):
    -    if must_split_gc_address_space():
    +    if must_split_gc_address_space() or sandboxed_translation():
             raise ValueError
         return _nonmoving_raw_ptr_for_resizable_list(lst)
     
    @@ -1501,6 +1511,7 @@
     
     
     @jit.dont_look_inside
+@sandbox_review(check_caller=True)
     def ll_nonmovable_raw_ptr_for_resizable_list(ll_list):
         """
         WARNING: dragons ahead.
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -10,7 +10,8 @@
         _CYGWIN, _MACRO_ON_POSIX, UNDERSCORE_ON_WIN32, _WIN32,
         _prefer_unicode, _preferred_traits, _preferred_traits2)
     from rpython.rlib.objectmodel import (
    -    specialize, enforceargs, register_replacement_for, NOT_CONSTANT)
    +    specialize, enforceargs, register_replacement_for, NOT_CONSTANT,
    +    sandbox_review)
     from rpython.rlib.rarithmetic import intmask, widen
     from rpython.rlib.signature import signature
     from rpython.tool.sourcetools import func_renamer
    @@ -988,6 +989,7 @@
                          [rffi.INTP, rffi.VOIDP, rffi.VOIDP, rffi.VOIDP],
                          rffi.PID_T, _nowrapper = True)
     
+@sandbox_review(abort=True)
     @replace_os_function('fork')
     @jit.dont_look_inside
     def fork():
    @@ -1017,6 +1019,7 @@
             lltype.free(master_p, flavor='raw')
             lltype.free(slave_p, flavor='raw')
     
+@sandbox_review(abort=True)
     @replace_os_function('forkpty')
     @jit.dont_look_inside
     def forkpty():
    @@ -1058,6 +1061,7 @@
                              [rffi.PID_T, rffi.INTP, rffi.INT], rffi.PID_T,
                              save_err=rffi.RFFI_SAVE_ERRNO)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('waitpid')
     def waitpid(pid, options):
         status_p = lltype.malloc(rffi.INTP.TO, 1, flavor='raw')
    @@ -1743,6 +1747,7 @@
         finally:
             lltype.free(groups, flavor='raw')
     
+@sandbox_review(reviewed=True)
     @replace_os_function('setgroups')
     def setgroups(gids):
         n = len(gids)
    diff --git a/rpython/rlib/rstack.py b/rpython/rlib/rstack.py
    --- a/rpython/rlib/rstack.py
    +++ b/rpython/rlib/rstack.py
    @@ -6,6 +6,7 @@
     import py
     
     from rpython.rlib.objectmodel import we_are_translated, fetch_translated_config
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rlib.rarithmetic import r_uint
     from rpython.rlib import rgc
     from rpython.rtyper.lltypesystem import lltype, rffi
    @@ -15,7 +16,7 @@
     
     def llexternal(name, args, res, _callable=None):
         return rffi.llexternal(name, args, res,
    -                           sandboxsafe=True, _nowrapper=True,
    +                           sandboxsafe='check_caller', _nowrapper=True,
                                _callable=_callable)
     
     _stack_get_end = llexternal('LL_stack_get_end', [], lltype.Signed,
    @@ -39,6 +40,7 @@
     _stack_criticalcode_stop = llexternal('LL_stack_criticalcode_stop', [],
                                           lltype.Void, lambda: None)
     
+@sandbox_review(reviewed=True)
     def stack_check():
         if not we_are_translated():
             return
    @@ -64,6 +66,7 @@
     stack_check._always_inline_ = True
     stack_check._dont_insert_stackcheck_ = True
     
+@sandbox_review(check_caller=True)
     @rgc.no_collect
     def stack_check_slowpath(current):
         if ord(_stack_too_big_slowpath(current)):
    @@ -72,6 +75,7 @@
     stack_check_slowpath._dont_inline_ = True
     stack_check_slowpath._dont_insert_stackcheck_ = True
     
+@sandbox_review(reviewed=True)
     def stack_almost_full():
         """Return True if the stack is more than 15/16th full."""
         if not we_are_translated():
    diff --git a/rpython/rlib/rstruct/standardfmttable.py b/rpython/rlib/rstruct/standardfmttable.py
    --- a/rpython/rlib/rstruct/standardfmttable.py
    +++ b/rpython/rlib/rstruct/standardfmttable.py
    @@ -7,7 +7,7 @@
     
     import struct
     
    -from rpython.rlib.objectmodel import specialize
    +from rpython.rlib.objectmodel import specialize, sandbox_review
     from rpython.rlib.rarithmetic import r_uint, r_longlong, r_ulonglong
     from rpython.rlib.rstruct import ieee
     from rpython.rlib.rstruct.error import StructError, StructOverflowError
    @@ -30,6 +30,7 @@
     Create a fast path packer for TYPE. The packer returns True if it succeeded
         or False otherwise.
         """
    +    @sandbox_review(reviewed=True)
         @specialize.argtype(0)
         def do_pack_fastpath(fmtiter, value):
             size = rffi.sizeof(TYPE)
    @@ -39,6 +40,7 @@
                 raise CannotWrite
             #
             # typed_write() might raise CannotWrite
    +        # (note that we assume the write cannot overflow its buffer)
             fmtiter.wbuf.typed_write(TYPE, fmtiter.pos, value)
             if not ALLOW_FASTPATH:
                 # if we are here it means that typed_write did not raise, and thus
    @@ -211,6 +213,7 @@
     
     @specialize.memo()
     def unpack_fastpath(TYPE):
    +    @sandbox_review(reviewed=True)
         @specialize.argtype(0)
         def do_unpack_fastpath(fmtiter):
             size = rffi.sizeof(TYPE)
    @@ -289,9 +292,15 @@
                 # because of alignment issues. So we copy the slice into a new
                 # string, which is guaranteed to be properly aligned, and read the
                 # float/double from there
    -            input = fmtiter.read(size)
    -            val = StringBuffer(input).typed_read(TYPE, 0)
    +            val = read_slowpath(fmtiter)
             fmtiter.appendobj(float(val))
    +
    +    @sandbox_review(reviewed=True)
    +    def read_slowpath(fmtiter):
    +        size = rffi.sizeof(TYPE)
    +        input = fmtiter.read(size)
    +        return StringBuffer(input).typed_read(TYPE, 0)
    +
         return unpack_ieee
     
     @specialize.argtype(0)
    diff --git a/rpython/rlib/rthread.py b/rpython/rlib/rthread.py
    --- a/rpython/rlib/rthread.py
    +++ b/rpython/rlib/rthread.py
    @@ -6,6 +6,7 @@
     from rpython.rlib.debug import ll_assert
     from rpython.rlib.objectmodel import we_are_translated, specialize
     from rpython.rlib.objectmodel import CDefinedIntSymbolic, not_rpython
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rtyper.lltypesystem.lloperation import llop
     from rpython.rtyper.tool import rffi_platform
     from rpython.rtyper.extregistry import ExtRegistryEntry
    @@ -225,7 +226,7 @@
     
     get_stacksize = llexternal('RPyThreadGetStackSize', [], lltype.Signed)
     set_stacksize = llexternal('RPyThreadSetStackSize', [lltype.Signed],
    -                           lltype.Signed)
    +                           lltype.Signed, sandboxsafe='abort')
     
     # ____________________________________________________________
     #
    diff --git a/rpython/rlib/rtime.py b/rpython/rlib/rtime.py
    --- a/rpython/rlib/rtime.py
    +++ b/rpython/rlib/rtime.py
    @@ -8,7 +8,7 @@
     from rpython.translator.tool.cbuild import ExternalCompilationInfo
     from rpython.rtyper.tool import rffi_platform
     from rpython.rtyper.lltypesystem import rffi, lltype
    -from rpython.rlib.objectmodel import register_replacement_for
    +from rpython.rlib.objectmodel import register_replacement_for, sandbox_review
     from rpython.rlib.rarithmetic import intmask, UINT_MAX
     from rpython.rlib import rposix
     
    @@ -262,6 +262,7 @@
                                        lltype.Ptr(TIMEVAL)], rffi.INT,
                             save_err=rffi.RFFI_SAVE_ERRNO)
     
+@sandbox_review(reviewed=True)
     @replace_time_function('sleep')
     def sleep(secs):
         if _WIN32:
    diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py
    --- a/rpython/rtyper/lltypesystem/rffi.py
    +++ b/rpython/rtyper/lltypesystem/rffi.py
    @@ -9,6 +9,7 @@
     from rpython.tool.sourcetools import func_with_new_name
     from rpython.rlib.objectmodel import Symbolic, specialize, not_rpython
     from rpython.rlib.objectmodel import keepalive_until_here, enforceargs
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rlib import rarithmetic, rgc
     from rpython.rtyper.extregistry import ExtRegistryEntry
     from rpython.rlib.unroll import unrolling_iterable
    @@ -97,6 +98,12 @@
                     don't bother releasing the GIL.  An explicit True or False
                     overrides this logic.
     
    +    sandboxsafe: if True, the process really calls the C function even if it
    +                 is sandboxed.  If False, it will turn into a stdin/stdout
    +                 communication with the parent process.  If "check_caller",
    +                 it is like True but we call @sandbox_review(check_caller=True)
    +                 which means that we need to also check the callers.
    +
         calling_conv: if 'unknown' or 'win', the C function is not directly seen
                       by the JIT.  If 'c', it can be seen (depending on
                       releasegil=False).  For tests only, or if _nowrapper,
    @@ -214,6 +221,8 @@
             #
             call_external_function = func_with_new_name(call_external_function,
                                                         'ccall_' + name)
    +        call_external_function = sandbox_review(check_caller=True)(
    +            call_external_function)
             # don't inline, as a hack to guarantee that no GC pointer is alive
             # anywhere in call_external_function
         else:
    @@ -251,6 +260,8 @@
                                                             'ccall_' + name)
                 call_external_function = jit.dont_look_inside(
                     call_external_function)
    +            call_external_function = sandbox_review(check_caller=True)(
    +                call_external_function)
     
         def _oops():
             raise AssertionError("can't pass (any more) a unicode string"
    @@ -329,8 +340,18 @@
         # for debugging, stick ll func ptr to that
         wrapper._ptr = funcptr
         wrapper = func_with_new_name(wrapper, name)
    +    if sandboxsafe == 'check_caller':
    +        wrapper = sandbox_review(check_caller=True)(wrapper)
    +    elif sandboxsafe == 'abort':
    +        wrapper = sandbox_review(abort=True)(wrapper)
    +    else:
    +        assert isinstance(sandboxsafe, bool)
    +        wrapper = sandbox_review(reviewed=True)(wrapper)
         return wrapper
     
    +def sandbox_check_type(TYPE):
    +    return not isinstance(TYPE, lltype.Primitive) or TYPE == llmemory.Address
    +
     
     class CallbackHolder:
         def __init__(self):
    @@ -792,6 +813,7 @@
             lastchar = u'\x00'
     
         # str -> char*
    +    @sandbox_review(reviewed=True)
         def str2charp(s, track_allocation=True):
             """ str -> char*
             """
    @@ -806,6 +828,7 @@
             return array
         str2charp._annenforceargs_ = [strtype, bool]
     
    +    @sandbox_review(reviewed=True)
         def free_charp(cp, track_allocation=True):
             if track_allocation:
                 lltype.free(cp, flavor='raw', track_allocation=True)
    @@ -838,6 +861,7 @@
     
         # str -> (buf, llobj, flag)
         # Can't inline this because of the raw address manipulation.
    +    @sandbox_review(reviewed=True)
         @jit.dont_look_inside
         def get_nonmovingbuffer_ll(data):
             """
    @@ -891,6 +915,7 @@
         get_nonmovingbuffer_ll._annenforceargs_ = [strtype]
     
     
    +    @sandbox_review(reviewed=True)
         @jit.dont_look_inside
         def get_nonmovingbuffer_ll_final_null(data):
             tup = get_nonmovingbuffer_ll(data)
    @@ -902,6 +927,7 @@
     
         # args-from-tuple-returned-by-get_nonmoving_buffer() -> None
         # Can't inline this because of the raw address manipulation.
    +    @sandbox_review(reviewed=True)
         @jit.dont_look_inside
         def free_nonmovingbuffer_ll(buf, llobj, flag):
             """
    @@ -918,6 +944,7 @@
     
         # int -> (char*, str, int)
         # Can't inline this because of the raw address manipulation.
    +    @sandbox_review(reviewed=True)
         @jit.dont_look_inside
         def alloc_buffer(count):
             """
    @@ -1096,6 +1123,7 @@
     CCHARPP = lltype.Ptr(lltype.Array(CCHARP, hints={'nolength': True}))
     CWCHARPP = lltype.Ptr(lltype.Array(CWCHARP, hints={'nolength': True}))
     
+@sandbox_review(reviewed=True)
     def liststr2charpp(l):
         """ list[str] -> char**, NULL terminated
         """
    @@ -1241,6 +1269,7 @@
             return v_ptr
     
     
+@sandbox_review(check_caller=True)
     def structcopy(pdst, psrc):
         """Copy all the fields of the structure given by 'psrc'
         into the structure given by 'pdst'.
    @@ -1258,6 +1287,7 @@
                                                  if name not in padding]
             unrollfields = unrolling_iterable(fields)
     
    +        @sandbox_review(check_caller=True)
             def copyfn(pdst, psrc):
                 for name, TYPE in unrollfields:
                     if isinstance(TYPE, lltype.ContainerType):
    @@ -1271,6 +1301,7 @@
     _get_structcopy_fn._annspecialcase_ = 'specialize:memo'
     
     
+@sandbox_review(check_caller=True)
     def setintfield(pdst, fieldname, value):
         """Maybe temporary: a helper to set an integer field into a structure,
         transparently casting between the various integer types.
    @@ -1405,14 +1436,14 @@
                 lltype.Void,
                 releasegil=False,
                 calling_conv='c',
    -            sandboxsafe=True,
    +            sandboxsafe='check_caller',
             )
     c_memset = llexternal("memset",
                 [VOIDP, lltype.Signed, SIZE_T],
                 lltype.Void,
                 releasegil=False,
                 calling_conv='c',
    -            sandboxsafe=True,
    +            sandboxsafe='check_caller',
             )
     
     
    diff --git a/rpython/rtyper/lltypesystem/rstr.py b/rpython/rtyper/lltypesystem/rstr.py
    --- a/rpython/rtyper/lltypesystem/rstr.py
    +++ b/rpython/rtyper/lltypesystem/rstr.py
    @@ -3,7 +3,8 @@
     from rpython.annotator import model as annmodel
     from rpython.rlib import jit, types, objectmodel, rgc
     from rpython.rlib.objectmodel import (malloc_zero_filled, we_are_translated,
    -    ll_hash_string, keepalive_until_here, specialize, enforceargs, dont_inline)
    +    ll_hash_string, keepalive_until_here, specialize, enforceargs, dont_inline,
    +    sandbox_review)
     from rpython.rlib.signature import signature
     from rpython.rlib.rarithmetic import ovfcheck
     from rpython.rtyper.error import TyperError
    @@ -59,6 +60,7 @@
                     llmemory.itemoffsetof(TP.chars, 0) +
                     llmemory.sizeof(CHAR_TP) * item)
     
    +    @sandbox_review(check_caller=True)
         @signature(types.any(), types.any(), types.int(), returns=types.any())
         @specialize.arg(0)
         def _get_raw_buf(TP, src, ofs):
    @@ -75,6 +77,7 @@
         _get_raw_buf._always_inline_ = True
     
         @jit.oopspec('stroruni.copy_contents(src, dst, srcstart, dststart, length)')
    +    @sandbox_review(reviewed=True)
         @signature(types.any(), types.any(), types.int(), types.int(), types.int(), returns=types.none())
         def copy_string_contents(src, dst, srcstart, dststart, length):
             """Copies 'length' characters from the 'src' string to the 'dst'
    @@ -112,6 +115,7 @@
         copy_string_contents = func_with_new_name(copy_string_contents,
                                                   'copy_%s_contents' % name)
     
    +    @sandbox_review(check_caller=True)
         @jit.oopspec('stroruni.copy_string_to_raw(src, ptrdst, srcstart, length)')
         def copy_string_to_raw(src, ptrdst, srcstart, length):
             """
    @@ -141,6 +145,7 @@
         copy_string_to_raw._always_inline_ = True
         copy_string_to_raw = func_with_new_name(copy_string_to_raw, 'copy_%s_to_raw' % name)
     
    +    @sandbox_review(reviewed=True)
         @jit.dont_look_inside
         @signature(types.any(), types.any(), types.int(), types.int(),
                    returns=types.none())
    @@ -1258,6 +1263,7 @@
             return hop.gendirectcall(cls.ll_join_strs, size, vtemp)
     
         @staticmethod
    +    @sandbox_review(reviewed=True)
         @jit.dont_look_inside
         def ll_string2list(RESLIST, src):
             length = len(src.chars)
    diff --git a/rpython/translator/backendopt/all.py b/rpython/translator/backendopt/all.py
    --- a/rpython/translator/backendopt/all.py
    +++ b/rpython/translator/backendopt/all.py
    @@ -113,7 +113,7 @@
         if config.profile_based_inline and not secondary:
             threshold = config.profile_based_inline_threshold
             heuristic = get_function(config.profile_based_inline_heuristic)
    -        inline.instrument_inline_candidates(graphs, threshold)
    +        inline.instrument_inline_candidates(translator, graphs, threshold)
             counters = translator.driver_instrument_result(
                 config.profile_based_inline)
             n = len(counters)
    diff --git a/rpython/translator/backendopt/inline.py b/rpython/translator/backendopt/inline.py
    --- a/rpython/translator/backendopt/inline.py
    +++ b/rpython/translator/backendopt/inline.py
    @@ -548,7 +548,8 @@
         return (0.9999 * measure_median_execution_cost(graph) +
                 count), True       # may be NaN
     
    -def inlinable_static_callers(graphs, store_calls=False, ok_to_call=None):
    +def inlinable_static_callers(translator, graphs, store_calls=False,
    +                             ok_to_call=None):
         if ok_to_call is None:
             ok_to_call = set(graphs)
         result = []
    @@ -558,6 +559,7 @@
             else:
                 result.append((parentgraph, graph))
         #
    +    dont_inline = make_dont_inline_checker(translator)
         for parentgraph in graphs:
             for block in parentgraph.iterblocks():
                 for op in block.operations:
    @@ -565,13 +567,12 @@
                         funcobj = op.args[0].value._obj
                         graph = getattr(funcobj, 'graph', None)
                         if graph is not None and graph in ok_to_call:
    -                        if getattr(getattr(funcobj, '_callable', None),
    -                                   '_dont_inline_', False):
    +                        if dont_inline(funcobj):
                                 continue
                             add(parentgraph, block, op, graph)
         return result
     
    -def instrument_inline_candidates(graphs, threshold):
    +def instrument_inline_candidates(translator, graphs, threshold):
         cache = {None: False}
         def candidate(graph):
             try:
    @@ -581,6 +582,7 @@
                 cache[graph] = res
                 return res
         n = 0
    +    dont_inline = make_dont_inline_checker(translator)
         for parentgraph in graphs:
             for block in parentgraph.iterblocks():
                 ops = block.operations
    @@ -592,8 +594,7 @@
                         funcobj = op.args[0].value._obj
                         graph = getattr(funcobj, 'graph', None)
                         if graph is not None:
    -                        if getattr(getattr(funcobj, '_callable', None),
    -                                   '_dont_inline_', False):
    +                        if dont_inline(funcobj):
                                 continue
                         if candidate(graph):
                             tag = Constant('inline', Void)
    @@ -610,6 +611,17 @@
         return (hasattr(graph, 'func') and
                 getattr(graph.func, '_always_inline_', None))
     
    +def make_dont_inline_checker(translator):
    +    sandbox = translator.config.translation.sandbox
    +
    +    def dont_inline(funcobj):
    +        func = getattr(funcobj, '_callable', None)
    +        if sandbox:
    +            if hasattr(func, '_sandbox_review_'):
    +                return True
    +        return getattr(func, '_dont_inline_', False)
    +    return dont_inline
    +
     def auto_inlining(translator, threshold=None,
                       callgraph=None,
                       call_count_pred=None,
    @@ -621,7 +633,7 @@
         callers = {}     # {graph: {graphs-that-call-it}}
         callees = {}     # {graph: {graphs-that-it-calls}}
         if callgraph is None:
    -        callgraph = inlinable_static_callers(translator.graphs)
    +        callgraph = inlinable_static_callers(translator, translator.graphs)
         for graph1, graph2 in callgraph:
             callers.setdefault(graph2, {})[graph1] = True
             callees.setdefault(graph1, {})[graph2] = True
    @@ -727,7 +739,8 @@
                                     if not hasattr(graph, 'exceptiontransformed')])
         else:
             ok_to_call = None
    -    callgraph = inlinable_static_callers(graphs, ok_to_call=ok_to_call)
    +    callgraph = inlinable_static_callers(translator, graphs,
    +                                         ok_to_call=ok_to_call)
         count = auto_inlining(translator, threshold, callgraph=callgraph,
                               heuristic=heuristic,
                               call_count_pred=call_count_pred)
    diff --git a/rpython/translator/backendopt/test/test_inline.py b/rpython/translator/backendopt/test/test_inline.py
    --- a/rpython/translator/backendopt/test/test_inline.py
    +++ b/rpython/translator/backendopt/test/test_inline.py
    @@ -100,7 +100,7 @@
             call_count_pred = None
             if call_count_check:
                 call_count_pred = lambda lbl: True
    -            instrument_inline_candidates(t.graphs, threshold)
    +            instrument_inline_candidates(t, t.graphs, threshold)
     
             if remove_same_as:
                 for graph in t.graphs:
    diff --git a/rpython/translator/c/genc.py b/rpython/translator/c/genc.py
    --- a/rpython/translator/c/genc.py
    +++ b/rpython/translator/c/genc.py
    @@ -65,6 +65,11 @@
     
         def __init__(self, translator, entrypoint, config, gcpolicy=None,
                      gchooks=None, secondary_entrypoints=()):
    +        #
    +        if config.translation.sandbox:
    +            assert not config.translation.thread
    +            gchooks = None     # no custom gc hooks
    +        #
             self.translator = translator
             self.entrypoint = entrypoint
             self.entrypoint_name = getattr(self.entrypoint, 'func_name', None)
    diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py
    --- a/rpython/translator/c/node.py
    +++ b/rpython/translator/c/node.py
    @@ -882,9 +882,9 @@
     
     def new_funcnode(db, T, obj, forcename=None):
         from rpython.rtyper.rtyper import llinterp_backend
    -    if db.sandbox:
    -        if (getattr(obj, 'external', None) is not None and
    -                not obj._safe_not_sandboxed):
    +    if db.sandbox and getattr(obj, 'external', None) is not None:
    +        safe_flag = obj._safe_not_sandboxed
    +        if not (safe_flag is True or safe_flag == "check_caller"):
                 try:
                     sandbox_mapping = db.sandbox_mapping
                 except AttributeError:
    diff --git a/rpython/translator/driver.py b/rpython/translator/driver.py
    --- a/rpython/translator/driver.py
    +++ b/rpython/translator/driver.py
    @@ -412,6 +412,10 @@
             if translator.annotator is not None:
                 translator.frozen = True
     
    +        if self.config.translation.sandbox:
    +            from rpython.translator.sandbox import graphchecker
    +            graphchecker.check_all_graphs(self.translator)
    +
             standalone = self.standalone
             get_gchooks = self.extra.get('get_gchooks', lambda: None)
             gchooks = get_gchooks()
    diff --git a/rpython/translator/sandbox/graphchecker.py b/rpython/translator/sandbox/graphchecker.py
    new file mode 100644
    --- /dev/null
    +++ b/rpython/translator/sandbox/graphchecker.py
    @@ -0,0 +1,138 @@
    +"""Logic to check the operations in all the user graphs.
    +This runs at the start of the database-c step, so it excludes the
    +graphs produced later, notably for the GC.  These are "low-level"
    +graphs that are assumed to be safe.
    +"""
    +
    +from rpython.flowspace.model import SpaceOperation, Constant
    +from rpython.rtyper.rmodel import inputconst
    +from rpython.rtyper.lltypesystem import lltype, llmemory, rstr
    +from rpython.rtyper.lltypesystem.lloperation import LL_OPERATIONS
    +from rpython.translator.unsimplify import varoftype
    +from rpython.tool.ansi_print import AnsiLogger
    +
    +class UnsafeException(Exception):
    +    pass
    +
    +log = AnsiLogger("sandbox")
    +
    +safe_operations = set([
    +    'keepalive', 'threadlocalref_get', 'threadlocalref_store',
    +    'malloc', 'malloc_varsize', 'free',
    +    'getfield', 'getarrayitem', 'getinteriorfield', 'raw_load',
    +    'cast_opaque_ptr', 'cast_ptr_to_int',
    +    'gc_thread_run', 'gc_stack_bottom', 'gc_thread_after_fork',
    +    'shrink_array', 'gc_pin', 'gc_unpin', 'gc_can_move', 'gc_id',
    +    'gc_identityhash', 'weakref_create', 'weakref_deref',
    +    'gc_fq_register', 'gc_fq_next_dead',
    +    'gc_set_max_heap_size', 'gc_ignore_finalizer', 'gc_add_memory_pressure',
    +    'gc_writebarrier', 'gc__collect',
    +    'length_of_simple_gcarray_from_opaque',
    +    'debug_fatalerror', 'debug_print_traceback', 'debug_flush',
    +    'hint', 'debug_start', 'debug_stop', 'debug_print', 'debug_offset',
    +    'jit_force_quasi_immutable', 'jit_force_virtual', 'jit_marker',
    +    'jit_is_virtual',
    +    ])
    +gc_set_operations = set([
    +    'setfield', 'setarrayitem', 'setinteriorfield',
    +    ])
    +for opname, opdesc in LL_OPERATIONS.items():
    +    if opdesc.tryfold:
    +        safe_operations.add(opname)
    +
    +def graph_review(graph):
    +    return getattr(getattr(graph, 'func', None), '_sandbox_review_', None)
    +
    +def make_abort_graph(graph):
    +    ll_err = rstr.conststr("reached forbidden function %r" % (graph.name,))
    +    c_err = inputconst(lltype.typeOf(ll_err), ll_err)
    +    op = SpaceOperation('debug_fatalerror', [c_err], varoftype(lltype.Void))
    +    graph.startblock.operations.insert(0, op)
    +
    +def is_gc_ptr(TYPE):
    +    return isinstance(TYPE, lltype.Ptr) and TYPE.TO._gckind == 'gc'
    +
    +
    +class GraphChecker(object):
    +
    +    def __init__(self, translator):
    +        self.translator = translator
    +
    +    def graph_is_unsafe(self, graph):
    +        for block, op in graph.iterblockops():
    +            opname = op.opname
    +
    +            if opname in safe_operations:
    +                pass
    +
    +            elif opname in gc_set_operations:
    +                if op.args[0].concretetype.TO._gckind != 'gc':
    +                    return "non-GC memory write: %r" % (op,)
    +
    +            elif opname == 'direct_call':
    +                c_target = op.args[0]
    +                assert isinstance(c_target, Constant)
    +                TYPE = lltype.typeOf(c_target.value)
    +                assert isinstance(TYPE.TO, lltype.FuncType)
    +                obj = c_target.value._obj
    +                if hasattr(obj, 'graph'):
    +                    g2 = obj.graph
    +                    if graph_review(g2) == 'check_caller':
    +                        return ("direct_call to a graph with "
    +                                "check_caller=True: %r" % (op,))
    +                elif getattr(obj, '_safe_not_sandboxed', False) is not False:
    +                    ss = obj._safe_not_sandboxed
    +                    if ss is not True:
    +                        return ("direct_call to llfunc with "
    +                                "sandboxsafe=%r: %r" % (ss, obj))
    +                elif getattr(obj, 'external', None) is not None:
    +                    # either obj._safe_not_sandboxed is True, and then it's
    +                    # fine; or obj._safe_not_sandboxed is False, and then
    +                    # this will be transformed into a stdin/stdout stub
    +                    pass
    +                else:
    +                    # not 'external', but no 'graph' either?
    +                    return "direct_call to %r" % (obj,)
    +
    +            elif opname == 'indirect_call':
    +                graph_list = op.args[-1].value
    +                for g2 in graph_list:
    +                    if graph_review(g2) == 'check_caller':
    +                        return ("indirect_call that can go to at least one "
    +                                "graph with check_caller=True: %r" % (op,))
    +
    +            elif opname in ('cast_ptr_to_adr', 'force_cast',
    +                            'cast_int_to_ptr'):
    +                if is_gc_ptr(op.args[0].concretetype):
    +                    return "argument is a GC ptr: %r" % (opname,)
    +                if is_gc_ptr(op.result.concretetype):
    +                    return "result is a GC ptr: %r" % (opname,)
    +
    +            else:
    +                return "unsupported llop: %r" % (opname,)
    +
    +    def check(self):
    +        unsafe = {}
    +        for graph in self.translator.graphs:
    +            review = graph_review(graph)
    +            if review is not None:
    +                if review in ('reviewed', 'check_caller'):
    +                    continue
    +                elif review == 'abort':
    +                    make_abort_graph(graph)
    +                    continue
    +                else:
    +                    assert False, repr(review)
    +
    +            problem = self.graph_is_unsafe(graph)
    +            if problem is not None:
    +                unsafe[graph] = problem
    +        if unsafe:
    +            raise UnsafeException(
    +                '\n'.join('%r: %s' % kv for kv in unsafe.items()))
    +
    +
    +def check_all_graphs(translator):
    +    log("Checking the graphs for sandbox-unsafe operations")
    +    checker = GraphChecker(translator)
    +    checker.check()
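
The checker above only reads a '_sandbox_review_' attribute from the function
behind each graph; the decorator that sets it is not part of this diff (the
tests below import it from rpython.rlib.objectmodel).  A minimal sketch of
what graph_review() expects, assuming the decorator does nothing more than tag
the function with one of the three review strings:

    def sandbox_review(reviewed=False, check_caller=False, abort=False):
        # exactly one of the three markers must be chosen
        assert reviewed + check_caller + abort == 1
        def decorator(func):
            if reviewed:
                func._sandbox_review_ = 'reviewed'
            elif check_caller:
                func._sandbox_review_ = 'check_caller'
            else:
                func._sandbox_review_ = 'abort'
            return func
        return decorator

    @sandbox_review(check_caller=True)
    def low_level_helper():
        pass    # every caller must itself carry a review marker
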
    diff --git a/rpython/translator/sandbox/rsandbox.py b/rpython/translator/sandbox/rsandbox.py
    --- a/rpython/translator/sandbox/rsandbox.py
    +++ b/rpython/translator/sandbox/rsandbox.py
    @@ -6,7 +6,7 @@
     import py
     import sys
     
    -from rpython.rlib import types
    +from rpython.rlib import types, debug
     from rpython.rlib.objectmodel import specialize
     from rpython.rlib.signature import signature
     from rpython.rlib.unroll import unrolling_iterable
    @@ -20,6 +20,7 @@
     from rpython.rtyper.llannotation import lltype_to_annotation
     from rpython.rtyper.annlowlevel import MixLevelHelperAnnotator
     from rpython.tool.ansi_print import AnsiLogger
    +from rpython.translator.sandbox.graphchecker import make_abort_graph
     
     log = AnsiLogger("sandbox")
     
    @@ -99,34 +100,43 @@
              lltype.typeOf(rpy_sandbox_arg[arg_kind]).TO.ARGS[0])
             for arg_kind in arg_kinds])
     
    -    result_func = rpy_sandbox_res[result_kind]
    -    RESTYPE = FUNCTYPE.RESULT
    +    if fnobj._safe_not_sandboxed == 'abort':
     
    -    try:
    -        lst = rtyper._sandboxed_functions
    -    except AttributeError:
    -        lst = rtyper._sandboxed_functions = []
    -    name_and_sig = '%s(%s)%s' % (fnname, ''.join(arg_kinds), result_kind)
    -    lst.append(name_and_sig)
    -    log(name_and_sig)
    -    name_and_sig = rffi.str2charp(name_and_sig, track_allocation=False)
    +        msg = "sandboxed subprocess aborts on call to %r" % (fnname,)
    +        def execute(*args):
    +            debug.fatalerror(msg)
     
    -    def execute(*args):
    -        #
    -        # serialize the arguments
    -        i = 0
    -        for arg_kind, func, ARGTYPE in unroll_args:
    -            if arg_kind == 'v':
    -                continue
    -            func(rffi.cast(ARGTYPE, args[i]))
    -            i = i + 1
    -        #
    -        # send the function name and the arguments and wait for an answer
    -        result = result_func(name_and_sig)
    -        #
    -        # result the answer, if any
    -        if RESTYPE is not lltype.Void:
    -            return rffi.cast(RESTYPE, result)
    +    else:
    +
    +        result_func = rpy_sandbox_res[result_kind]
    +        RESTYPE = FUNCTYPE.RESULT
    +
    +        try:
    +            lst = rtyper._sandboxed_functions
    +        except AttributeError:
    +            lst = rtyper._sandboxed_functions = []
    +        name_and_sig = '%s(%s)%s' % (fnname, ''.join(arg_kinds), result_kind)
    +        lst.append(name_and_sig)
    +        log(name_and_sig)
    +        name_and_sig = rffi.str2charp(name_and_sig, track_allocation=False)
    +
    +        def execute(*args):
    +            #
    +            # serialize the arguments
    +            i = 0
    +            for arg_kind, func, ARGTYPE in unroll_args:
    +                if arg_kind == 'v':
    +                    continue
    +                func(rffi.cast(ARGTYPE, args[i]))
    +                i = i + 1
    +            #
    +            # send the function name and the arguments and wait for an answer
    +            result = result_func(name_and_sig)
    +            #
+            # return the answer, if any
    +            if RESTYPE is not lltype.Void:
    +                return rffi.cast(RESTYPE, result)
    +    #
         execute.__name__ = 'sandboxed_%s' % (fnname,)
         #
         args_s, s_result = sig_ll(fnobj)
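
The hunk above gives each sandboxed external function one of two behaviours:
either its arguments are serialized and the call is sent over the stdin/stdout
pipe as before, or, for functions marked 'abort', the subprocess dies with a
fatal error the moment the call is reached.  As a hedged illustration only
(assuming that a string passed as sandboxsafe= to rffi.llexternal() ends up in
fnobj._safe_not_sandboxed, the way the 'check_caller' string does in the tests
below), an external that must never run inside the sandbox could be declared
along these lines:

    from rpython.rtyper.lltypesystem import rffi

    # hypothetical declaration: in a sandboxed build, calling this does not
    # marshal anything to the parent process; the subprocess stops with
    # debug.fatalerror("sandboxed subprocess aborts on call to 'execv'")
    ll_execv = rffi.llexternal("execv", [rffi.CCHARP, rffi.CCHARPP], rffi.INT,
                               sandboxsafe='abort')
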
    diff --git a/rpython/translator/sandbox/test/test_graphchecker.py b/rpython/translator/sandbox/test/test_graphchecker.py
    new file mode 100644
    --- /dev/null
    +++ b/rpython/translator/sandbox/test/test_graphchecker.py
    @@ -0,0 +1,128 @@
    +from rpython.translator.translator import TranslationContext, graphof
    +from rpython.rtyper.lltypesystem import lltype, rffi
    +from rpython.rtyper.lltypesystem.lloperation import llop
    +from rpython.rlib.objectmodel import sandbox_review
    +
    +from rpython.translator.sandbox.graphchecker import GraphChecker
    +from rpython.translator.sandbox.graphchecker import make_abort_graph
    +
    +
    +class TestGraphIsUnsafe(object):
    +
    +    def graph_is_unsafe(self, fn, signature=[]):
    +        t = TranslationContext()
    +        self.t = t
    +        t.buildannotator().build_types(fn, signature)
    +        t.buildrtyper().specialize()
    +        graph = graphof(t, fn)
    +
    +        checker = GraphChecker(t)
    +        return checker.graph_is_unsafe(graph)
    +
    +    def check_safe(self, fn, signature=[]):
    +        result = self.graph_is_unsafe(fn, signature)
    +        assert result is None
    +
    +    def check_unsafe(self, error_substring, fn, signature=[]):
    +        result = self.graph_is_unsafe(fn, signature)
    +        assert result is not None
    +        assert error_substring in result
    +
    +    def test_simple(self):
    +        def f():
    +            pass
    +        self.check_safe(f)
    +
    +    def test_unsafe_setfield(self):
    +        S = lltype.Struct('S', ('x', lltype.Signed))
    +        s = lltype.malloc(S, flavor='raw', immortal=True)
    +        def f():
    +            s.x = 42
    +        self.check_unsafe("non-GC memory write", f)
    +
    +    def test_unsafe_operation(self):
    +        def f():
    +            llop.debug_forked(lltype.Void)
    +        self.check_unsafe("unsupported llop", f)
    +
    +    def test_force_cast(self):
    +        SRAW = lltype.Struct('SRAW', ('x', lltype.Signed))
    +        SGC = lltype.GcStruct('SGC', ('x', lltype.Signed))
    +        def f(x):
    +            return llop.force_cast(lltype.Signed, x)
    +        self.check_safe(f, [float])
    +        self.check_safe(f, [lltype.Ptr(SRAW)])
    +        self.check_unsafe("argument is a GC ptr", f, [lltype.Ptr(SGC)])
    +
    +    def test_direct_call_to_check_caller(self):
    +        @sandbox_review(check_caller=True)
    +        def g():
    +            pass
    +        def f():
    +            g()
    +        self.check_unsafe("direct_call to a graph with check_caller=True", f)
    +
    +    def test_direct_call_to_reviewed(self):
    +        @sandbox_review(reviewed=True)
    +        def g():
    +            pass
    +        def f():
    +            g()
    +        self.check_safe(f)
    +
    +    def test_direct_call_to_abort(self):
    +        @sandbox_review(abort=True)
    +        def g():
    +            pass
    +        def f():
    +            g()
    +        self.check_safe(f)
    +
    +    def test_indirect_call_to_check_caller(self):
    +        class A:
    +            def meth(self, i):
    +                pass
    +        class B(A):
    +            def meth(self, i):
    +                pass
    +        class C(A):
    +            @sandbox_review(check_caller=True)
    +            def meth(self, i):
    +                pass
    +        def f(i):
    +            if i > 5:
    +                x = B()
    +            else:
    +                x = C()
    +            x.meth(i)
    +        self.check_unsafe("indirect_call that can go to at least one "
    +                          "graph with check_caller=True", f, [int])
    +
    +    def test_direct_call_external(self):
    +        llfn1 = rffi.llexternal("foobar", [], lltype.Void, sandboxsafe=True,
    +                                _nowrapper=True)
    +        self.check_safe(lambda: llfn1())
    +        #
    +        llfn2 = rffi.llexternal("foobar", [], lltype.Void, sandboxsafe=False,
    +                                _nowrapper=True)
    +        self.check_safe(lambda: llfn2())   # will be turned into an I/O stub
    +        #
    +        llfn2b = rffi.llexternal("foobar", [], lltype.Void,
    +                                 sandboxsafe="check_caller",
    +                                 _nowrapper=True)
    +        self.check_unsafe("direct_call to llfunc with "
    +                          "sandboxsafe='check_caller'", lambda: llfn2b())
    +        #
    +        llfn3 = rffi.llexternal("foobar", [], lltype.Void, sandboxsafe=True)
    +        self.check_safe(lambda: llfn3())
    +        #
    +        llfn4 = rffi.llexternal("foobar", [], lltype.Void, sandboxsafe=False)
    +        self.check_safe(lambda: llfn4())
    +
    +    def test_make_abort_graph(self):
    +        def dummy():
    +            pass
    +        self.check_safe(dummy)
    +        graph = graphof(self.t, dummy)
    +        make_abort_graph(graph)
    +        assert graph.startblock.operations[0].opname == 'debug_fatalerror'
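
Taken together with the driver.py change at the top of this patch, the review
is wired in as follows: when config.translation.sandbox is set, every user
graph is checked just before the database-c step, and a single UnsafeException
listing all offending graphs aborts the translation.  A rough usage sketch,
using only names introduced by the new module above:

    from rpython.translator.sandbox import graphchecker

    def review_for_sandbox(translator):
        # raises UnsafeException with one "<graph>: <reason>" line per
        # problem, e.g. "non-GC memory write: ..." or "unsupported llop: ..."
        try:
            graphchecker.check_all_graphs(translator)
        except graphchecker.UnsafeException as exc:
            print('sandbox review failed:\n%s' % (exc,))
            raise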
    
    From pypy.commits at gmail.com  Mon Aug 26 11:27:51 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 08:27:51 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6-sandbox-2: merge 0d5f51e7a2c8
    Message-ID: <5d63fa77.1c69fb81.69a6e.a6a3@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6-sandbox-2
    Changeset: r97273:36a029317cc9
    Date: 2019-08-26 17:21 +0200
    http://bitbucket.org/pypy/pypy/changeset/36a029317cc9/
    
    Log:	merge 0d5f51e7a2c8
    
    diff --git a/lib-python/3/_osx_support.py b/lib-python/3/_osx_support.py
    deleted file mode 100644
    --- a/lib-python/3/_osx_support.py
    +++ /dev/null
    @@ -1,504 +0,0 @@
    -"""Shared OS X support functions."""
    -
    -import os
    -import re
    -import sys
    -
    -__all__ = [
    -    'compiler_fixup',
    -    'customize_config_vars',
    -    'customize_compiler',
    -    'get_platform_osx',
    -]
    -
    -# configuration variables that may contain universal build flags,
    -# like "-arch" or "-isdkroot", that may need customization for
    -# the user environment
    -_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
    -                            'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
    -                            'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
    -                            'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS')
    -
    -# configuration variables that may contain compiler calls
    -_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')
    -
    -# prefix added to original configuration variable names
    -_INITPRE = '_OSX_SUPPORT_INITIAL_'
    -
    -
    -def _find_executable(executable, path=None):
    -    """Tries to find 'executable' in the directories listed in 'path'.
    -
    -    A string listing directories separated by 'os.pathsep'; defaults to
    -    os.environ['PATH'].  Returns the complete filename or None if not found.
    -    """
    -    if path is None:
    -        path = os.environ['PATH']
    -
    -    paths = path.split(os.pathsep)
    -    base, ext = os.path.splitext(executable)
    -
    -    if (sys.platform == 'win32') and (ext != '.exe'):
    -        executable = executable + '.exe'
    -
    -    if not os.path.isfile(executable):
    -        for p in paths:
    -            f = os.path.join(p, executable)
    -            if os.path.isfile(f):
    -                # the file exists, we have a shot at spawn working
    -                return f
    -        return None
    -    else:
    -        return executable
    -
    -
    -def _read_output(commandstring):
    -    """Output from successful command execution or None"""
    -    # Similar to os.popen(commandstring, "r").read(),
    -    # but without actually using os.popen because that
    -    # function is not usable during python bootstrap.
    -    # tempfile is also not available then.
    -    import contextlib
    -    try:
    -        import tempfile
    -        fp = tempfile.NamedTemporaryFile()
    -    except ImportError:
    -        fp = open("/tmp/_osx_support.%s"%(
    -            os.getpid(),), "w+b")
    -
    -    with contextlib.closing(fp) as fp:
    -        cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
    -        return fp.read().decode('utf-8').strip() if not os.system(cmd) else None
    -
    -
    -def _find_build_tool(toolname):
    -    """Find a build tool on current path or using xcrun"""
    -    return (_find_executable(toolname)
    -                or _read_output("/usr/bin/xcrun -find %s" % (toolname,))
    -                or ''
    -            )
    -
    -_SYSTEM_VERSION = None
    -
    -def _get_system_version():
    -    """Return the OS X system version as a string"""
    -    # Reading this plist is a documented way to get the system
    -    # version (see the documentation for the Gestalt Manager)
    -    # We avoid using platform.mac_ver to avoid possible bootstrap issues during
    -    # the build of Python itself (distutils is used to build standard library
    -    # extensions).
    -
    -    global _SYSTEM_VERSION
    -
    -    if _SYSTEM_VERSION is None:
    -        _SYSTEM_VERSION = ''
    -        try:
    -            f = open('/System/Library/CoreServices/SystemVersion.plist')
    -        except OSError:
    -            # We're on a plain darwin box, fall back to the default
    -            # behaviour.
    -            pass
    -        else:
    -            try:
-                m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
-                              r'<string>(.*?)</string>', f.read())
    -            finally:
    -                f.close()
    -            if m is not None:
    -                _SYSTEM_VERSION = '.'.join(m.group(1).split('.')[:2])
    -            # else: fall back to the default behaviour
    -    if not _SYSTEM_VERSION:
    -        # minimum supported MACOSX_DEPLOYMENT_TARGET version
    -        return '10.14'
    -    return _SYSTEM_VERSION
    -
    -def _remove_original_values(_config_vars):
    -    """Remove original unmodified values for testing"""
    -    # This is needed for higher-level cross-platform tests of get_platform.
    -    for k in list(_config_vars):
    -        if k.startswith(_INITPRE):
    -            del _config_vars[k]
    -
    -def _save_modified_value(_config_vars, cv, newvalue):
    -    """Save modified and original unmodified value of configuration var"""
    -
    -    oldvalue = _config_vars.get(cv, '')
    -    if (oldvalue != newvalue) and (_INITPRE + cv not in _config_vars):
    -        _config_vars[_INITPRE + cv] = oldvalue
    -    _config_vars[cv] = newvalue
    -
    -def _supports_universal_builds():
    -    """Returns True if universal builds are supported on this system"""
    -    # As an approximation, we assume that if we are running on 10.4 or above,
    -    # then we are running with an Xcode environment that supports universal
    -    # builds, in particular -isysroot and -arch arguments to the compiler. This
    -    # is in support of allowing 10.4 universal builds to run on 10.3.x systems.
    -
    -    osx_version = _get_system_version()
    -    if osx_version:
    -        try:
    -            osx_version = tuple(int(i) for i in osx_version.split('.'))
    -        except ValueError:
    -            osx_version = ''
    -    return bool(osx_version >= (10, 4)) if osx_version else False
    -
    -
    -def _find_appropriate_compiler(_config_vars):
    -    """Find appropriate C compiler for extension module builds"""
    -
    -    # Issue #13590:
    -    #    The OSX location for the compiler varies between OSX
    -    #    (or rather Xcode) releases.  With older releases (up-to 10.5)
    -    #    the compiler is in /usr/bin, with newer releases the compiler
    -    #    can only be found inside Xcode.app if the "Command Line Tools"
    -    #    are not installed.
    -    #
    -    #    Furthermore, the compiler that can be used varies between
    -    #    Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2'
    -    #    as the compiler, after that 'clang' should be used because
    -    #    gcc-4.2 is either not present, or a copy of 'llvm-gcc' that
    -    #    miscompiles Python.
    -
    -    # skip checks if the compiler was overridden with a CC env variable
    -    if 'CC' in os.environ:
    -        return _config_vars
    -
    -    # The CC config var might contain additional arguments.
    -    # Ignore them while searching.
    -    cc = oldcc = _config_vars['CC'].split()[0]
    -    if not _find_executable(cc):
    -        # Compiler is not found on the shell search PATH.
    -        # Now search for clang, first on PATH (if the Command LIne
    -        # Tools have been installed in / or if the user has provided
    -        # another location via CC).  If not found, try using xcrun
    -        # to find an uninstalled clang (within a selected Xcode).
    -
    -        # NOTE: Cannot use subprocess here because of bootstrap
    -        # issues when building Python itself (and os.popen is
    -        # implemented on top of subprocess and is therefore not
    -        # usable as well)
    -
    -        cc = _find_build_tool('clang')
    -
    -    elif os.path.basename(cc).startswith('gcc'):
    -        # Compiler is GCC, check if it is LLVM-GCC
    -        data = _read_output("'%s' --version"
    -                             % (cc.replace("'", "'\"'\"'"),))
    -        if data and 'llvm-gcc' in data:
    -            # Found LLVM-GCC, fall back to clang
    -            cc = _find_build_tool('clang')
    -
    -    if not cc:
    -        raise SystemError(
    -               "Cannot locate working compiler")
    -
    -    if cc != oldcc:
    -        # Found a replacement compiler.
    -        # Modify config vars using new compiler, if not already explicitly
    -        # overridden by an env variable, preserving additional arguments.
    -        for cv in _COMPILER_CONFIG_VARS:
    -            if cv in _config_vars and cv not in os.environ:
    -                cv_split = _config_vars[cv].split()
    -                cv_split[0] = cc if cv != 'CXX' else cc + '++'
    -                _save_modified_value(_config_vars, cv, ' '.join(cv_split))
    -
    -    return _config_vars
    -
    -
    -def _remove_universal_flags(_config_vars):
    -    """Remove all universal build arguments from config vars"""
    -
    -    for cv in _UNIVERSAL_CONFIG_VARS:
    -        # Do not alter a config var explicitly overridden by env var
    -        if cv in _config_vars and cv not in os.environ:
    -            flags = _config_vars[cv]
    -            flags = re.sub(r'-arch\s+\w+\s', ' ', flags, flags=re.ASCII)
    -            flags = re.sub('-isysroot [^ \t]*', ' ', flags)
    -            _save_modified_value(_config_vars, cv, flags)
    -
    -    return _config_vars
    -
    -
    -def _remove_unsupported_archs(_config_vars):
    -    """Remove any unsupported archs from config vars"""
    -    # Different Xcode releases support different sets for '-arch'
    -    # flags. In particular, Xcode 4.x no longer supports the
    -    # PPC architectures.
    -    #
    -    # This code automatically removes '-arch ppc' and '-arch ppc64'
    -    # when these are not supported. That makes it possible to
    -    # build extensions on OSX 10.7 and later with the prebuilt
    -    # 32-bit installer on the python.org website.
    -
    -    # skip checks if the compiler was overridden with a CC env variable
    -    if 'CC' in os.environ:
    -        return _config_vars
    -
    -    if re.search(r'-arch\s+ppc', _config_vars['CFLAGS']) is not None:
    -        # NOTE: Cannot use subprocess here because of bootstrap
    -        # issues when building Python itself
    -        status = os.system(
    -            """echo 'int main{};' | """
    -            """'%s' -c -arch ppc -x c -o /dev/null /dev/null 2>/dev/null"""
    -            %(_config_vars['CC'].replace("'", "'\"'\"'"),))
    -        if status:
    -            # The compile failed for some reason.  Because of differences
    -            # across Xcode and compiler versions, there is no reliable way
    -            # to be sure why it failed.  Assume here it was due to lack of
    -            # PPC support and remove the related '-arch' flags from each
    -            # config variables not explicitly overridden by an environment
    -            # variable.  If the error was for some other reason, we hope the
    -            # failure will show up again when trying to compile an extension
    -            # module.
    -            for cv in _UNIVERSAL_CONFIG_VARS:
    -                if cv in _config_vars and cv not in os.environ:
    -                    flags = _config_vars[cv]
    -                    flags = re.sub(r'-arch\s+ppc\w*\s', ' ', flags)
    -                    _save_modified_value(_config_vars, cv, flags)
    -
    -    return _config_vars
    -
    -
    -def _override_all_archs(_config_vars):
    -    """Allow override of all archs with ARCHFLAGS env var"""
    -    # NOTE: This name was introduced by Apple in OSX 10.5 and
    -    # is used by several scripting languages distributed with
    -    # that OS release.
    -    if 'ARCHFLAGS' in os.environ:
    -        arch = os.environ['ARCHFLAGS']
    -        for cv in _UNIVERSAL_CONFIG_VARS:
    -            if cv in _config_vars and '-arch' in _config_vars[cv]:
    -                flags = _config_vars[cv]
    -                flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
    -                flags = flags + ' ' + arch
    -                _save_modified_value(_config_vars, cv, flags)
    -
    -    return _config_vars
    -
    -
    -def _check_for_unavailable_sdk(_config_vars):
    -    """Remove references to any SDKs not available"""
    -    # If we're on OSX 10.5 or later and the user tries to
    -    # compile an extension using an SDK that is not present
    -    # on the current machine it is better to not use an SDK
    -    # than to fail.  This is particularly important with
    -    # the standalone Command Line Tools alternative to a
    -    # full-blown Xcode install since the CLT packages do not
    -    # provide SDKs.  If the SDK is not present, it is assumed
    -    # that the header files and dev libs have been installed
    -    # to /usr and /System/Library by either a standalone CLT
    -    # package or the CLT component within Xcode.
    -    cflags = _config_vars.get('CFLAGS', '')
    -    m = re.search(r'-isysroot\s+(\S+)', cflags)
    -    if m is not None:
    -        sdk = m.group(1)
    -        if not os.path.exists(sdk):
    -            for cv in _UNIVERSAL_CONFIG_VARS:
    -                # Do not alter a config var explicitly overridden by env var
    -                if cv in _config_vars and cv not in os.environ:
    -                    flags = _config_vars[cv]
    -                    flags = re.sub(r'-isysroot\s+\S+(?:\s|$)', ' ', flags)
    -                    _save_modified_value(_config_vars, cv, flags)
    -
    -    return _config_vars
    -
    -
    -def compiler_fixup(compiler_so, cc_args):
    -    """
    -    This function will strip '-isysroot PATH' and '-arch ARCH' from the
    -    compile flags if the user has specified one them in extra_compile_flags.
    -
    -    This is needed because '-arch ARCH' adds another architecture to the
    -    build, without a way to remove an architecture. Furthermore GCC will
    -    barf if multiple '-isysroot' arguments are present.
    -    """
    -    stripArch = stripSysroot = False
    -
    -    compiler_so = list(compiler_so)
    -
    -    if not _supports_universal_builds():
    -        # OSX before 10.4.0, these don't support -arch and -isysroot at
    -        # all.
    -        stripArch = stripSysroot = True
    -    else:
    -        stripArch = '-arch' in cc_args
    -        stripSysroot = '-isysroot' in cc_args
    -
    -    if stripArch or 'ARCHFLAGS' in os.environ:
    -        while True:
    -            try:
    -                index = compiler_so.index('-arch')
    -                # Strip this argument and the next one:
    -                del compiler_so[index:index+2]
    -            except ValueError:
    -                break
    -
    -    if 'ARCHFLAGS' in os.environ and not stripArch:
    -        # User specified different -arch flags in the environ,
    -        # see also distutils.sysconfig
    -        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
    -
    -    if stripSysroot:
    -        while True:
    -            try:
    -                index = compiler_so.index('-isysroot')
    -                # Strip this argument and the next one:
    -                del compiler_so[index:index+2]
    -            except ValueError:
    -                break
    -
    -    # Check if the SDK that is used during compilation actually exists,
    -    # the universal build requires the usage of a universal SDK and not all
    -    # users have that installed by default.
    -    sysroot = None
    -    if '-isysroot' in cc_args:
    -        idx = cc_args.index('-isysroot')
    -        sysroot = cc_args[idx+1]
    -    elif '-isysroot' in compiler_so:
    -        idx = compiler_so.index('-isysroot')
    -        sysroot = compiler_so[idx+1]
    -
    -    if sysroot and not os.path.isdir(sysroot):
    -        from distutils import log
    -        log.warn("Compiling with an SDK that doesn't seem to exist: %s",
    -                sysroot)
    -        log.warn("Please check your Xcode installation")
    -
    -    return compiler_so
    -
    -
    -def customize_config_vars(_config_vars):
    -    """Customize Python build configuration variables.
    -
    -    Called internally from sysconfig with a mutable mapping
    -    containing name/value pairs parsed from the configured
    -    makefile used to build this interpreter.  Returns
    -    the mapping updated as needed to reflect the environment
    -    in which the interpreter is running; in the case of
    -    a Python from a binary installer, the installed
    -    environment may be very different from the build
    -    environment, i.e. different OS levels, different
    -    built tools, different available CPU architectures.
    -
    -    This customization is performed whenever
    -    distutils.sysconfig.get_config_vars() is first
    -    called.  It may be used in environments where no
    -    compilers are present, i.e. when installing pure
    -    Python dists.  Customization of compiler paths
    -    and detection of unavailable archs is deferred
    -    until the first extension module build is
    -    requested (in distutils.sysconfig.customize_compiler).
    -
    -    Currently called from distutils.sysconfig
    -    """
    -
    -    if not _supports_universal_builds():
    -        # On Mac OS X before 10.4, check if -arch and -isysroot
    -        # are in CFLAGS or LDFLAGS and remove them if they are.
    -        # This is needed when building extensions on a 10.3 system
    -        # using a universal build of python.
    -        _remove_universal_flags(_config_vars)
    -
    -    # Allow user to override all archs with ARCHFLAGS env var
    -    _override_all_archs(_config_vars)
    -
    -    # Remove references to sdks that are not found
    -    _check_for_unavailable_sdk(_config_vars)
    -
    -    return _config_vars
    -
    -
    -def customize_compiler(_config_vars):
    -    """Customize compiler path and configuration variables.
    -
    -    This customization is performed when the first
    -    extension module build is requested
    -    in distutils.sysconfig.customize_compiler).
    -    """
    -
    -    # Find a compiler to use for extension module builds
    -    _find_appropriate_compiler(_config_vars)
    -
    -    # Remove ppc arch flags if not supported here
    -    _remove_unsupported_archs(_config_vars)
    -
    -    # Allow user to override all archs with ARCHFLAGS env var
    -    _override_all_archs(_config_vars)
    -
    -    return _config_vars
    -
    -
    -def get_platform_osx(_config_vars, osname, release, machine):
    -    """Filter values for get_platform()"""
    -    # called from get_platform() in sysconfig and distutils.util
    -    #
    -    # For our purposes, we'll assume that the system version from
    -    # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
    -    # to. This makes the compatibility story a bit more sane because the
    -    # machine is going to compile and link as if it were
    -    # MACOSX_DEPLOYMENT_TARGET.
    -
    -    macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '')
    -    macrelease = _get_system_version() or macver
    -    macver = macver or macrelease
    -
    -    if macver:
    -        release = macver
    -        osname = "macosx"
    -
    -        # Use the original CFLAGS value, if available, so that we
    -        # return the same machine type for the platform string.
    -        # Otherwise, distutils may consider this a cross-compiling
    -        # case and disallow installs.
    -        cflags = _config_vars.get(_INITPRE+'CFLAGS',
    -                                    _config_vars.get('CFLAGS', ''))
    -        if macrelease:
    -            try:
    -                macrelease = tuple(int(i) for i in macrelease.split('.')[0:2])
    -            except ValueError:
    -                macrelease = (10, 0)
    -        else:
    -            # assume no universal support
    -            macrelease = (10, 0)
    -
    -        if (macrelease >= (10, 4)) and '-arch' in cflags.strip():
    -            # The universal build will build fat binaries, but not on
    -            # systems before 10.4
    -
    -            machine = 'fat'
    -
    -            archs = re.findall(r'-arch\s+(\S+)', cflags)
    -            archs = tuple(sorted(set(archs)))
    -
    -            if len(archs) == 1:
    -                machine = archs[0]
    -            elif archs == ('i386', 'ppc'):
    -                machine = 'fat'
    -            elif archs == ('i386', 'x86_64'):
    -                machine = 'intel'
    -            elif archs == ('i386', 'ppc', 'x86_64'):
    -                machine = 'fat3'
    -            elif archs == ('ppc64', 'x86_64'):
    -                machine = 'fat64'
    -            elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
    -                machine = 'universal'
    -            else:
    -                raise ValueError(
    -                   "Don't know machine value for archs=%r" % (archs,))
    -
    -        elif machine == 'i386':
    -            # On OSX the machine type returned by uname is always the
    -            # 32-bit variant, even if the executable architecture is
    -            # the 64-bit variant
    -            if sys.maxsize >= 2**32:
    -                machine = 'x86_64'
    -
    -        elif machine in ('PowerPC', 'Power_Macintosh'):
    -            # Pick a sane name for the PPC architecture.
    -            # See 'i386' case
    -            if sys.maxsize >= 2**32:
    -                machine = 'ppc64'
    -            else:
    -                machine = 'ppc'
    -
    -    return (osname, release, machine)
    diff --git a/lib-python/3/test/crashers/trace_at_recursion_limit.py b/lib-python/3/test/crashers/trace_at_recursion_limit.py
    deleted file mode 100644
    --- a/lib-python/3/test/crashers/trace_at_recursion_limit.py
    +++ /dev/null
    @@ -1,27 +0,0 @@
    -"""
    -From http://bugs.python.org/issue6717
    -
    -A misbehaving trace hook can trigger a segfault by exceeding the recursion
    -limit.
    -"""
    -import sys
    -
    -
    -def x():
    -    pass
    -
    -def g(*args):
    -    if True: # change to True to crash interpreter
    -        try:
    -            x()
    -        except:
    -            pass
    -    return g
    -
    -def f():
    -    print(sys.getrecursionlimit())
    -    f()
    -
    -sys.settrace(g)
    -
    -f()
    diff --git a/lib-python/3/test/json_tests/test_tool.py b/lib-python/3/test/json_tests/test_tool.py
    deleted file mode 100644
    --- a/lib-python/3/test/json_tests/test_tool.py
    +++ /dev/null
    @@ -1,69 +0,0 @@
    -import os
    -import sys
    -import textwrap
    -import unittest
    -import subprocess
    -from test import support
    -from test.script_helper import assert_python_ok
    -
    -class TestTool(unittest.TestCase):
    -    data = """
    -
    -        [["blorpie"],[ "whoops" ] , [
    -                                 ],\t"d-shtaeou",\r"d-nthiouh",
    -        "i-vhbjkhnth", {"nifty":87}, {"morefield" :\tfalse,"field"
    -            :"yes"}  ]
    -           """
    -
    -    expect = textwrap.dedent("""\
    -    [
    -        [
    -            "blorpie"
    -        ],
    -        [
    -            "whoops"
    -        ],
    -        [],
    -        "d-shtaeou",
    -        "d-nthiouh",
    -        "i-vhbjkhnth",
    -        {
    -            "nifty": 87
    -        },
    -        {
    -            "field": "yes",
    -            "morefield": false
    -        }
    -    ]
    -    """)
    -
    -    def test_stdin_stdout(self):
    -        with subprocess.Popen(
    -                (sys.executable, '-m', 'json.tool'),
    -                stdin=subprocess.PIPE, stdout=subprocess.PIPE) as proc:
    -            out, err = proc.communicate(self.data.encode())
    -        self.assertEqual(out.splitlines(), self.expect.encode().splitlines())
    -        self.assertEqual(err, None)
    -
    -    def _create_infile(self):
    -        infile = support.TESTFN
    -        with open(infile, "w") as fp:
    -            self.addCleanup(os.remove, infile)
    -            fp.write(self.data)
    -        return infile
    -
    -    def test_infile_stdout(self):
    -        infile = self._create_infile()
    -        rc, out, err = assert_python_ok('-m', 'json.tool', infile)
    -        self.assertEqual(out.splitlines(), self.expect.encode().splitlines())
    -        self.assertEqual(err, b'')
    -
    -    def test_infile_outfile(self):
    -        infile = self._create_infile()
    -        outfile = support.TESTFN + '.out'
    -        rc, out, err = assert_python_ok('-m', 'json.tool', infile, outfile)
    -        self.addCleanup(os.remove, outfile)
    -        with open(outfile, "r") as fp:
    -            self.assertEqual(fp.read(), self.expect)
    -        self.assertEqual(out, b'')
    -        self.assertEqual(err, b'')
    diff --git a/lib-python/3/test/mp_fork_bomb.py b/lib-python/3/test/mp_fork_bomb.py
    deleted file mode 100644
    --- a/lib-python/3/test/mp_fork_bomb.py
    +++ /dev/null
    @@ -1,18 +0,0 @@
    -import multiprocessing, sys
    -
    -def foo():
    -    print("123")
    -
    -# Because "if __name__ == '__main__'" is missing this will not work
    -# correctly on Windows.  However, we should get a RuntimeError rather
    -# than the Windows equivalent of a fork bomb.
    -
    -if len(sys.argv) > 1:
    -    multiprocessing.set_start_method(sys.argv[1])
    -else:
    -    multiprocessing.set_start_method('spawn')
    -
    -p = multiprocessing.Process(target=foo)
    -p.start()
    -p.join()
    -sys.exit(p.exitcode)
    diff --git a/lib-python/3/test/sample_doctest_no_docstrings.py b/lib-python/3/test/sample_doctest_no_docstrings.py
    deleted file mode 100644
    --- a/lib-python/3/test/sample_doctest_no_docstrings.py
    +++ /dev/null
    @@ -1,12 +0,0 @@
    -# This is a sample module used for testing doctest.
    -#
    -# This module is for testing how doctest handles a module with no
    -# docstrings.
    -
    -
    -class Foo(object):
    -
    -    # A class with no docstring.
    -
    -    def __init__(self):
    -        pass
    diff --git a/lib-python/3/test/sample_doctest_no_doctests.py b/lib-python/3/test/sample_doctest_no_doctests.py
    deleted file mode 100644
    --- a/lib-python/3/test/sample_doctest_no_doctests.py
    +++ /dev/null
    @@ -1,15 +0,0 @@
    -"""This is a sample module used for testing doctest.
    -
    -This module is for testing how doctest handles a module with docstrings
    -but no doctest examples.
    -
    -"""
    -
    -
    -class Foo(object):
    -    """A docstring with no doctest examples.
    -
    -    """
    -
    -    def __init__(self):
    -        pass
    diff --git a/lib-python/3/test/test__osx_support.py b/lib-python/3/test/test__osx_support.py
    deleted file mode 100644
    --- a/lib-python/3/test/test__osx_support.py
    +++ /dev/null
    @@ -1,276 +0,0 @@
    -"""
    -Test suite for _osx_support: shared OS X support functions.
    -"""
    -
    -import os
    -import platform
    -import stat
    -import sys
    -import unittest
    -
    -import test.support
    -
    -import _osx_support
    -
-@unittest.skipUnless(sys.platform.startswith("darwin"), "requires OS X")
    -class Test_OSXSupport(unittest.TestCase):
    -
    -    def setUp(self):
    -        self.maxDiff = None
    -        self.prog_name = 'bogus_program_xxxx'
    -        self.temp_path_dir = os.path.abspath(os.getcwd())
    -        self.env = test.support.EnvironmentVarGuard()
    -        self.addCleanup(self.env.__exit__)
    -        for cv in ('CFLAGS', 'LDFLAGS', 'CPPFLAGS',
    -                            'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'CC',
    -                            'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
    -                            'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS'):
    -            if cv in self.env:
    -                self.env.unset(cv)
    -
    -    def add_expected_saved_initial_values(self, config_vars, expected_vars):
    -        # Ensure that the initial values for all modified config vars
    -        # are also saved with modified keys.
    -        expected_vars.update(('_OSX_SUPPORT_INITIAL_'+ k,
    -                config_vars[k]) for k in config_vars
    -                    if config_vars[k] != expected_vars[k])
    -
    -    def test__find_executable(self):
    -        if self.env['PATH']:
    -            self.env['PATH'] = self.env['PATH'] + ':'
    -        self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir)
    -        test.support.unlink(self.prog_name)
    -        self.assertIsNone(_osx_support._find_executable(self.prog_name))
    -        self.addCleanup(test.support.unlink, self.prog_name)
    -        with open(self.prog_name, 'w') as f:
    -            f.write("#!/bin/sh\n/bin/echo OK\n")
    -        os.chmod(self.prog_name, stat.S_IRWXU)
    -        self.assertEqual(self.prog_name,
    -                            _osx_support._find_executable(self.prog_name))
    -
    -    def test__read_output(self):
    -        if self.env['PATH']:
    -            self.env['PATH'] = self.env['PATH'] + ':'
    -        self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir)
    -        test.support.unlink(self.prog_name)
    -        self.addCleanup(test.support.unlink, self.prog_name)
    -        with open(self.prog_name, 'w') as f:
    -            f.write("#!/bin/sh\n/bin/echo ExpectedOutput\n")
    -        os.chmod(self.prog_name, stat.S_IRWXU)
    -        self.assertEqual('ExpectedOutput',
    -                            _osx_support._read_output(self.prog_name))
    -
    -    def test__find_build_tool(self):
    -        out = _osx_support._find_build_tool('cc')
    -        self.assertTrue(os.path.isfile(out),
    -                            'cc not found - check xcode-select')
    -
    -    def test__get_system_version(self):
    -        self.assertTrue(platform.mac_ver()[0].startswith(
    -                                    _osx_support._get_system_version()))
    -
    -    def test__remove_original_values(self):
    -        config_vars = {
    -        'CC': 'gcc-test -pthreads',
    -        }
    -        expected_vars = {
    -        'CC': 'clang -pthreads',
    -        }
    -        cv = 'CC'
    -        newvalue = 'clang -pthreads'
    -        _osx_support._save_modified_value(config_vars, cv, newvalue)
    -        self.assertNotEqual(expected_vars, config_vars)
    -        _osx_support._remove_original_values(config_vars)
    -        self.assertEqual(expected_vars, config_vars)
    -
    -    def test__save_modified_value(self):
    -        config_vars = {
    -        'CC': 'gcc-test -pthreads',
    -        }
    -        expected_vars = {
    -        'CC': 'clang -pthreads',
    -        }
    -        self.add_expected_saved_initial_values(config_vars, expected_vars)
    -        cv = 'CC'
    -        newvalue = 'clang -pthreads'
    -        _osx_support._save_modified_value(config_vars, cv, newvalue)
    -        self.assertEqual(expected_vars, config_vars)
    -
    -    def test__save_modified_value_unchanged(self):
    -        config_vars = {
    -        'CC': 'gcc-test -pthreads',
    -        }
    -        expected_vars = config_vars.copy()
    -        cv = 'CC'
    -        newvalue = 'gcc-test -pthreads'
    -        _osx_support._save_modified_value(config_vars, cv, newvalue)
    -        self.assertEqual(expected_vars, config_vars)
    -
    -    def test__supports_universal_builds(self):
    -        import platform
    -        mac_ver_tuple = tuple(int(i) for i in
    -                            platform.mac_ver()[0].split('.')[0:2])
    -        self.assertEqual(mac_ver_tuple >= (10, 4),
    -                            _osx_support._supports_universal_builds())
    -
    -    def test__find_appropriate_compiler(self):
    -        compilers = (
    -                        ('gcc-test', 'i686-apple-darwin11-llvm-gcc-4.2'),
    -                        ('clang', 'clang version 3.1'),
    -                    )
    -        config_vars = {
    -        'CC': 'gcc-test -pthreads',
    -        'CXX': 'cc++-test',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'gcc-test -bundle -arch ppc -arch i386 -g',
    -        'LDSHARED': 'gcc-test -bundle -arch ppc -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    -        }
    -        expected_vars = {
    -        'CC': 'clang -pthreads',
    -        'CXX': 'clang++',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'clang -bundle -arch ppc -arch i386 -g',
    -        'LDSHARED': 'clang -bundle -arch ppc -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    -        }
    -        self.add_expected_saved_initial_values(config_vars, expected_vars)
    -
    -        suffix = (':' + self.env['PATH']) if self.env['PATH'] else ''
    -        self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix
    -        for c_name, c_output in compilers:
    -            test.support.unlink(c_name)
    -            self.addCleanup(test.support.unlink, c_name)
    -            with open(c_name, 'w') as f:
    -                f.write("#!/bin/sh\n/bin/echo " + c_output)
    -            os.chmod(c_name, stat.S_IRWXU)
    -        self.assertEqual(expected_vars,
    -                            _osx_support._find_appropriate_compiler(
    -                                    config_vars))
    -
    -    def test__remove_universal_flags(self):
    -        config_vars = {
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
    -        'LDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    -        }
    -        expected_vars = {
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3    ',
    -        'LDFLAGS': '    -g',
    -        'CPPFLAGS': '-I.  ',
    -        'BLDSHARED': 'gcc-4.0 -bundle    -g',
    -        'LDSHARED': 'gcc-4.0 -bundle      -g',
    -        }
    -        self.add_expected_saved_initial_values(config_vars, expected_vars)
    -
    -        self.assertEqual(expected_vars,
    -                            _osx_support._remove_universal_flags(
    -                                    config_vars))
    -
    -    def test__remove_unsupported_archs(self):
    -        config_vars = {
    -        'CC': 'clang',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
    -        'LDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    -        }
    -        expected_vars = {
    -        'CC': 'clang',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3  -arch i386  ',
    -        'LDFLAGS': ' -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'gcc-4.0 -bundle   -arch i386 -g',
    -        'LDSHARED': 'gcc-4.0 -bundle   -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    -        }
    -        self.add_expected_saved_initial_values(config_vars, expected_vars)
    -
    -        suffix = (':' + self.env['PATH']) if self.env['PATH'] else ''
    -        self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix
    -        c_name = 'clang'
    -        test.support.unlink(c_name)
    -        self.addCleanup(test.support.unlink, c_name)
    -        # exit status 255 means no PPC support in this compiler chain
    -        with open(c_name, 'w') as f:
    -            f.write("#!/bin/sh\nexit 255")
    -        os.chmod(c_name, stat.S_IRWXU)
    -        self.assertEqual(expected_vars,
    -                            _osx_support._remove_unsupported_archs(
    -                                    config_vars))
    -
    -    def test__override_all_archs(self):
    -        self.env['ARCHFLAGS'] = '-arch x86_64'
    -        config_vars = {
    -        'CC': 'clang',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
    -        'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
    -        }
    -        expected_vars = {
    -        'CC': 'clang',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3     -arch x86_64',
    -        'LDFLAGS': '    -g -arch x86_64',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
    -        'BLDSHARED': 'gcc-4.0 -bundle    -g -arch x86_64',
    -        'LDSHARED': 'gcc-4.0 -bundle   -isysroot '
    -                        '/Developer/SDKs/MacOSX10.4u.sdk -g -arch x86_64',
    -        }
    -        self.add_expected_saved_initial_values(config_vars, expected_vars)
    -
    -        self.assertEqual(expected_vars,
    -                            _osx_support._override_all_archs(
    -                                    config_vars))
    -
    -    def test__check_for_unavailable_sdk(self):
    -        config_vars = {
    -        'CC': 'clang',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  '
    -                        '-isysroot /Developer/SDKs/MacOSX10.1.sdk',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.1.sdk',
    -        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
    -        'LDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 '
    -                        '-isysroot /Developer/SDKs/MacOSX10.1.sdk -g',
    -        }
    -        expected_vars = {
    -        'CC': 'clang',
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  '
    -                        ' ',
    -        'LDFLAGS': '-arch ppc -arch i386   -g',
    -        'CPPFLAGS': '-I.  ',
    -        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
    -        'LDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 '
    -                        ' -g',
    -        }
    -        self.add_expected_saved_initial_values(config_vars, expected_vars)
    -
    -        self.assertEqual(expected_vars,
    -                            _osx_support._check_for_unavailable_sdk(
    -                                    config_vars))
    -
    -    def test_get_platform_osx(self):
    -        # Note, get_platform_osx is currently tested more extensively
    -        # indirectly by test_sysconfig and test_distutils
    -        config_vars = {
    -        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  '
    -                        '-isysroot /Developer/SDKs/MacOSX10.1.sdk',
    -        'MACOSX_DEPLOYMENT_TARGET': '10.6',
    -        }
    -        result = _osx_support.get_platform_osx(config_vars, ' ', ' ', ' ')
    -        self.assertEqual(('macosx', '10.6', 'fat'), result)
    -
    -if __name__ == "__main__":
    -    unittest.main()
    diff --git a/lib-python/3/test/test_file_eintr.py b/lib-python/3/test/test_file_eintr.py
    deleted file mode 100644
    --- a/lib-python/3/test/test_file_eintr.py
    +++ /dev/null
    @@ -1,252 +0,0 @@
    -# Written to test interrupted system calls interfering with our many buffered
    -# IO implementations.  http://bugs.python.org/issue12268
    -#
    -# It was suggested that this code could be merged into test_io and the tests
    -# made to work using the same method as the existing signal tests in test_io.
    -# I was unable to get single process tests using alarm or setitimer that way
    -# to reproduce the EINTR problems.  This process based test suite reproduces
    -# the problems prior to the issue12268 patch reliably on Linux and OSX.
    -#  - gregory.p.smith
    -
    -import os
    -import select
    -import signal
    -import subprocess
    -import sys
    -import time
    -import unittest
    -
    -# Test import all of the things we're about to try testing up front.
    -import _io
    -import _pyio
    -
    -
-@unittest.skipUnless(os.name == 'posix', 'tests requires a posix system.')
    -class TestFileIOSignalInterrupt:
    -    def setUp(self):
    -        self._process = None
    -
    -    def tearDown(self):
    -        if self._process and self._process.poll() is None:
    -            try:
    -                self._process.kill()
    -            except OSError:
    -                pass
    -
    -    def _generate_infile_setup_code(self):
    -        """Returns the infile = ... line of code for the reader process.
    -
    -        subclasseses should override this to test different IO objects.
    -        """
    -        return ('import %s as io ;'
    -                'infile = io.FileIO(sys.stdin.fileno(), "rb")' %
    -                self.modname)
    -
    -    def fail_with_process_info(self, why, stdout=b'', stderr=b'',
    -                               communicate=True):
    -        """A common way to cleanup and fail with useful debug output.
    -
    -        Kills the process if it is still running, collects remaining output
    -        and fails the test with an error message including the output.
    -
    -        Args:
    -            why: Text to go after "Error from IO process" in the message.
    -            stdout, stderr: standard output and error from the process so
    -                far to include in the error message.
    -            communicate: bool, when True we call communicate() on the process
    -                after killing it to gather additional output.
    -        """
    -        if self._process.poll() is None:
    -            time.sleep(0.1)  # give it time to finish printing the error.
    -            try:
    -                self._process.terminate()  # Ensure it dies.
    -            except OSError:
    -                pass
    -        if communicate:
    -            stdout_end, stderr_end = self._process.communicate()
    -            stdout += stdout_end
    -            stderr += stderr_end
    -        self.fail('Error from IO process %s:\nSTDOUT:\n%sSTDERR:\n%s\n' %
    -                  (why, stdout.decode(), stderr.decode()))
    -
    -    def _test_reading(self, data_to_write, read_and_verify_code):
    -        """Generic buffered read method test harness to validate EINTR behavior.
    -
    -        Also validates that Python signal handlers are run during the read.
    -
    -        Args:
    -            data_to_write: String to write to the child process for reading
    -                before sending it a signal, confirming the signal was handled,
    -                writing a final newline and closing the infile pipe.
    -            read_and_verify_code: Single "line" of code to read from a file
    -                object named 'infile' and validate the result.  This will be
    -                executed as part of a python subprocess fed data_to_write.
    -        """
    -        infile_setup_code = self._generate_infile_setup_code()
    -        # Total pipe IO in this function is smaller than the minimum posix OS
    -        # pipe buffer size of 512 bytes.  No writer should block.
    -        assert len(data_to_write) < 512, 'data_to_write must fit in pipe buf.'
    -
    -        # Start a subprocess to call our read method while handling a signal.
    -        self._process = subprocess.Popen(
    -                [sys.executable, '-u', '-c',
    -                 'import signal, sys ;'
    -                 'signal.signal(signal.SIGINT, '
    -                               'lambda s, f: sys.stderr.write("$\\n")) ;'
    -                 + infile_setup_code + ' ;' +
    -                 'sys.stderr.write("Worm Sign!\\n") ;'
    -                 + read_and_verify_code + ' ;' +
    -                 'infile.close()'
    -                ],
    -                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
    -                stderr=subprocess.PIPE)
    -
    -        # Wait for the signal handler to be installed.
    -        worm_sign = self._process.stderr.read(len(b'Worm Sign!\n'))
    -        if worm_sign != b'Worm Sign!\n':  # See also, Dune by Frank Herbert.
    -            self.fail_with_process_info('while awaiting a sign',
    -                                        stderr=worm_sign)
    -        self._process.stdin.write(data_to_write)
    -
    -        signals_sent = 0
    -        rlist = []
    -        # We don't know when the read_and_verify_code in our child is actually
    -        # executing within the read system call we want to interrupt.  This
    -        # loop waits for a bit before sending the first signal to increase
    -        # the likelihood of that.  Implementations without correct EINTR
    -        # and signal handling usually fail this test.
    -        while not rlist:
    -            rlist, _, _ = select.select([self._process.stderr], (), (), 0.05)
    -            self._process.send_signal(signal.SIGINT)
    -            signals_sent += 1
    -            if signals_sent > 200:
    -                self._process.kill()
    -                self.fail('reader process failed to handle our signals.')
    -        # This assumes anything unexpected that writes to stderr will also
    -        # write a newline.  That is true of the traceback printing code.
    -        signal_line = self._process.stderr.readline()
    -        if signal_line != b'$\n':
    -            self.fail_with_process_info('while awaiting signal',
    -                                        stderr=signal_line)
    -
    -        # We append a newline to our input so that a readline call can
    -        # end on its own before the EOF is seen and so that we're testing
    -        # the read call that was interrupted by a signal before the end of
    -        # the data stream has been reached.
    -        stdout, stderr = self._process.communicate(input=b'\n')
    -        if self._process.returncode:
    -            self.fail_with_process_info(
    -                    'exited rc=%d' % self._process.returncode,
    -                    stdout, stderr, communicate=False)
    -        # PASS!
    -
    -    # String format for the read_and_verify_code used by read methods.
    -    _READING_CODE_TEMPLATE = (
    -            'got = infile.{read_method_name}() ;'
    -            'expected = {expected!r} ;'
    -            'assert got == expected, ('
    -                    '"{read_method_name} returned wrong data.\\n"'
    -                    '"got data %r\\nexpected %r" % (got, expected))'
    -            )
    -
    -    def test_readline(self):
    -        """readline() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello, world!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='readline',
    -                        expected=b'hello, world!\n'))
    -
    -    def test_readlines(self):
    -        """readlines() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello\nworld!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='readlines',
    -                        expected=[b'hello\n', b'world!\n']))
    -
    -    def test_readall(self):
    -        """readall() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello\nworld!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='readall',
    -                        expected=b'hello\nworld!\n'))
    -        # read() is the same thing as readall().
    -        self._test_reading(
    -                data_to_write=b'hello\nworld!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='read',
    -                        expected=b'hello\nworld!\n'))
    -
    -
    -class CTestFileIOSignalInterrupt(TestFileIOSignalInterrupt, unittest.TestCase):
    -    modname = '_io'
    -
    -class PyTestFileIOSignalInterrupt(TestFileIOSignalInterrupt, unittest.TestCase):
    -    modname = '_pyio'
    -
    -
    -class TestBufferedIOSignalInterrupt(TestFileIOSignalInterrupt):
    -    def _generate_infile_setup_code(self):
    -        """Returns the infile = ... line of code to make a BufferedReader."""
    -        return ('import %s as io ;infile = io.open(sys.stdin.fileno(), "rb") ;'
    -                'assert isinstance(infile, io.BufferedReader)' %
    -                self.modname)
    -
    -    def test_readall(self):
    -        """BufferedReader.read() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello\nworld!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='read',
    -                        expected=b'hello\nworld!\n'))
    -
    -class CTestBufferedIOSignalInterrupt(TestBufferedIOSignalInterrupt, unittest.TestCase):
    -    modname = '_io'
    -
    -class PyTestBufferedIOSignalInterrupt(TestBufferedIOSignalInterrupt, unittest.TestCase):
    -    modname = '_pyio'
    -
    -
    -class TestTextIOSignalInterrupt(TestFileIOSignalInterrupt):
    -    def _generate_infile_setup_code(self):
    -        """Returns the infile = ... line of code to make a TextIOWrapper."""
    -        return ('import %s as io ;'
    -                'infile = io.open(sys.stdin.fileno(), "rt", newline=None) ;'
    -                'assert isinstance(infile, io.TextIOWrapper)' %
    -                self.modname)
    -
    -    def test_readline(self):
    -        """readline() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello, world!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='readline',
    -                        expected='hello, world!\n'))
    -
    -    def test_readlines(self):
    -        """readlines() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello\r\nworld!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='readlines',
    -                        expected=['hello\n', 'world!\n']))
    -
    -    def test_readall(self):
    -        """read() must handle signals and not lose data."""
    -        self._test_reading(
    -                data_to_write=b'hello\nworld!',
    -                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
    -                        read_method_name='read',
    -                        expected="hello\nworld!\n"))
    -
    -class CTestTextIOSignalInterrupt(TestTextIOSignalInterrupt, unittest.TestCase):
    -    modname = '_io'
    -
    -class PyTestTextIOSignalInterrupt(TestTextIOSignalInterrupt, unittest.TestCase):
    -    modname = '_pyio'
    -
    -
    -if __name__ == '__main__':
    -    unittest.main()
    diff --git a/lib-python/3/tkinter/test/test_tkinter/test_misc.py b/lib-python/3/tkinter/test/test_tkinter/test_misc.py
    deleted file mode 100644
    --- a/lib-python/3/tkinter/test/test_tkinter/test_misc.py
    +++ /dev/null
    @@ -1,163 +0,0 @@
    -import unittest
    -import tkinter
    -from test import support
    -from tkinter.test.support import AbstractTkTest
    -
    -support.requires('gui')
    -
    -class MiscTest(AbstractTkTest, unittest.TestCase):
    -
    -    def test_repr(self):
    -        t = tkinter.Toplevel(self.root, name='top')
    -        f = tkinter.Frame(t, name='child')
-        self.assertEqual(repr(f), '<tkinter.Frame object .top.child>')
    -
    -    def test_generated_names(self):
    -        t = tkinter.Toplevel(self.root)
    -        f = tkinter.Frame(t)
    -        f2 = tkinter.Frame(t)
    -        b = tkinter.Button(f2)
    -        for name in str(b).split('.'):
    -            self.assertFalse(name.isidentifier(), msg=repr(name))
    -
    -    def test_tk_setPalette(self):
    -        root = self.root
    -        root.tk_setPalette('black')
    -        self.assertEqual(root['background'], 'black')
    -        root.tk_setPalette('white')
    -        self.assertEqual(root['background'], 'white')
    -        self.assertRaisesRegex(tkinter.TclError,
    -                '^unknown color name "spam"$',
    -                root.tk_setPalette, 'spam')
    -
    -        root.tk_setPalette(background='black')
    -        self.assertEqual(root['background'], 'black')
    -        root.tk_setPalette(background='blue', highlightColor='yellow')
    -        self.assertEqual(root['background'], 'blue')
    -        self.assertEqual(root['highlightcolor'], 'yellow')
    -        root.tk_setPalette(background='yellow', highlightColor='blue')
    -        self.assertEqual(root['background'], 'yellow')
    -        self.assertEqual(root['highlightcolor'], 'blue')
    -        self.assertRaisesRegex(tkinter.TclError,
    -                '^unknown color name "spam"$',
    -                root.tk_setPalette, background='spam')
    -        self.assertRaisesRegex(tkinter.TclError,
    -                '^must specify a background color$',
    -                root.tk_setPalette, spam='white')
    -        self.assertRaisesRegex(tkinter.TclError,
    -                '^must specify a background color$',
    -                root.tk_setPalette, highlightColor='blue')
    -
    -    def test_after(self):
    -        root = self.root
    -
    -        def callback(start=0, step=1):
    -            nonlocal count
    -            count = start + step
    -
    -        # Without function, sleeps for ms.
    -        self.assertIsNone(root.after(1))
    -
    -        # Set up with callback with no args.
    -        count = 0
    -        timer1 = root.after(0, callback)
    -        self.assertIn(timer1, root.tk.call('after', 'info'))
    -        (script, _) = root.tk.splitlist(root.tk.call('after', 'info', timer1))
    -        root.update()  # Process all pending events.
    -        self.assertEqual(count, 1)
    -        with self.assertRaises(tkinter.TclError):
    -            root.tk.call(script)
    -
    -        # Set up with callback with args.
    -        count = 0
    -        timer1 = root.after(0, callback, 42, 11)
    -        root.update()  # Process all pending events.
    -        self.assertEqual(count, 53)
    -
    -        # Cancel before called.
    -        timer1 = root.after(1000, callback)
    -        self.assertIn(timer1, root.tk.call('after', 'info'))
    -        (script, _) = root.tk.splitlist(root.tk.call('after', 'info', timer1))
    -        root.after_cancel(timer1)  # Cancel this event.
    -        self.assertEqual(count, 53)
    -        with self.assertRaises(tkinter.TclError):
    -            root.tk.call(script)
    -
    -    def test_after_idle(self):
    -        root = self.root
    -
    -        def callback(start=0, step=1):
    -            nonlocal count
    -            count = start + step
    -
    -        # Set up with callback with no args.
    -        count = 0
    -        idle1 = root.after_idle(callback)
    -        self.assertIn(idle1, root.tk.call('after', 'info'))
    -        (script, _) = root.tk.splitlist(root.tk.call('after', 'info', idle1))
    -        root.update_idletasks()  # Process all pending events.
    -        self.assertEqual(count, 1)
    -        with self.assertRaises(tkinter.TclError):
    -            root.tk.call(script)
    -
    -        # Set up with callback with args.
    -        count = 0
    -        idle1 = root.after_idle(callback, 42, 11)
    -        root.update_idletasks()  # Process all pending events.
    -        self.assertEqual(count, 53)
    -
    -        # Cancel before called.
    -        idle1 = root.after_idle(callback)
    -        self.assertIn(idle1, root.tk.call('after', 'info'))
    -        (script, _) = root.tk.splitlist(root.tk.call('after', 'info', idle1))
    -        root.after_cancel(idle1)  # Cancel this event.
    -        self.assertEqual(count, 53)
    -        with self.assertRaises(tkinter.TclError):
    -            root.tk.call(script)
    -
    -    def test_after_cancel(self):
    -        root = self.root
    -
    -        def callback():
    -            nonlocal count
    -            count += 1
    -
    -        timer1 = root.after(5000, callback)
    -        idle1 = root.after_idle(callback)
    -
    -        # No value for id raises a ValueError.
    -        with self.assertRaises(ValueError):
    -            root.after_cancel(None)
    -
    -        # Cancel timer event.
    -        count = 0
    -        (script, _) = root.tk.splitlist(root.tk.call('after', 'info', timer1))
    -        root.tk.call(script)
    -        self.assertEqual(count, 1)
    -        root.after_cancel(timer1)
    -        with self.assertRaises(tkinter.TclError):
    -            root.tk.call(script)
    -        self.assertEqual(count, 1)
    -        with self.assertRaises(tkinter.TclError):
    -            root.tk.call('after', 'info', timer1)
    -
    -        # Cancel same event - nothing happens.
    -        root.after_cancel(timer1)
    -
    -        # Cancel idle event.
    -        count = 0
    -        (script, _) = root.tk.splitlist(root.tk.call('after', 'info', idle1))
    -        root.tk.call(script)
    -        self.assertEqual(count, 1)
    -        root.after_cancel(idle1)
    -        with self.assertRaises(tkinter.TclError):
    -            root.tk.call(script)
    -        self.assertEqual(count, 1)
    -        with self.assertRaises(tkinter.TclError):
    -            root.tk.call('after', 'info', idle1)
    -
    -
    -tests_gui = (MiscTest, )
    -
    -if __name__ == "__main__":
    -    support.run_unittest(*tests_gui)
    
    From pypy.commits at gmail.com  Mon Aug 26 11:27:53 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 08:27:53 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6-sandbox-2: hg merge sandbox-2
    Message-ID: <5d63fa79.1c69fb81.7a302.38d8@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6-sandbox-2
    Changeset: r97274:842a1a1386b0
    Date: 2019-08-26 17:24 +0200
    http://bitbucket.org/pypy/pypy/changeset/842a1a1386b0/
    
    Log:	hg merge sandbox-2
    
    diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
    --- a/pypy/module/array/interp_array.py
    +++ b/pypy/module/array/interp_array.py
    @@ -920,9 +920,11 @@
             w_array._charbuf_stop()
             return char
     
    +    @sandbox_review(reviewed=True)
         def setitem(self, index, char):
             w_array = self.w_array
             data = w_array._charbuf_start()
    +        assert 0 <= index < w_array.len
             data[index] = char
             w_array._charbuf_stop()
     
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -1482,6 +1482,7 @@
             calling_conv='win')
     
     
+@sandbox_review(reviewed=True)
     @replace_os_function('utime')
     @specialize.argtype(0, 1)
     def utime(path, times):
    @@ -1540,11 +1541,13 @@
                 lltype.free(atime, flavor='raw')
                 lltype.free(mtime, flavor='raw')
     
+@sandbox_review(check_caller=True)
     def times_to_timeval2p(times, l_timeval2p):
         actime, modtime = times
         _time_to_timeval(actime, l_timeval2p[0])
         _time_to_timeval(modtime, l_timeval2p[1])
     
+@sandbox_review(check_caller=True)
     def _time_to_timeval(t, l_timeval):
         import math
         fracpart, intpart = math.modf(t)
    @@ -2255,6 +2258,7 @@
             [rffi.CCHARP, TIMEVAL2P], rffi.INT,
             save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         @specialize.argtype(1)
         def lutimes(pathname, times):
             if times is None:
    @@ -2270,6 +2274,7 @@
             [rffi.INT, TIMEVAL2P], rffi.INT,
             save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         @specialize.argtype(1)
         def futimes(fd, times):
             if times is None:
    diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py
    --- a/rpython/rtyper/lltypesystem/rffi.py
    +++ b/rpython/rtyper/lltypesystem/rffi.py
    @@ -1450,6 +1450,7 @@
     # NOTE: This is not a weak key dictionary, thus keeping a lot of stuff alive.
     TEST_RAW_ADDR_KEEP_ALIVE = {}
     
+@sandbox_review(reviewed=True)
     @jit.dont_look_inside
     def get_raw_address_of_string(string):
         """Returns a 'char *' that is valid as long as the rpython string object is alive.
    diff --git a/rpython/translator/sandbox/graphchecker.py b/rpython/translator/sandbox/graphchecker.py
    --- a/rpython/translator/sandbox/graphchecker.py
    +++ b/rpython/translator/sandbox/graphchecker.py
    @@ -21,7 +21,8 @@
         'malloc', 'malloc_varsize', 'free',
         'getfield', 'getarrayitem', 'getinteriorfield', 'raw_load',
         'cast_opaque_ptr', 'cast_ptr_to_int',
    -    'gc_thread_run', 'gc_stack_bottom', 'gc_thread_after_fork',
    +    'gc_thread_run', 'gc_stack_bottom',
    +    'gc_thread_before_fork', 'gc_thread_after_fork',
         'shrink_array', 'gc_pin', 'gc_unpin', 'gc_can_move', 'gc_id',
         'gc_identityhash', 'weakref_create', 'weakref_deref',
         'gc_fq_register', 'gc_fq_next_dead',
    @@ -103,8 +104,6 @@
     
                 elif opname in ('cast_ptr_to_adr', 'force_cast',
                                 'cast_int_to_ptr'):
    -                if is_gc_ptr(op.args[0].concretetype):
    -                    return "argument is a GC ptr: %r" % (opname,)
                     if is_gc_ptr(op.result.concretetype):
                         return "result is a GC ptr: %r" % (opname,)
     
    
    From pypy.commits at gmail.com  Mon Aug 26 12:06:22 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 09:06:22 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: With either the sandbox or the
     reverse-debugger, the hash used
    Message-ID: <5d64037e.1c69fb81.2e807.f73e@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6
    Changeset: r97275:6bb742c50a9f
    Date: 2019-08-26 17:53 +0200
    http://bitbucket.org/pypy/pypy/changeset/6bb742c50a9f/
    
    Log:	With either the sandbox or the reverse-debugger, the hash used in
    	pypy3 needs to be the old 'fnv' hash instead of 'siphash24'. Make it
    	explicit in this way instead of ignoring the call to
    	enable_siphash24() and reporting wrong data in module.sys.vm.
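
The practical difference: siphash24 mixes a per-process random key into every
string hash, so hash values change from run to run, which a reverse-debugger
replaying a recorded execution (and, presumably, a sandboxed process that cannot
read a random seed at startup) cannot reproduce.  A rough sketch of an unkeyed,
reproducible string hash of the kind the 'fnv' setting forced below selects --
plain Python, with an illustrative multiplier rather than RPython's exact
algorithm:

    def deterministic_str_hash(s):
        # unkeyed multiplicative hash: same input -> same value in every run;
        # the constant is illustrative, not necessarily what RPython uses
        if not s:
            return 0
        x = ord(s[0]) << 7
        for ch in s:
            x = ((1000003 * x) ^ ord(ch)) & 0xFFFFFFFFFFFFFFFF
        return x ^ len(s)

    assert deterministic_str_hash("abc") == deterministic_str_hash("abc")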
    
    diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
    --- a/pypy/config/pypyoption.py
    +++ b/pypy/config/pypyoption.py
    @@ -267,6 +267,9 @@
         if level == 'jit':
             pass # none at the moment
     
    +    if config.translation.sandbox or config.translation.reverse_debugger:
    +        config.objspace.hash = "fnv"
    +
     
     def enable_allworkingmodules(config):
         modules = working_modules.copy()
    
    From pypy.commits at gmail.com  Mon Aug 26 12:06:24 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 09:06:24 -0700 (PDT)
    Subject: [pypy-commit] pypy default: Forbid calls to enable_siphash24() in
     sandbox or revdb mode
    Message-ID: <5d640380.1c69fb81.1b631.3aa5@mx.google.com>
    
    Author: Armin Rigo 
    Branch: 
    Changeset: r97276:56e99531241f
    Date: 2019-08-26 17:57 +0200
    http://bitbucket.org/pypy/pypy/changeset/56e99531241f/
    
    Log:	Forbid calls to enable_siphash24() in sandbox or revdb mode
    
    diff --git a/rpython/rlib/rsiphash.py b/rpython/rlib/rsiphash.py
    --- a/rpython/rlib/rsiphash.py
    +++ b/rpython/rlib/rsiphash.py
    @@ -123,8 +123,11 @@
     
         def compute_result_annotation(self):
             translator = self.bookkeeper.annotator.translator
    -        if translator.config.translation.reverse_debugger:
    -            return    # ignore and use the regular hash, with reverse-debugger
    +        # you should not call enable_siphash24() when translating with the
    +        # reverse-debugger, or with sandbox.
    +        assert not translator.config.translation.reverse_debugger
    +        assert not translator.config.translation.sandbox
    +        #
             if hasattr(translator, 'll_hash_string'):
                 assert translator.ll_hash_string == ll_hash_string_siphash24
             else:
    
    From pypy.commits at gmail.com  Mon Aug 26 12:06:26 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 09:06:26 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6-sandbox-2: fixes
    Message-ID: <5d640382.1c69fb81.aaea0.c452@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6-sandbox-2
    Changeset: r97277:fc77f4cd4352
    Date: 2019-08-26 18:04 +0200
    http://bitbucket.org/pypy/pypy/changeset/fc77f4cd4352/
    
    Log:	fixes
    
    diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
    --- a/pypy/config/pypyoption.py
    +++ b/pypy/config/pypyoption.py
    @@ -273,6 +273,9 @@
         if level == 'jit':
             pass # none at the moment
     
    +    if config.translation.sandbox or config.translation.reverse_debugger:
    +        config.objspace.hash = "fnv"
    +
     
     def enable_allworkingmodules(config):
         modules = working_modules.copy()
    diff --git a/pypy/module/_codecs/locale.py b/pypy/module/_codecs/locale.py
    --- a/pypy/module/_codecs/locale.py
    +++ b/pypy/module/_codecs/locale.py
    @@ -5,7 +5,7 @@
     import os
     import py
     import sys
    -from rpython.rlib.objectmodel import we_are_translated
    +from rpython.rlib.objectmodel import we_are_translated, sandbox_review
     from rpython.rlib.rstring import StringBuilder, assert_str0
     from rpython.rlib.runicode import (
         default_unicode_error_decode, default_unicode_error_encode)
    @@ -103,6 +103,7 @@
             if self.buf:
                 lltype.free(self.buf, flavor='raw')
     
+@sandbox_review(reviewed=True)
     def unicode2rawwcharp(u):
         """unicode -> raw wchar_t*"""
         if _should_merge_surrogates():
    @@ -115,6 +116,7 @@
         return array
     unicode2rawwcharp._annenforceargs_ = [unicode]
     
+@sandbox_review(check_caller=True)
     def _unicode2rawwcharp_loop(u, array):
         ulen = len(u)
         count = i = 0
    diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
    --- a/pypy/module/array/interp_array.py
    +++ b/pypy/module/array/interp_array.py
    @@ -439,6 +439,7 @@
             s = space.charbuf_w(w_s)
             self._frombytes(space, s)
     
    +    @sandbox_review(reviewed=True)
         def _frombytes(self, space, s):
             if len(s) % self.itemsize != 0:
                 raise oefmt(space.w_ValueError,
    diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py
    --- a/pypy/module/time/interp_time.py
    +++ b/pypy/module/time/interp_time.py
    @@ -475,6 +475,7 @@
         from rpython.rlib.rtime import c_select
     from rpython.rlib import rwin32
     
+@sandbox_review(reviewed=True)
     def sleep(space, w_secs):
         ns = timestamp_w(space, w_secs)
         if not (ns >= 0):
    @@ -800,6 +801,7 @@
                 secs = _timespec_to_seconds(timespec)
             return space.newfloat(secs)
     
    +    @sandbox_review(reviewed=True)
         @unwrap_spec(clk_id='c_int', secs=float)
         def clock_settime(space, clk_id, secs):
             with lltype.scoped_alloc(TIMESPEC) as timespec:
    @@ -839,6 +841,7 @@
             # reset timezone, altzone, daylight and tzname
             _init_timezone(space)
     
+@sandbox_review(reviewed=True)
     @unwrap_spec(format='text')
     def strftime(space, format, w_tup=None):
         """strftime(format[, tuple]) -> string
    diff --git a/rpython/rlib/rsiphash.py b/rpython/rlib/rsiphash.py
    --- a/rpython/rlib/rsiphash.py
    +++ b/rpython/rlib/rsiphash.py
    @@ -123,8 +123,11 @@
     
         def compute_result_annotation(self):
             translator = self.bookkeeper.annotator.translator
    -        if translator.config.translation.reverse_debugger:
    -            return    # ignore and use the regular hash, with reverse-debugger
    +        # you should not call enable_siphash24() when translating with the
    +        # reverse-debugger, or with sandbox.
    +        assert not translator.config.translation.reverse_debugger
    +        assert not translator.config.translation.sandbox
    +        #
             if hasattr(translator, 'll_hash_string'):
                 assert translator.ll_hash_string == ll_hash_string_siphash24
             else:
    diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py
    --- a/rpython/rtyper/lltypesystem/rffi.py
    +++ b/rpython/rtyper/lltypesystem/rffi.py
    @@ -1104,6 +1104,7 @@
             i += 1
         return s.build(), i
     
+@sandbox_review(reviewed=True)
     def utf82wcharp(utf8, utf8len, track_allocation=True):
         from rpython.rlib import rutf8
     
    @@ -1115,6 +1116,7 @@
         for ch in rutf8.Utf8StringIterator(utf8):
             w[index] = unichr(ch)
             index += 1
    +    assert index == utf8len
         w[index] = unichr(0)
         return w
     utf82wcharp._annenforceargs_ = [str, int, bool]
    
    From pypy.commits at gmail.com  Mon Aug 26 12:06:27 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 09:06:27 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6-sandbox-2: hg merge py3.6
    Message-ID: <5d640383.1c69fb81.23f7e.e12f@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6-sandbox-2
    Changeset: r97278:933b68b2f394
    Date: 2019-08-26 18:04 +0200
    http://bitbucket.org/pypy/pypy/changeset/933b68b2f394/
    
    Log:	hg merge py3.6
    
    
    From pypy.commits at gmail.com  Mon Aug 26 12:06:29 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 09:06:29 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: hg merge default
    Message-ID: <5d640385.1c69fb81.c7f4e.f5d8@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6
    Changeset: r97279:889a5f9aec6c
    Date: 2019-08-26 18:05 +0200
    http://bitbucket.org/pypy/pypy/changeset/889a5f9aec6c/
    
    Log:	hg merge default
    
    diff --git a/rpython/rlib/rsiphash.py b/rpython/rlib/rsiphash.py
    --- a/rpython/rlib/rsiphash.py
    +++ b/rpython/rlib/rsiphash.py
    @@ -123,8 +123,11 @@
     
         def compute_result_annotation(self):
             translator = self.bookkeeper.annotator.translator
    -        if translator.config.translation.reverse_debugger:
    -            return    # ignore and use the regular hash, with reverse-debugger
    +        # you should not call enable_siphash24() when translating with the
    +        # reverse-debugger, or with sandbox.
    +        assert not translator.config.translation.reverse_debugger
    +        assert not translator.config.translation.sandbox
    +        #
             if hasattr(translator, 'll_hash_string'):
                 assert translator.ll_hash_string == ll_hash_string_siphash24
             else:
    
    From pypy.commits at gmail.com  Mon Aug 26 12:06:30 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 09:06:30 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6-sandbox-2: hg merge py3.6
    Message-ID: <5d640386.1c69fb81.ee4d7.5a48@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6-sandbox-2
    Changeset: r97280:6c760b893b6a
    Date: 2019-08-26 18:05 +0200
    http://bitbucket.org/pypy/pypy/changeset/6c760b893b6a/
    
    Log:	hg merge py3.6
    
    
    From pypy.commits at gmail.com  Mon Aug 26 15:15:03 2019
    From: pypy.commits at gmail.com (cfbolz)
    Date: Mon, 26 Aug 2019 12:15:03 -0700 (PDT)
    Subject: [pypy-commit] pypy default: don't slice the string several times
     when doing str-to-int conversion
    Message-ID: <5d642fb7.1c69fb81.53863.5397@mx.google.com>
    
    Author: Carl Friedrich Bolz-Tereick 
    Branch: 
    Changeset: r97281:520b3c9072f2
    Date: 2019-08-26 21:05 +0200
    http://bitbucket.org/pypy/pypy/changeset/520b3c9072f2/
    
    Log:	don't slice the string several times when doing str-to-int
    	conversion
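
The idea of the refactoring below, as a stand-alone sketch (a hypothetical
helper, not the actual NumberStringParser code): keep the original string plus a
(start, end) window and move the indices while stripping spaces, the sign and
any base prefix, so no intermediate substrings are allocated:

    def parse_int_window(s, base=10):
        start, end = 0, len(s)
        while start < end and s[start] in ' \f\n\r\t\v':    # strip leading spaces
            start += 1
        while start < end and s[end - 1] in ' \f\n\r\t\v':  # strip trailing spaces
            end -= 1
        sign = 1
        if start < end and s[start] in '+-':
            sign = -1 if s[start] == '-' else 1
            start += 1
        if start == end:
            raise ValueError("invalid literal: %r" % (s,))
        value = 0
        for i in range(start, end):
            value = value * base + int(s[i], base)  # ValueError on a bad digit
        return sign * value

    assert parse_int_window("  -42 ") == -42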
    
    diff --git a/rpython/rlib/rarithmetic.py b/rpython/rlib/rarithmetic.py
    --- a/rpython/rlib/rarithmetic.py
    +++ b/rpython/rlib/rarithmetic.py
    @@ -878,9 +878,8 @@
         Raises ParseStringOverflowError in case the result does not fit.
         """
         from rpython.rlib.rstring import (
    -        NumberStringParser, ParseStringOverflowError, strip_spaces)
    -    s = literal = strip_spaces(s)
    -    p = NumberStringParser(s, literal, base, 'int',
    +        NumberStringParser, ParseStringOverflowError)
    +    p = NumberStringParser(s, s, base, 'int',
                                allow_underscores=allow_underscores,
                                no_implicit_octal=no_implicit_octal)
         base = p.base
    diff --git a/rpython/rlib/rbigint.py b/rpython/rlib/rbigint.py
    --- a/rpython/rlib/rbigint.py
    +++ b/rpython/rlib/rbigint.py
    @@ -296,14 +296,17 @@
         def fromstr(s, base=0, allow_underscores=False):
             """As string_to_int(), but ignores an optional 'l' or 'L' suffix
             and returns an rbigint."""
    +        from rpython.rlib.rstring import NumberStringParser
             from rpython.rlib.rstring import NumberStringParser, \
                 strip_spaces
    -        s = literal = strip_spaces(s)
    +        s = literal = strip_spaces(s) # XXX could get rid of this slice
    +        end = len(s)
             if (s.endswith('l') or s.endswith('L')) and base < 22:
                 # in base 22 and above, 'L' is a valid digit!  try: long('L',22)
    -            s = s[:-1]
    +            end -= 1
             parser = NumberStringParser(s, literal, base, 'long',
    -                                    allow_underscores=allow_underscores)
    +                                    allow_underscores=allow_underscores,
    +                                    end=end)
             return rbigint._from_numberstring_parser(parser)
     
         @staticmethod
    diff --git a/rpython/rlib/rstring.py b/rpython/rlib/rstring.py
    --- a/rpython/rlib/rstring.py
    +++ b/rpython/rlib/rstring.py
    @@ -500,26 +500,34 @@
                                    (self.fname, self.original_base))
     
         def __init__(self, s, literal, base, fname, allow_underscores=False,
    -                 no_implicit_octal=False):
    +                 no_implicit_octal=False, start=0, end=-1):
             self.fname = fname
             sign = 1
    -        if s.startswith('-'):
    +        self.s = s
    +        self.start = start
    +        if end == -1:
    +            end = len(s)
    +        self.end = end
    +        self._strip_spaces()
    +        if self._startswith('-'):
                 sign = -1
    -            s = strip_spaces(s[1:])
    -        elif s.startswith('+'):
    -            s = strip_spaces(s[1:])
    +            self.start += 1
    +            self._strip_spaces()
    +        elif self._startswith('+'):
    +            self.start += 1
    +            self._strip_spaces()
             self.sign = sign
             self.original_base = base
             self.allow_underscores = allow_underscores
     
             if base == 0:
    -            if s.startswith('0x') or s.startswith('0X'):
    +            if self._startswith('0x') or self._startswith('0X'):
                     base = 16
    -            elif s.startswith('0b') or s.startswith('0B'):
    +            elif self._startswith('0b') or self._startswith('0B'):
                     base = 2
    -            elif s.startswith('0'): # also covers the '0o' case
    -                if no_implicit_octal and not (s.startswith('0o') or
    -                                              s.startswith('0O')):
    +            elif self._startswith('0'): # also covers the '0o' case
    +                if no_implicit_octal and not (self._startswith('0o') or
    +                                              self._startswith('0O')):
                         base = 1    # this makes only the digit '0' valid...
                     else:
                         base = 8
    @@ -530,30 +538,44 @@
             self.base = base
     
             # Leading underscores are not allowed
    -        if s.startswith('_'):
    +        if self._startswith('_'):
                 self.error()
     
    -        if base == 16 and (s.startswith('0x') or s.startswith('0X')):
    -            s = s[2:]
    -        if base == 8 and (s.startswith('0o') or s.startswith('0O')):
    -            s = s[2:]
    -        if base == 2 and (s.startswith('0b') or s.startswith('0B')):
    -            s = s[2:]
    -        if not s:
    +        if base == 16 and (self._startswith('0x') or self._startswith('0X')):
    +            self.start += 2
    +        if base == 8 and (self._startswith('0o') or self._startswith('0O')):
    +            self.start += 2
    +        if base == 2 and (self._startswith('0b') or self._startswith('0B')):
    +            self.start += 2
    +        if self.start == self.end:
                 self.error()
    -        self.s = s
    -        self.n = len(s)
    -        self.i = 0
    +        self.i = self.start
    +
    +    def _startswith(self, prefix):
    +        return startswith(self.s, prefix, start=self.start, end=self.end)
    +
    +    def _strip_spaces(self):
    +        # XXX this is not locale-dependent
    +        p = self.start
    +        q = self.end
    +        s = self.s
    +        while p < q and s[p] in ' \f\n\r\t\v':
    +            p += 1
    +        while p < q and s[q-1] in ' \f\n\r\t\v':
    +            q -= 1
    +        assert q >= p
    +        self.start = p
    +        self.end = q
     
         def rewind(self):
             self.i = 0
     
         def next_digit(self): # -1 => exhausted
    -        if self.i < self.n:
    +        if self.i < self.end:
                 c = self.s[self.i]
                 if self.allow_underscores and c == '_':
                     self.i += 1
    -                if self.i >= self.n:
    +                if self.i >= self.end:
                         self.error()
                     c = self.s[self.i]
                 digit = ord(c)
    @@ -576,7 +598,7 @@
             # After exhausting all n digits in next_digit(), you can walk them
             # again in reverse order by calling prev_digit() exactly n times
             i = self.i - 1
    -        assert i >= 0
    +        assert i >= self.start
             self.i = i
             c = self.s[i]
             digit = ord(c)
    diff --git a/rpython/rlib/test/test_rarithmetic.py b/rpython/rlib/test/test_rarithmetic.py
    --- a/rpython/rlib/test/test_rarithmetic.py
    +++ b/rpython/rlib/test/test_rarithmetic.py
    @@ -337,6 +337,10 @@
             res = self.interpret(f, [123])
             assert res == 4 + 2
     
    +    def test_string_to_int_translates(self):
    +        def f(s):
    +            return string_to_int(str(s))
    +        self.interpret(f, [123]) == 123
     
     def test_int_real_union():
         from rpython.rtyper.lltypesystem.rffi import r_int_real
    
    From pypy.commits at gmail.com  Mon Aug 26 16:32:04 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 13:32:04 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: some changes lost in the maze of
     merges
    Message-ID: <5d6441c4.1c69fb81.ff932.71af@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97282:f87b640630bf
    Date: 2019-08-26 18:15 +0200
    http://bitbucket.org/pypy/pypy/changeset/f87b640630bf/
    
    Log:	some changes lost in the maze of merges
    
    diff --git a/rpython/rlib/rsiphash.py b/rpython/rlib/rsiphash.py
    --- a/rpython/rlib/rsiphash.py
    +++ b/rpython/rlib/rsiphash.py
    @@ -123,8 +123,11 @@
     
         def compute_result_annotation(self):
             translator = self.bookkeeper.annotator.translator
    -        if translator.config.translation.reverse_debugger:
    -            return    # ignore and use the regular hash, with reverse-debugger
    +        # you should not call enable_siphash24() when translating with the
    +        # reverse-debugger, or with sandbox.
    +        assert not translator.config.translation.reverse_debugger
    +        assert not translator.config.translation.sandbox
    +        #
             if hasattr(translator, 'll_hash_string'):
                 assert translator.ll_hash_string == ll_hash_string_siphash24
             else:
    diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py
    --- a/rpython/rtyper/lltypesystem/rffi.py
    +++ b/rpython/rtyper/lltypesystem/rffi.py
    @@ -1104,6 +1104,7 @@
             i += 1
         return s.build(), i
     
+@sandbox_review(reviewed=True)
     def utf82wcharp(utf8, utf8len, track_allocation=True):
         from rpython.rlib import rutf8
     
    @@ -1115,6 +1116,7 @@
         for ch in rutf8.Utf8StringIterator(utf8):
             w[index] = unichr(ch)
             index += 1
    +    assert index == utf8len
         w[index] = unichr(0)
         return w
     utf82wcharp._annenforceargs_ = [str, int, bool]
    
    From pypy.commits at gmail.com  Mon Aug 26 16:32:06 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 13:32:06 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: three more sandbox_review
    Message-ID: <5d6441c6.1c69fb81.c7f4e.18a0@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97283:4a871bfc18fb
    Date: 2019-08-26 18:25 +0200
    http://bitbucket.org/pypy/pypy/changeset/4a871bfc18fb/
    
    Log:	three more sandbox_review
    
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -2214,6 +2214,7 @@
         c_futimens = external('futimens', [rffi.INT, TIMESPEC2P], rffi.INT,
                               save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         def futimens(fd, atime, atime_ns, mtime, mtime_ns):
             l_times = lltype.malloc(TIMESPEC2P.TO, 2, flavor='raw')
             rffi.setintfield(l_times[0], 'c_tv_sec', atime)
    @@ -2230,6 +2231,7 @@
             [rffi.INT, rffi.CCHARP, TIMESPEC2P, rffi.INT], rffi.INT,
             save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         def utimensat(pathname, atime, atime_ns, mtime, mtime_ns,
                 dir_fd=AT_FDCWD, follow_symlinks=True):
             """Wrapper around utimensat(2)
    @@ -2643,6 +2645,7 @@
                 rffi.SSIZE_T, save_err=rffi.RFFI_SAVE_ERRNO,
                 compilation_info=sendfile_eci)
     
    +    @sandbox_review(reviewed=True)
         def sendfile(out_fd, in_fd, offset, count):
             with lltype.scoped_alloc(_OFF_PTR_T.TO, 1) as p_offset:
                 p_offset[0] = rffi.cast(OFF_T, offset)
    
    From pypy.commits at gmail.com  Mon Aug 26 16:32:08 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 13:32:08 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6-sandbox-2: hg merge sandbox-2
    Message-ID: <5d6441c8.1c69fb81.e6979.2305@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6-sandbox-2
    Changeset: r97284:bf3980d91358
    Date: 2019-08-26 18:26 +0200
    http://bitbucket.org/pypy/pypy/changeset/bf3980d91358/
    
    Log:	hg merge sandbox-2
    
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -2214,6 +2214,7 @@
         c_futimens = external('futimens', [rffi.INT, TIMESPEC2P], rffi.INT,
                               save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         def futimens(fd, atime, atime_ns, mtime, mtime_ns):
             l_times = lltype.malloc(TIMESPEC2P.TO, 2, flavor='raw')
             rffi.setintfield(l_times[0], 'c_tv_sec', atime)
    @@ -2230,6 +2231,7 @@
             [rffi.INT, rffi.CCHARP, TIMESPEC2P, rffi.INT], rffi.INT,
             save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         def utimensat(pathname, atime, atime_ns, mtime, mtime_ns,
                 dir_fd=AT_FDCWD, follow_symlinks=True):
             """Wrapper around utimensat(2)
    @@ -2643,6 +2645,7 @@
                 rffi.SSIZE_T, save_err=rffi.RFFI_SAVE_ERRNO,
                 compilation_info=sendfile_eci)
     
    +    @sandbox_review(reviewed=True)
         def sendfile(out_fd, in_fd, offset, count):
             with lltype.scoped_alloc(_OFF_PTR_T.TO, 1) as p_offset:
                 p_offset[0] = rffi.cast(OFF_T, offset)
    
    From pypy.commits at gmail.com  Mon Aug 26 16:32:09 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 13:32:09 -0700 (PDT)
    Subject: [pypy-commit] pypy default: CPython raises NotImplementedError if
     /dev/urandom cannot be found.
    Message-ID: <5d6441c9.1c69fb81.7950c.67cf@mx.google.com>
    
    Author: Armin Rigo 
    Branch: 
    Changeset: r97285:a6c1ae01424e
    Date: 2019-08-26 22:31 +0200
    http://bitbucket.org/pypy/pypy/changeset/a6c1ae01424e/
    
    Log:	CPython raises NotImplementedError if /dev/urandom cannot be found.
    	To maximize compatibility, we should also raise NotImplementedError
    	and not OSError (although CPython also raises OSError in case it
    	could open /dev/urandom but there are further problems).
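
From the application's point of view, the compatibility target looks roughly like
the following sketch (pure Python, the helper name is made up).  CPython-oriented
code, including CPython's own random module, catches NotImplementedError from
os.urandom() to fall back to a weaker seed, and the change below keeps that
except clause meaningful on PyPy:

    import os

    def get_seed(nbytes=16):
        try:
            return os.urandom(nbytes)
        except NotImplementedError:
            # reached when no OS-level randomness source is available; this
            # fallback is NOT cryptographically strong, it only illustrates
            # the exception contract the commit aligns with
            import random
            return bytes(bytearray(random.getrandbits(8) for _ in range(nbytes)))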
    
    diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py
    --- a/pypy/module/posix/interp_posix.py
    +++ b/pypy/module/posix/interp_posix.py
    @@ -1351,7 +1351,12 @@
             _sigcheck.space = space
             return space.newbytes(rurandom.urandom(context, n, _signal_checker))
         except OSError as e:
    -        raise wrap_oserror(space, e)
    +        # CPython raises NotImplementedError if /dev/urandom cannot be found.
    +        # To maximize compatibility, we should also raise NotImplementedError
    +        # and not OSError (although CPython also raises OSError in case it
    +        # could open /dev/urandom but there are further problems).
    +        raise wrap_oserror(space, e,
    +            w_exception_class=space.w_NotImplementedError)
     
     def ctermid(space):
         """ctermid() -> string
    
    From pypy.commits at gmail.com  Mon Aug 26 16:32:25 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 13:32:25 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: hg merge default
    Message-ID: <5d6441d9.1c69fb81.f275e.593b@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97286:c82f896c86b5
    Date: 2019-08-26 22:31 +0200
    http://bitbucket.org/pypy/pypy/changeset/c82f896c86b5/
    
    Log:	hg merge default
    
    diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py
    --- a/pypy/module/posix/interp_posix.py
    +++ b/pypy/module/posix/interp_posix.py
    @@ -1351,7 +1351,12 @@
             _sigcheck.space = space
             return space.newbytes(rurandom.urandom(context, n, _signal_checker))
         except OSError as e:
    -        raise wrap_oserror(space, e)
    +        # CPython raises NotImplementedError if /dev/urandom cannot be found.
    +        # To maximize compatibility, we should also raise NotImplementedError
    +        # and not OSError (although CPython also raises OSError in case it
    +        # could open /dev/urandom but there are further problems).
    +        raise wrap_oserror(space, e,
    +            w_exception_class=space.w_NotImplementedError)
     
     def ctermid(space):
         """ctermid() -> string
    diff --git a/rpython/rlib/rarithmetic.py b/rpython/rlib/rarithmetic.py
    --- a/rpython/rlib/rarithmetic.py
    +++ b/rpython/rlib/rarithmetic.py
    @@ -878,9 +878,8 @@
         Raises ParseStringOverflowError in case the result does not fit.
         """
         from rpython.rlib.rstring import (
    -        NumberStringParser, ParseStringOverflowError, strip_spaces)
    -    s = literal = strip_spaces(s)
    -    p = NumberStringParser(s, literal, base, 'int',
    +        NumberStringParser, ParseStringOverflowError)
    +    p = NumberStringParser(s, s, base, 'int',
                                allow_underscores=allow_underscores,
                                no_implicit_octal=no_implicit_octal)
         base = p.base
    diff --git a/rpython/rlib/rbigint.py b/rpython/rlib/rbigint.py
    --- a/rpython/rlib/rbigint.py
    +++ b/rpython/rlib/rbigint.py
    @@ -296,14 +296,17 @@
         def fromstr(s, base=0, allow_underscores=False):
             """As string_to_int(), but ignores an optional 'l' or 'L' suffix
             and returns an rbigint."""
    +        from rpython.rlib.rstring import NumberStringParser
             from rpython.rlib.rstring import NumberStringParser, \
                 strip_spaces
    -        s = literal = strip_spaces(s)
    +        s = literal = strip_spaces(s) # XXX could get rid of this slice
    +        end = len(s)
             if (s.endswith('l') or s.endswith('L')) and base < 22:
                 # in base 22 and above, 'L' is a valid digit!  try: long('L',22)
    -            s = s[:-1]
    +            end -= 1
             parser = NumberStringParser(s, literal, base, 'long',
    -                                    allow_underscores=allow_underscores)
    +                                    allow_underscores=allow_underscores,
    +                                    end=end)
             return rbigint._from_numberstring_parser(parser)
     
         @staticmethod
    diff --git a/rpython/rlib/rstring.py b/rpython/rlib/rstring.py
    --- a/rpython/rlib/rstring.py
    +++ b/rpython/rlib/rstring.py
    @@ -500,26 +500,34 @@
                                    (self.fname, self.original_base))
     
         def __init__(self, s, literal, base, fname, allow_underscores=False,
    -                 no_implicit_octal=False):
    +                 no_implicit_octal=False, start=0, end=-1):
             self.fname = fname
             sign = 1
    -        if s.startswith('-'):
    +        self.s = s
    +        self.start = start
    +        if end == -1:
    +            end = len(s)
    +        self.end = end
    +        self._strip_spaces()
    +        if self._startswith('-'):
                 sign = -1
    -            s = strip_spaces(s[1:])
    -        elif s.startswith('+'):
    -            s = strip_spaces(s[1:])
    +            self.start += 1
    +            self._strip_spaces()
    +        elif self._startswith('+'):
    +            self.start += 1
    +            self._strip_spaces()
             self.sign = sign
             self.original_base = base
             self.allow_underscores = allow_underscores
     
             if base == 0:
    -            if s.startswith('0x') or s.startswith('0X'):
    +            if self._startswith('0x') or self._startswith('0X'):
                     base = 16
    -            elif s.startswith('0b') or s.startswith('0B'):
    +            elif self._startswith('0b') or self._startswith('0B'):
                     base = 2
    -            elif s.startswith('0'): # also covers the '0o' case
    -                if no_implicit_octal and not (s.startswith('0o') or
    -                                              s.startswith('0O')):
    +            elif self._startswith('0'): # also covers the '0o' case
    +                if no_implicit_octal and not (self._startswith('0o') or
    +                                              self._startswith('0O')):
                         base = 1    # this makes only the digit '0' valid...
                     else:
                         base = 8
    @@ -530,30 +538,44 @@
             self.base = base
     
             # Leading underscores are not allowed
    -        if s.startswith('_'):
    +        if self._startswith('_'):
                 self.error()
     
    -        if base == 16 and (s.startswith('0x') or s.startswith('0X')):
    -            s = s[2:]
    -        if base == 8 and (s.startswith('0o') or s.startswith('0O')):
    -            s = s[2:]
    -        if base == 2 and (s.startswith('0b') or s.startswith('0B')):
    -            s = s[2:]
    -        if not s:
    +        if base == 16 and (self._startswith('0x') or self._startswith('0X')):
    +            self.start += 2
    +        if base == 8 and (self._startswith('0o') or self._startswith('0O')):
    +            self.start += 2
    +        if base == 2 and (self._startswith('0b') or self._startswith('0B')):
    +            self.start += 2
    +        if self.start == self.end:
                 self.error()
    -        self.s = s
    -        self.n = len(s)
    -        self.i = 0
    +        self.i = self.start
    +
    +    def _startswith(self, prefix):
    +        return startswith(self.s, prefix, start=self.start, end=self.end)
    +
    +    def _strip_spaces(self):
    +        # XXX this is not locale-dependent
    +        p = self.start
    +        q = self.end
    +        s = self.s
    +        while p < q and s[p] in ' \f\n\r\t\v':
    +            p += 1
    +        while p < q and s[q-1] in ' \f\n\r\t\v':
    +            q -= 1
    +        assert q >= p
    +        self.start = p
    +        self.end = q
     
         def rewind(self):
             self.i = 0
     
         def next_digit(self): # -1 => exhausted
    -        if self.i < self.n:
    +        if self.i < self.end:
                 c = self.s[self.i]
                 if self.allow_underscores and c == '_':
                     self.i += 1
    -                if self.i >= self.n:
    +                if self.i >= self.end:
                         self.error()
                     c = self.s[self.i]
                 digit = ord(c)
    @@ -576,7 +598,7 @@
             # After exhausting all n digits in next_digit(), you can walk them
             # again in reverse order by calling prev_digit() exactly n times
             i = self.i - 1
    -        assert i >= 0
    +        assert i >= self.start
             self.i = i
             c = self.s[i]
             digit = ord(c)
    diff --git a/rpython/rlib/test/test_rarithmetic.py b/rpython/rlib/test/test_rarithmetic.py
    --- a/rpython/rlib/test/test_rarithmetic.py
    +++ b/rpython/rlib/test/test_rarithmetic.py
    @@ -337,6 +337,10 @@
             res = self.interpret(f, [123])
             assert res == 4 + 2
     
    +    def test_string_to_int_translates(self):
    +        def f(s):
    +            return string_to_int(str(s))
    +        self.interpret(f, [123]) == 123
     
     def test_int_real_union():
         from rpython.rtyper.lltypesystem.rffi import r_int_real
    
    From pypy.commits at gmail.com  Mon Aug 26 17:27:47 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Mon, 26 Aug 2019 14:27:47 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: tweak tests for PyPy
    Message-ID: <5d644ed3.1c69fb81.885a6.2aeb@mx.google.com>
    
    Author: Matti Picus 
    Branch: py3.6
    Changeset: r97287:251eb63edc0f
    Date: 2019-08-27 00:27 +0300
    http://bitbucket.org/pypy/pypy/changeset/251eb63edc0f/
    
    Log:	tweak tests for PyPy
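
For context, the usual ways to keep such tests portable across interpreters are
feature detection (as in the first hunk below) or an explicit implementation
check.  A small illustrative sketch, not part of the patch:

    import sys, platform, unittest

    IS_PYPY = platform.python_implementation() == 'PyPy'

    class PortabilityExamples(unittest.TestCase):
        @unittest.skipUnless(hasattr(sys, 'getrefcount'),
                             'needs CPython-style reference counting')
        def test_refcount_detail(self):
            x = object()
            self.assertGreaterEqual(sys.getrefcount(x), 2)

        @unittest.skipIf(IS_PYPY, 'relies on CPython recursion behaviour')
        def test_deep_recursion(self):
            pass  # placeholder body for the sketch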
    
    diff --git a/lib-python/3/unittest/test/test_case.py b/lib-python/3/unittest/test/test_case.py
    --- a/lib-python/3/unittest/test/test_case.py
    +++ b/lib-python/3/unittest/test/test_case.py
    @@ -1287,6 +1287,8 @@
             with self.assertRaises(TypeError):
                 self.assertRaises((ValueError, object))
     
    +    @unittest.skipUnless(hasattr(sys, 'getrefcount'),
    +                         'test needs sys.getrefcount()')
         def testAssertRaisesRefcount(self):
             # bpo-23890: assertRaises() must not keep objects alive longer
             # than expected
    diff --git a/lib-python/3/unittest/test/testmock/testmock.py b/lib-python/3/unittest/test/testmock/testmock.py
    --- a/lib-python/3/unittest/test/testmock/testmock.py
    +++ b/lib-python/3/unittest/test/testmock/testmock.py
    @@ -784,7 +784,8 @@
             self.addCleanup(sys.setrecursionlimit, current)
     
             # can't use sys.maxint as this doesn't exist in Python 3
    -        sys.setrecursionlimit(int(10e8))
    +        # changed from 10e8 to 10e6 on PyPY, 10e8 causes a MemoryError
    +        sys.setrecursionlimit(int(10e6))
             # this segfaults without the fix in place
             copy.copy(Mock())
     
    
    From pypy.commits at gmail.com  Tue Aug 27 02:43:07 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 23:43:07 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: Fix some of the tests
    Message-ID: <5d64d0fb.1c69fb81.a0645.7200@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97288:7177f77afa08
    Date: 2019-08-26 23:04 +0200
    http://bitbucket.org/pypy/pypy/changeset/7177f77afa08/
    
    Log:	Fix some of the tests
    
    diff --git a/rpython/translator/sandbox/graphchecker.py b/rpython/translator/sandbox/graphchecker.py
    --- a/rpython/translator/sandbox/graphchecker.py
    +++ b/rpython/translator/sandbox/graphchecker.py
    @@ -105,7 +105,7 @@
                 elif opname in ('cast_ptr_to_adr', 'force_cast',
                                 'cast_int_to_ptr'):
                     if is_gc_ptr(op.result.concretetype):
    -                    return "result is a GC ptr: %r" % (opname,)
    +                    return "result is a GC ptr: %r" % (op,)
     
                 else:
                     return "unsupported llop: %r" % (opname,)
    diff --git a/rpython/translator/sandbox/sandlib.py b/rpython/translator/sandbox/sandlib.py
    deleted file mode 100644
    --- a/rpython/translator/sandbox/sandlib.py
    +++ /dev/null
    @@ -1,517 +0,0 @@
    -"""
    -A Python library to execute and communicate with a subprocess that
    -was translated from RPython code with --sandbox.  This library is
    -for the outer process, which can run CPython or PyPy.
    -"""
    -
    -import sys, os, posixpath, errno, stat, time
    -import subprocess
    -from rpython.tool.killsubprocess import killsubprocess
    -from rpython.translator.sandbox.vfs import UID, GID
    -import py
    -
    -WIN32 = os.name == "nt"
    -
    -
    -def create_log():
    -    """Make and return a log for the sandbox to use, if needed."""
    -    from rpython.tool.ansi_print import AnsiLogger
    -    return AnsiLogger("sandlib")
    -
    -def write_exception(g, exception, tb=None):
    -    for i, excclass in EXCEPTION_TABLE:
    -        if isinstance(exception, excclass):
    -            write_message(g, i)
    -            if excclass is OSError:
    -                error = exception.errno
    -                if error is None:
    -                    error = errno.EPERM
    -                write_message(g, error)
    -            g.flush()
    -            break
    -    else:
    -        # just re-raise the exception
    -        raise exception.__class__, exception, tb
    -
    -def shortrepr(x):
    -    r = repr(x)
    -    if len(r) >= 80:
    -        r = r[:20] + '...' + r[-8:]
    -    return r
    -
    -def signal_name(n):
    -    import signal
    -    for key, value in signal.__dict__.items():
    -        if key.startswith('SIG') and not key.startswith('SIG_') and value == n:
    -            return key
    -    return 'signal %d' % (n,)
    -
    -
    -class SandboxedProc(object):
    -    """Base class to control a sandboxed subprocess.
    -    Inherit from this class and implement all the do_xxx() methods
    -    for the external functions xxx that you want to support.
    -    """
    -    debug = False
    -    log = None
    -    os_level_sandboxing = False   # Linux only: /proc/PID/seccomp
    -
    -    def __init__(self, args, executable=None):
    -        """'args' should a sequence of argument for the subprocess,
    -        starting with the full path of the executable.
    -        """
    -        self.popen = subprocess.Popen(args, executable=executable,
    -                                      bufsize=-1,
    -                                      stdin=subprocess.PIPE,
    -                                      stdout=subprocess.PIPE,
    -                                      close_fds=False if WIN32 else True,
    -                                      env={})
    -        self.popenlock = None
    -        self.currenttimeout = None
    -        self.currentlyidlefrom = None
    -
    -        if self.debug:
    -            self.log = create_log()
    -
    -    def withlock(self, function, *args, **kwds):
    -        lock = self.popenlock
    -        if lock is not None:
    -            lock.acquire()
    -        try:
    -            return function(*args, **kwds)
    -        finally:
    -            if lock is not None:
    -                lock.release()
    -
    -    def settimeout(self, timeout, interrupt_main=False):
    -        """Start a timeout that will kill the subprocess after the given
    -        amount of time.  Only one timeout can be active at a time.
    -        """
    -        import thread
    -
    -        def _waiting_thread():
    -            while True:
    -                while self.currentlyidlefrom is not None:
    -                    time.sleep(1)   # can't timeout while idle
    -                t = self.currenttimeout
    -                if t is None:
    -                    return  # cancelled
    -                delay = t - time.time()
    -                if delay <= 0.0:
    -                    break   # expired!
    -                time.sleep(min(delay*1.001, 1))
    -            if self.log:
    -                self.log.timeout("timeout!")
    -            self.kill()
    -            #if interrupt_main:
    -            #    if hasattr(os, 'kill'):
    -            #        import signal
    -            #        os.kill(os.getpid(), signal.SIGINT)
    -            #    else:
    -            #        thread.interrupt_main()
    -
    -        def _settimeout():
    -            need_new_thread = self.currenttimeout is None
    -            self.currenttimeout = time.time() + timeout
    -            if need_new_thread:
    -                thread.start_new_thread(_waiting_thread, ())
    -
    -        if self.popenlock is None:
    -            self.popenlock = thread.allocate_lock()
    -        self.withlock(_settimeout)
    -
    -    def canceltimeout(self):
    -        """Cancel the current timeout."""
    -        self.currenttimeout = None
    -        self.currentlyidlefrom = None
    -
    -    def enter_idle(self):
    -        self.currentlyidlefrom = time.time()
    -
    -    def leave_idle(self):
    -        def _postpone_timeout():
    -            t = self.currentlyidlefrom
    -            if t is not None and self.currenttimeout is not None:
    -                self.currenttimeout += time.time() - t
    -        try:
    -            self.withlock(_postpone_timeout)
    -        finally:
    -            self.currentlyidlefrom = None
    -
    -    def poll(self):
    -        returncode = self.withlock(self.popen.poll)
    -        if returncode is not None:
    -            self.canceltimeout()
    -        return returncode
    -
    -    def wait(self):
    -        returncode = self.withlock(self.popen.wait)
    -        if returncode is not None:
    -            self.canceltimeout()
    -        return returncode
    -
    -    def kill(self):
    -        self.withlock(killsubprocess, self.popen)
    -
    -    def handle_forever(self):
    -        returncode = self.handle_until_return()
    -        if returncode != 0:
    -            raise OSError("the sandboxed subprocess exited with code %d" % (
    -                returncode,))
    -
    -    def handle_until_return(self):
    -        child_stdin  = self.popen.stdin
    -        child_stdout = self.popen.stdout
    -        if self.os_level_sandboxing and sys.platform.startswith('linux'):
    -            # rationale: we wait until the child process started completely,
    -            # letting the C library do any system calls it wants for
    -            # initialization.  When the RPython code starts up, it quickly
    -            # does its first system call.  At this point we turn seccomp on.
    -            import select
    -            select.select([child_stdout], [], [])
    -            f = open('/proc/%d/seccomp' % self.popen.pid, 'w')
    -            print >> f, 1
    -            f.close()
    -        while True:
    -            try:
    -                fnname = read_message(child_stdout)
    -                args   = read_message(child_stdout)
    -            except EOFError as e:
    -                break
    -            if self.log and not self.is_spam(fnname, *args):
    -                self.log.call('%s(%s)' % (fnname,
    -                                     ', '.join([shortrepr(x) for x in args])))
    -            try:
    -                answer, resulttype = self.handle_message(fnname, *args)
    -            except Exception as e:
    -                tb = sys.exc_info()[2]
    -                write_exception(child_stdin, e, tb)
    -                if self.log:
    -                    if str(e):
    -                        self.log.exception('%s: %s' % (e.__class__.__name__, e))
    -                    else:
    -                        self.log.exception('%s' % (e.__class__.__name__,))
    -            else:
    -                if self.log and not self.is_spam(fnname, *args):
    -                    self.log.result(shortrepr(answer))
    -                try:
    -                    write_message(child_stdin, 0)  # error code - 0 for ok
    -                    write_message(child_stdin, answer, resulttype)
    -                    child_stdin.flush()
    -                except (IOError, OSError):
    -                    # likely cause: subprocess is dead, child_stdin closed
    -                    if self.poll() is not None:
    -                        break
    -                    else:
    -                        raise
    -        returncode = self.wait()
    -        return returncode
    -
    -    def is_spam(self, fnname, *args):
    -        # To hide the spamming amounts of reads and writes to stdin and stdout
    -        # in interactive sessions
    -        return (fnname in ('ll_os.ll_os_read', 'll_os.ll_os_write') and
    -                args[0] in (0, 1, 2))
    -
    -    def handle_message(self, fnname, *args):
    -        if '__' in fnname:
    -            raise ValueError("unsafe fnname")
    -        try:
    -            handler = getattr(self, 'do_' + fnname.replace('.', '__'))
    -        except AttributeError:
    -            raise RuntimeError("no handler for this function")
    -        resulttype = getattr(handler, 'resulttype', None)
    -        return handler(*args), resulttype
    -
    -
    -class SimpleIOSandboxedProc(SandboxedProc):
    -    """Control a sandboxed subprocess which is only allowed to read from
    -    its stdin and write to its stdout and stderr.
    -    """
    -    _input = None
    -    _output = None
    -    _error = None
    -    inputlogfile = None
    -
    -    def communicate(self, input=None):
    -        """Send data to stdin. Read data from stdout and stderr,
    -        until end-of-file is reached. Wait for process to terminate.
    -        """
    -        import cStringIO
    -        if input:
    -            if isinstance(input, str):
    -                input = cStringIO.StringIO(input)
    -            self._input = input
    -        self._output = cStringIO.StringIO()
    -        self._error = cStringIO.StringIO()
    -        self.handle_forever()
    -        output = self._output.getvalue()
    -        self._output = None
    -        error = self._error.getvalue()
    -        self._error = None
    -        return (output, error)
    -
    -    def interact(self, stdin=None, stdout=None, stderr=None):
    -        """Interact with the subprocess.  By default, stdin, stdout and
    -        stderr are set to the ones from 'sys'."""
    -        import sys
    -        self._input  = stdin  or sys.stdin
    -        self._output = stdout or sys.stdout
    -        self._error  = stderr or sys.stderr
    -        returncode = self.handle_until_return()
    -        if returncode != 0:
    -            if os.name == 'posix' and returncode < 0:
    -                print >> self._error, "[Subprocess killed by %s]" % (
    -                    signal_name(-returncode),)
    -            else:
    -                print >> self._error, "[Subprocess exit code: %d]" % (
    -                    returncode,)
    -        self._input = None
    -        self._output = None
    -        self._error = None
    -        return returncode
    -
    -    def setlogfile(self, filename):
    -        self.inputlogfile = open(filename, 'a')
    -
    -    def do_ll_os__ll_os_read(self, fd, size):
    -        if fd == 0:
    -            if self._input is None:
    -                return ""
    -            elif (getattr(self, 'virtual_console_isatty', False) or
    -                  self._input.isatty()):
    -                # don't wait for all 'size' chars if reading from a tty,
    -                # to avoid blocking.  Instead, stop after reading a line.
    -
    -                # For now, waiting at the interactive console is the
    -                # only time that counts as idle.
    -                self.enter_idle()
    -                try:
    -                    inputdata = self._input.readline(size)
    -                finally:
    -                    self.leave_idle()
    -            else:
    -                inputdata = self._input.read(size)
    -            if self.inputlogfile is not None:
    -                self.inputlogfile.write(inputdata)
    -            return inputdata
    -        raise OSError("trying to read from fd %d" % (fd,))
    -
    -    def do_ll_os__ll_os_write(self, fd, data):
    -        if fd == 1:
    -            self._output.write(data)
    -            return len(data)
    -        if fd == 2:
    -            self._error.write(data)
    -            return len(data)
    -        raise OSError("trying to write to fd %d" % (fd,))
    -
    -    # let's allow access to the real time
    -    def do_ll_time__ll_time_sleep(self, seconds):
    -        # regularly check for timeouts that could have killed the
    -        # subprocess
    -        while seconds > 5.0:
    -            time.sleep(5.0)
    -            seconds -= 5.0
    -            if self.poll() is not None:   # subprocess finished?
    -                return
    -        time.sleep(seconds)
    -
    -    def do_ll_time__ll_time_time(self):
    -        return time.time()
    -
    -    def do_ll_time__ll_time_clock(self):
    -        # measuring the CPU time of the controller process has
    -        # not much meaning, so let's emulate this and return
    -        # the real time elapsed since the first call to clock()
    -        # (this is one of the behaviors allowed by the docs)
    -        try:
    -            starttime = self.starttime
    -        except AttributeError:
    -            starttime = self.starttime = time.time()
    -        return time.time() - starttime
    -
    -class VirtualizedSandboxedProc(SandboxedProc):
    -    """Control a virtualized sandboxed process, which is given a custom
    -    view on the filesystem and a custom environment.
    -    """
    -    virtual_env = {}
    -    virtual_cwd = '/tmp'
    -    virtual_console_isatty = False
    -    virtual_fd_range = range(3, 50)
    -
    -    def __init__(self, *args, **kwds):
    -        super(VirtualizedSandboxedProc, self).__init__(*args, **kwds)
    -        self.virtual_root = self.build_virtual_root()
    -        self.open_fds = {}   # {virtual_fd: (real_file_object, node)}
    -
    -    def build_virtual_root(self):
    -        raise NotImplementedError("must be overridden")
    -
    -    def do_ll_os__ll_os_envitems(self):
    -        return self.virtual_env.items()
    -
    -    def do_ll_os__ll_os_getenv(self, name):
    -        return self.virtual_env.get(name)
    -
    -    def translate_path(self, vpath):
    -        # XXX this assumes posix vpaths for now, but os-specific real paths
    -        vpath = posixpath.normpath(posixpath.join(self.virtual_cwd, vpath))
    -        dirnode = self.virtual_root
    -        components = [component for component in vpath.split('/')]
    -        for component in components[:-1]:
    -            if component:
    -                dirnode = dirnode.join(component)
    -                if dirnode.kind != stat.S_IFDIR:
    -                    raise OSError(errno.ENOTDIR, component)
    -        return dirnode, components[-1]
    -
    -    def get_node(self, vpath):
    -        dirnode, name = self.translate_path(vpath)
    -        if name:
    -            node = dirnode.join(name)
    -        else:
    -            node = dirnode
    -        if self.log:
    -            self.log.vpath('%r => %r' % (vpath, node))
    -        return node
    -
    -    def do_ll_os__ll_os_stat(self, vpathname):
    -        node = self.get_node(vpathname)
    -        return node.stat()
    -    do_ll_os__ll_os_stat.resulttype = RESULTTYPE_STATRESULT
    -
    -    do_ll_os__ll_os_lstat = do_ll_os__ll_os_stat
    -
    -    def do_ll_os__ll_os_access(self, vpathname, mode):
    -        try:
    -            node = self.get_node(vpathname)
    -        except OSError as e:
    -            if e.errno == errno.ENOENT:
    -                return False
    -            raise
    -        return node.access(mode)
    -
    -    def do_ll_os__ll_os_isatty(self, fd):
    -        return self.virtual_console_isatty and fd in (0, 1, 2)
    -
    -    def allocate_fd(self, f, node=None):
    -        for fd in self.virtual_fd_range:
    -            if fd not in self.open_fds:
    -                self.open_fds[fd] = (f, node)
    -                return fd
    -        else:
    -            raise OSError(errno.EMFILE, "trying to open too many files")
    -
    -    def get_fd(self, fd, throw=True):
    -        """Get the objects implementing file descriptor `fd`.
    -
    -        Returns a pair, (open file, vfs node)
    -
    -        `throw`: if true, raise OSError for bad fd, else return (None, None).
    -        """
    -        try:
    -            f, node = self.open_fds[fd]
    -        except KeyError:
    -            if throw:
    -                raise OSError(errno.EBADF, "bad file descriptor")
    -            return None, None
    -        return f, node
    -
    -    def get_file(self, fd, throw=True):
    -        """Return the open file for file descriptor `fd`."""
    -        return self.get_fd(fd, throw)[0]
    -
    -    def do_ll_os__ll_os_open(self, vpathname, flags, mode):
    -        node = self.get_node(vpathname)
    -        if flags & (os.O_RDONLY|os.O_WRONLY|os.O_RDWR) != os.O_RDONLY:
    -            raise OSError(errno.EPERM, "write access denied")
    -        # all other flags are ignored
    -        f = node.open()
    -        return self.allocate_fd(f, node)
    -
    -    def do_ll_os__ll_os_close(self, fd):
    -        f = self.get_file(fd)
    -        del self.open_fds[fd]
    -        f.close()
    -
    -    def do_ll_os__ll_os_read(self, fd, size):
    -        f = self.get_file(fd, throw=False)
    -        if f is None:
    -            return super(VirtualizedSandboxedProc, self).do_ll_os__ll_os_read(
    -                fd, size)
    -        else:
    -            if not (0 <= size <= sys.maxint):
    -                raise OSError(errno.EINVAL, "invalid read size")
    -            # don't try to read more than 256KB at once here
    -            return f.read(min(size, 256*1024))
    -
    -    def do_ll_os__ll_os_fstat(self, fd):
    -        f, node = self.get_fd(fd)
    -        return node.stat()
    -    do_ll_os__ll_os_fstat.resulttype = RESULTTYPE_STATRESULT
    -
    -    def do_ll_os__ll_os_lseek(self, fd, pos, how):
    -        f = self.get_file(fd)
    -        f.seek(pos, how)
    -        return f.tell()
    -    do_ll_os__ll_os_lseek.resulttype = RESULTTYPE_LONGLONG
    -
    -    def do_ll_os__ll_os_getcwd(self):
    -        return self.virtual_cwd
    -
    -    def do_ll_os__ll_os_strerror(self, errnum):
-        # unsure if this shouldn't be considered sandboxsafe
    -        return os.strerror(errnum) or ('Unknown error %d' % (errnum,))
    -
    -    def do_ll_os__ll_os_listdir(self, vpathname):
    -        node = self.get_node(vpathname)
    -        return node.keys()
    -
    -    def do_ll_os__ll_os_unlink(self, vpathname):
    -        raise OSError(errno.EPERM, "write access denied")
    -
    -    def do_ll_os__ll_os_mkdir(self, vpathname, mode=None):
    -        raise OSError(errno.EPERM, "write access denied")
    -
    -    def do_ll_os__ll_os_getuid(self):
    -        return UID
    -    do_ll_os__ll_os_geteuid = do_ll_os__ll_os_getuid
    -
    -    def do_ll_os__ll_os_getgid(self):
    -        return GID
    -    do_ll_os__ll_os_getegid = do_ll_os__ll_os_getgid
    -
    -
    -class VirtualizedSocketProc(VirtualizedSandboxedProc):
    -    """ Extends VirtualizedSandboxProc with socket
    -    options, ie tcp://host:port as args to os.open
    -    """
    -    def __init__(self, *args, **kwds):
    -        super(VirtualizedSocketProc, self).__init__(*args, **kwds)
    -        self.sockets = {}
    -
    -    def do_ll_os__ll_os_open(self, name, flags, mode):
    -        if not name.startswith("tcp://"):
    -            return super(VirtualizedSocketProc, self).do_ll_os__ll_os_open(
    -                name, flags, mode)
    -        import socket
    -        host, port = name[6:].split(":")
    -        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    -        sock.connect((host, int(port)))
    -        fd = self.allocate_fd(sock)
    -        self.sockets[fd] = True
    -        return fd
    -
    -    def do_ll_os__ll_os_read(self, fd, size):
    -        if fd in self.sockets:
    -            return self.get_file(fd).recv(size)
    -        return super(VirtualizedSocketProc, self).do_ll_os__ll_os_read(
    -            fd, size)
    -
    -    def do_ll_os__ll_os_write(self, fd, data):
    -        if fd in self.sockets:
    -            return self.get_file(fd).send(data)
    -        return super(VirtualizedSocketProc, self).do_ll_os__ll_os_write(
    -            fd, data)
    -
    diff --git a/rpython/translator/sandbox/test/test_graphchecker.py b/rpython/translator/sandbox/test/test_graphchecker.py
    --- a/rpython/translator/sandbox/test/test_graphchecker.py
    +++ b/rpython/translator/sandbox/test/test_graphchecker.py
    @@ -52,7 +52,11 @@
                 return llop.force_cast(lltype.Signed, x)
             self.check_safe(f, [float])
             self.check_safe(f, [lltype.Ptr(SRAW)])
    -        self.check_unsafe("argument is a GC ptr", f, [lltype.Ptr(SGC)])
    +        self.check_safe(f, [lltype.Ptr(SGC)])
    +        #
    +        def g(x):
    +            return llop.force_cast(lltype.Ptr(SGC), x)
    +        self.check_unsafe("result is a GC ptr", g, [int])
     
         def test_direct_call_to_check_caller(self):
             @sandbox_review(check_caller=True)
    diff --git a/rpython/translator/sandbox/test/test_sandlib.py b/rpython/translator/sandbox/test/test_sandlib.py
    deleted file mode 100644
    --- a/rpython/translator/sandbox/test/test_sandlib.py
    +++ /dev/null
    @@ -1,267 +0,0 @@
    -import py
    -import errno, os, StringIO
    -from rpython.tool.sourcetools import func_with_new_name
    -from rpython.rtyper.lltypesystem import rffi
    -from rpython.translator.sandbox.sandlib import SandboxedProc
    -from rpython.translator.sandbox.sandlib import SimpleIOSandboxedProc
    -from rpython.translator.sandbox.sandlib import VirtualizedSandboxedProc
    -from rpython.translator.sandbox.sandlib import VirtualizedSocketProc
    -from rpython.translator.sandbox.test.test_sandbox import compile
    -from rpython.translator.sandbox.vfs import Dir, File, RealDir, RealFile
    -
    -
    -class MockSandboxedProc(SandboxedProc):
    -    """A sandbox process wrapper that replays expected syscalls."""
    -
    -    def __init__(self, args, expected):
    -        SandboxedProc.__init__(self, args)
    -        self.expected = expected
    -        self.seen = 0
    -
    -    def _make_method(name):
    -        def do_xxx(self, *input):
    -            print "decoded from subprocess: %s%r" % (name, input)
    -            expectedmsg, expectedinput, output = self.expected[self.seen]
    -            assert name == expectedmsg
    -            assert input == expectedinput
    -            self.seen += 1
    -            if isinstance(output, Exception):
    -                raise output
    -            return output
    -        return func_with_new_name(do_xxx, 'do_%s' % name)
    -
    -    do_ll_os__ll_os_open  = _make_method("open")
    -    do_ll_os__ll_os_read  = _make_method("read")
    -    do_ll_os__ll_os_write = _make_method("write")
    -    do_ll_os__ll_os_close = _make_method("close")
    -
    -
    -def test_lib():
    -    def entry_point(argv):
    -        fd = os.open("/tmp/foobar", os.O_RDONLY, 0777)
    -        assert fd == 77
    -        res = os.read(fd, 123)
    -        assert res == "he\x00llo"
    -        count = os.write(fd, "world\x00!\x00")
    -        assert count == 42
    -        for arg in argv:
    -            count = os.write(fd, arg)
    -            assert count == 61
    -        os.close(fd)
    -        return 0
    -    exe = compile(entry_point)
    -
    -    proc = MockSandboxedProc([exe, 'x1', 'y2'], expected = [
    -        ("open", ("/tmp/foobar", os.O_RDONLY, 0777), 77),
    -        ("read", (77, 123), "he\x00llo"),
    -        ("write", (77, "world\x00!\x00"), 42),
    -        ("write", (77, exe), 61),
    -        ("write", (77, "x1"), 61),
    -        ("write", (77, "y2"), 61),
    -        ("close", (77,), None),
    -        ])
    -    proc.handle_forever()
    -    assert proc.seen == len(proc.expected)
    -
    -def test_foobar():
    -    py.test.skip("to be updated")
    -    foobar = rffi.llexternal("foobar", [rffi.CCHARP], rffi.LONG)
    -    def entry_point(argv):
    -        s = rffi.str2charp(argv[1]); n = foobar(s); rffi.free_charp(s)
    -        s = rffi.str2charp(argv[n]); n = foobar(s); rffi.free_charp(s)
    -        return n
    -    exe = compile(entry_point)
    -
    -    proc = MockSandboxedProc([exe, 'spam', 'egg'], expected = [
    -        ("foobar", ("spam",), 2),
    -        ("foobar", ("egg",), 0),
    -        ])
    -    proc.handle_forever()
    -    assert proc.seen == len(proc.expected)
    -
    -def test_simpleio():
    -    def entry_point(argv):
    -        print "Please enter a number:"
    -        buf = ""
    -        while True:
    -            t = os.read(0, 1)    # 1 character from stdin
    -            if not t:
    -                raise EOFError
    -            if t == '\n':
    -                break
    -            buf += t
    -        num = int(buf)
    -        print "The double is:", num * 2
    -        return 0
    -    exe = compile(entry_point)
    -
    -    proc = SimpleIOSandboxedProc([exe, 'x1', 'y2'])
    -    output, error = proc.communicate("21\n")
    -    assert output == "Please enter a number:\nThe double is: 42\n"
    -    assert error == ""
    -
    -def test_socketio():
    -    class SocketProc(VirtualizedSocketProc, SimpleIOSandboxedProc):
    -        def build_virtual_root(self):
    -            pass
    -
    -    def entry_point(argv):
    -        fd = os.open("tcp://python.org:80", os.O_RDONLY, 0777)
    -        os.write(fd, 'GET /\n')
    -        print os.read(fd, 50)
    -        return 0
    -    exe = compile(entry_point)
    -
    -    proc = SocketProc([exe])
    -    output, error = proc.communicate("")
    -    assert output.startswith('HTTP/1.0 400 Bad request')
    -
    -def test_oserror():
    -    def entry_point(argv):
    -        try:
    -            os.open("/tmp/foobar", os.O_RDONLY, 0777)
    -        except OSError as e:
    -            os.close(e.errno)    # nonsense, just to see outside
    -        return 0
    -    exe = compile(entry_point)
    -
    -    proc = MockSandboxedProc([exe], expected = [
    -        ("open", ("/tmp/foobar", os.O_RDONLY, 0777), OSError(-42, "baz")),
    -        ("close", (-42,), None),
    -        ])
    -    proc.handle_forever()
    -    assert proc.seen == len(proc.expected)
    -
    -
    -class SandboxedProcWithFiles(VirtualizedSandboxedProc, SimpleIOSandboxedProc):
    -    """A sandboxed process with a simple virtualized filesystem.
    -
    -    For testing file operations.
    -
    -    """
    -    def build_virtual_root(self):
    -        return Dir({
    -            'hi.txt': File("Hello, world!\n"),
    -            'this.pyc': RealFile(__file__),
    -             })
    -
    -def test_too_many_opens():
    -    def entry_point(argv):
    -        try:
    -            open_files = []
    -            for i in range(500):
    -                fd = os.open('/hi.txt', os.O_RDONLY, 0777)
    -                open_files.append(fd)
    -                txt = os.read(fd, 100)
    -                if txt != "Hello, world!\n":
    -                    print "Wrong content: %s" % txt
    -        except OSError as e:
    -            # We expect to get EMFILE, for opening too many files.
    -            if e.errno != errno.EMFILE:
    -                print "OSError: %s!" % (e.errno,)
    -        else:
    -            print "We opened 500 fake files! Shouldn't have been able to."
    -
    -        for fd in open_files:
    -            os.close(fd)
    -
    -        try:
    -            open_files = []
    -            for i in range(500):
    -                fd = os.open('/this.pyc', os.O_RDONLY, 0777)
    -                open_files.append(fd)
    -        except OSError as e:
    -            # We expect to get EMFILE, for opening too many files.
    -            if e.errno != errno.EMFILE:
    -                print "OSError: %s!" % (e.errno,)
    -        else:
    -            print "We opened 500 real files! Shouldn't have been able to."
    -
    -        print "All ok!"
    -        return 0
    -    exe = compile(entry_point)
    -
    -    proc = SandboxedProcWithFiles([exe])
    -    output, error = proc.communicate("")
    -    assert output == "All ok!\n"
    -    assert error == ""
    -
    -def test_fstat():
    -    def compare(a, b, i):
    -        if a != b:
    -            print "stat and fstat differ @%d: %s != %s" % (i, a, b)
    -
    -    def entry_point(argv):
    -        try:
    -            # Open a file, and compare stat and fstat
    -            fd = os.open('/hi.txt', os.O_RDONLY, 0777)
    -            st = os.stat('/hi.txt')
    -            fs = os.fstat(fd)
    -            # RPython requires the index for stat to be a constant.. :(
    -            compare(st[0], fs[0], 0)
    -            compare(st[1], fs[1], 1)
    -            compare(st[2], fs[2], 2)
    -            compare(st[3], fs[3], 3)
    -            compare(st[4], fs[4], 4)
    -            compare(st[5], fs[5], 5)
    -            compare(st[6], fs[6], 6)
    -            compare(st[7], fs[7], 7)
    -            compare(st[8], fs[8], 8)
    -            compare(st[9], fs[9], 9)
    -        except OSError as e:
    -            print "OSError: %s" % (e.errno,)
    -        print "All ok!"
    -        return 0
    -    exe = compile(entry_point)
    -
    -    proc = SandboxedProcWithFiles([exe])
    -    output, error = proc.communicate("")
    -    assert output == "All ok!\n"
    -    assert error == ""
    -
    -def test_lseek():
    -    def char_should_be(c, should):
    -        if c != should:
    -            print "Wrong char: '%s' should be '%s'" % (c, should)
    -
    -    def entry_point(argv):
    -        fd = os.open('/hi.txt', os.O_RDONLY, 0777)
    -        char_should_be(os.read(fd, 1), "H")
    -        new = os.lseek(fd, 3, os.SEEK_CUR)
    -        if new != 4:
    -            print "Wrong offset, %d should be 4" % new
    -        char_should_be(os.read(fd, 1), "o")
    -        new = os.lseek(fd, -3, os.SEEK_END)
    -        if new != 11:
    -            print "Wrong offset, %d should be 11" % new
    -        char_should_be(os.read(fd, 1), "d")
    -        new = os.lseek(fd, 7, os.SEEK_SET)
    -        if new != 7:
    -            print "Wrong offset, %d should be 7" % new
    -        char_should_be(os.read(fd, 1), "w")
    -        print "All ok!"
    -        return 0
    -    exe = compile(entry_point)
    -
    -    proc = SandboxedProcWithFiles([exe])
    -    output, error = proc.communicate("")
    -    assert output == "All ok!\n"
    -    assert error == ""
    -
    -def test_getuid():
    -    if not hasattr(os, 'getuid'):
    -        py.test.skip("posix only")
    -
    -    def entry_point(argv):
    -        import os
    -        print "uid is %s" % os.getuid()
    -        print "euid is %s" % os.geteuid()
    -        print "gid is %s" % os.getgid()
    -        print "egid is %s" % os.getegid()
    -        return 0
    -    exe = compile(entry_point)
    -
    -    proc = SandboxedProcWithFiles([exe])
    -    output, error = proc.communicate("")
    -    assert output == "uid is 1000\neuid is 1000\ngid is 1000\negid is 1000\n"
    -    assert error == ""
    diff --git a/rpython/translator/sandbox/test/test_vfs.py b/rpython/translator/sandbox/test/test_vfs.py
    deleted file mode 100644
    --- a/rpython/translator/sandbox/test/test_vfs.py
    +++ /dev/null
    @@ -1,114 +0,0 @@
    -import py
    -import sys, stat, os
    -from rpython.translator.sandbox.vfs import *
    -from rpython.tool.udir import udir
    -
    -HASLINK = hasattr(os, 'symlink')
    -
    -def setup_module(mod):
    -    d = udir.ensure('test_vfs', dir=1)
    -    d.join('file1').write('somedata1')
    -    d.join('file2').write('somelongerdata2')
    -    os.chmod(str(d.join('file2')), stat.S_IWUSR)     # unreadable
    -    d.join('.hidden').write('secret')
    -    d.ensure('subdir1', dir=1).join('subfile1').write('spam')
    -    d.ensure('.subdir2', dir=1).join('subfile2').write('secret as well')
    -    if HASLINK:
    -        d.join('symlink1').mksymlinkto(str(d.join('subdir1')))
    -        d.join('symlink2').mksymlinkto('.hidden')
    -        d.join('symlink3').mksymlinkto('BROKEN')
    -
    -
    -def test_dir():
    -    d = Dir({'foo': Dir()})
    -    assert d.keys() == ['foo']
    -    py.test.raises(OSError, d.open)
    -    assert 0 <= d.getsize() <= sys.maxint
    -    d1 = d.join('foo')
    -    assert stat.S_ISDIR(d1.kind)
    -    assert d1.keys() == []
    -    py.test.raises(OSError, d.join, 'bar')
    -    st = d.stat()
    -    assert stat.S_ISDIR(st.st_mode)
    -    assert d.access(os.R_OK | os.X_OK)
    -    assert not d.access(os.W_OK)
    -
    -def test_file():
    -    f = File('hello world')
    -    assert stat.S_ISREG(f.kind)
    -    py.test.raises(OSError, f.keys)
    -    assert f.getsize() == 11
    -    h = f.open()
    -    data = h.read()
    -    assert data == 'hello world'
    -    h.close()
    -    st = f.stat()
    -    assert stat.S_ISREG(st.st_mode)
    -    assert st.st_size == 11
    -    assert f.access(os.R_OK)
    -    assert not f.access(os.W_OK)
    -
    -def test_realdir_realfile():
    -    for show_dotfiles in [False, True]:
    -        for follow_links in [False, True]:
    -            v_udir = RealDir(str(udir), show_dotfiles = show_dotfiles,
    -                                        follow_links  = follow_links)
    -            v_test_vfs = v_udir.join('test_vfs')
    -            names = v_test_vfs.keys()
    -            names.sort()
    -            assert names == (show_dotfiles * ['.hidden', '.subdir2'] +
    -                                          ['file1', 'file2', 'subdir1'] +
    -                             HASLINK * ['symlink1', 'symlink2', 'symlink3'])
    -            py.test.raises(OSError, v_test_vfs.open)
    -            assert 0 <= v_test_vfs.getsize() <= sys.maxint
    -
    -            f = v_test_vfs.join('file1')
    -            assert f.open().read() == 'somedata1'
    -
    -            f = v_test_vfs.join('file2')
    -            assert f.getsize() == len('somelongerdata2')
    -            if os.name != 'nt':     # can't have unreadable files there?
    -                py.test.raises(OSError, f.open)
    -
    -            py.test.raises(OSError, v_test_vfs.join, 'does_not_exist')
    -            py.test.raises(OSError, v_test_vfs.join, 'symlink3')
    -            if follow_links and HASLINK:
    -                d = v_test_vfs.join('symlink1')
    -                assert stat.S_ISDIR(d.stat().st_mode)
    -                assert d.keys() == ['subfile1']
    -                assert d.join('subfile1').open().read() == 'spam'
    -
    -                f = v_test_vfs.join('symlink2')
    -                assert stat.S_ISREG(f.stat().st_mode)
    -                assert f.access(os.R_OK)
    -                assert f.open().read() == 'secret'
    -            else:
    -                py.test.raises(OSError, v_test_vfs.join, 'symlink1')
    -                py.test.raises(OSError, v_test_vfs.join, 'symlink2')
    -
    -            if show_dotfiles:
    -                f = v_test_vfs.join('.hidden')
    -                assert f.open().read() == 'secret'
    -
    -                d = v_test_vfs.join('.subdir2')
    -                assert d.keys() == ['subfile2']
    -                assert d.join('subfile2').open().read() == 'secret as well'
    -            else:
    -                py.test.raises(OSError, v_test_vfs.join, '.hidden')
    -                py.test.raises(OSError, v_test_vfs.join, '.subdir2')
    -
    -def test_realdir_exclude():
    -    xdir = udir.ensure('test_realdir_exclude', dir=1)
    -    xdir.ensure('test_realdir_exclude.yes')
    -    xdir.ensure('test_realdir_exclude.no')
    -    v_udir = RealDir(str(udir), exclude=['.no'])
    -    v_xdir = v_udir.join('test_realdir_exclude')
    -    assert 'test_realdir_exclude.yes' in v_xdir.keys()
    -    assert 'test_realdir_exclude.no' not in v_xdir.keys()
    -    v_xdir.join('test_realdir_exclude.yes')    # works
    -    py.test.raises(OSError, v_xdir.join, 'test_realdir_exclude.no')
    -    # Windows and Mac tests, for the case
    -    py.test.raises(OSError, v_xdir.join, 'Test_RealDir_Exclude.no')
    -    py.test.raises(OSError, v_xdir.join, 'test_realdir_exclude.No')
    -    py.test.raises(OSError, v_xdir.join, 'test_realdir_exclude.nO')
    -    py.test.raises(OSError, v_xdir.join, 'test_realdir_exclude.NO')
    diff --git a/rpython/translator/sandbox/vfs.py b/rpython/translator/sandbox/vfs.py
    deleted file mode 100644
    --- a/rpython/translator/sandbox/vfs.py
    +++ /dev/null
    @@ -1,137 +0,0 @@
    -import os
    -import stat, errno
    -
    -UID = 1000
    -GID = 1000
    -ATIME = MTIME = CTIME = 0
    -INO_COUNTER = 0
    -
    -
    -class FSObject(object):
    -    read_only = True
    -
    -    def stat(self):
    -        try:
    -            st_ino = self._st_ino
    -        except AttributeError:
    -            global INO_COUNTER
    -            INO_COUNTER += 1
    -            st_ino = self._st_ino = INO_COUNTER
    -        st_dev = 1
    -        st_nlink = 1
    -        st_size = self.getsize()
    -        st_mode = self.kind
    -        st_mode |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
    -        if stat.S_ISDIR(self.kind):
    -            st_mode |= stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    -        if self.read_only:
    -            st_uid = 0       # read-only files are virtually owned by root
    -            st_gid = 0
    -        else:
    -            st_uid = UID     # read-write files are owned by this virtual user
    -            st_gid = GID
    -        st_atime = ATIME
    -        st_mtime = MTIME
    -        st_ctime = CTIME
    -        return os.stat_result(
    -            (st_mode, st_ino, st_dev, st_nlink, st_uid, st_gid,
    -             st_size, st_atime, st_mtime, st_ctime))
    -
    -    def access(self, mode):
    -        s = self.stat()
    -        e_mode = s.st_mode & stat.S_IRWXO
    -        if UID == s.st_uid:
    -            e_mode |= (s.st_mode & stat.S_IRWXU) >> 6
    -        if GID == s.st_gid:
    -            e_mode |= (s.st_mode & stat.S_IRWXG) >> 3
    -        return (e_mode & mode) == mode
    -
    -    def keys(self):
    -        raise OSError(errno.ENOTDIR, self)
    -
    -    def open(self):
    -        raise OSError(errno.EACCES, self)
    -
    -    def getsize(self):
    -        return 0
    -
    -
    -class Dir(FSObject):
    -    kind = stat.S_IFDIR
    -    def __init__(self, entries={}):
    -        self.entries = entries
    -    def keys(self):
    -        return self.entries.keys()
    -    def join(self, name):
    -        try:
    -            return self.entries[name]
    -        except KeyError:
    -            raise OSError(errno.ENOENT, name)
    -
    -class RealDir(Dir):
    -    # If show_dotfiles=False, we pretend that all files whose name starts
    -    # with '.' simply don't exist.  If follow_links=True, then symlinks are
    -    # transparently followed (they look like a regular file or directory to
    -    # the sandboxed process).  If follow_links=False, the subprocess is
    -    # not allowed to access them at all.  Finally, exclude is a list of
    -    # file endings that we filter out (note that we also filter out files
    -    # with the same ending but a different case, to be safe).
    -    def __init__(self, path, show_dotfiles=False, follow_links=False,
    -                 exclude=[]):
    -        self.path = path
    -        self.show_dotfiles = show_dotfiles
    -        self.follow_links  = follow_links
    -        self.exclude       = [excl.lower() for excl in exclude]
    -    def __repr__(self):
-        return '<RealDir %s>' % (self.path,)
    -    def keys(self):
    -        names = os.listdir(self.path)
    -        if not self.show_dotfiles:
    -            names = [name for name in names if not name.startswith('.')]
    -        for excl in self.exclude:
    -            names = [name for name in names if not name.lower().endswith(excl)]
    -        return names
    -    def join(self, name):
    -        if name.startswith('.') and not self.show_dotfiles:
    -            raise OSError(errno.ENOENT, name)
    -        for excl in self.exclude:
    -            if name.lower().endswith(excl):
    -                raise OSError(errno.ENOENT, name)
    -        path = os.path.join(self.path, name)
    -        if self.follow_links:
    -            st = os.stat(path)
    -        else:
    -            st = os.lstat(path)
    -        if stat.S_ISDIR(st.st_mode):
    -            return RealDir(path, show_dotfiles = self.show_dotfiles,
    -                                 follow_links  = self.follow_links,
    -                                 exclude       = self.exclude)
    -        elif stat.S_ISREG(st.st_mode):
    -            return RealFile(path)
    -        else:
    -            # don't allow access to symlinks and other special files
    -            raise OSError(errno.EACCES, path)
    -
    -class File(FSObject):
    -    kind = stat.S_IFREG
    -    def __init__(self, data=''):
    -        self.data = data
    -    def getsize(self):
    -        return len(self.data)
    -    def open(self):
    -        import cStringIO
    -        return cStringIO.StringIO(self.data)
    -
    -class RealFile(File):
    -    def __init__(self, path, mode=0):
    -        self.path = path
    -        self.kind |= mode
    -    def __repr__(self):
-        return '<RealFile %s>' % (self.path,)
    -    def getsize(self):
    -        return os.stat(self.path).st_size
    -    def open(self):
    -        try:
    -            return open(self.path, "rb")
    -        except IOError as e:
    -            raise OSError(e.errno, "open failed")
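
Since sandlib.py is deleted wholesale in this changeset, a compressed sketch
of the dispatch scheme it implemented may help when reading the removed code
above: every sandboxed "system call" reached the controlling process as a
function name plus arguments and was routed to a do_xxx() handler, so only
explicitly implemented operations could have any effect. The snippet below is
illustration only (MiniController and its direct calls are invented for the
sketch; the real SandboxedProc exchanged marshalled messages over the
subprocess's stdin/stdout):

    import errno

    class MiniController(object):
        def handle_message(self, fnname, *args):
            # same naming convention as the deleted SandboxedProc.handle_message()
            if '__' in fnname:
                raise ValueError("unsafe fnname")
            handler = getattr(self, 'do_' + fnname.replace('.', '__'), None)
            if handler is None:
                raise RuntimeError("no handler for this function")
            return handler(*args)

        # only whitelisted operations get a handler
        def do_ll_os__ll_os_getcwd(self):
            return '/tmp'

        def do_ll_os__ll_os_open(self, path, flags, mode):
            raise OSError(errno.EPERM, "write access denied")

    controller = MiniController()
    print(controller.handle_message('ll_os.ll_os_getcwd'))        # /tmp
    try:
        controller.handle_message('ll_os.ll_os_open', '/etc/passwd', 0, 0)
    except OSError as e:
        print('denied with errno %d' % e.errno)                   # EPERM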
    
    From pypy.commits at gmail.com  Tue Aug 27 02:43:09 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Mon, 26 Aug 2019 23:43:09 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: in-progress
    Message-ID: <5d64d0fd.1c69fb81.57a05.abb0@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97289:f4bf2fbd0c19
    Date: 2019-08-27 08:42 +0200
    http://bitbucket.org/pypy/pypy/changeset/f4bf2fbd0c19/
    
    Log:	in-progress
    
    diff --git a/pypy/module/__pypy__/interp_time.py b/pypy/module/__pypy__/interp_time.py
    --- a/pypy/module/__pypy__/interp_time.py
    +++ b/pypy/module/__pypy__/interp_time.py
    @@ -6,10 +6,12 @@
     from rpython.rtyper.lltypesystem import rffi, lltype
     from rpython.rlib import rtime
     from rpython.rlib.rtime import HAS_CLOCK_GETTIME
    +from rpython.rlib.objectmodel import sandbox_review
     
     
     if HAS_CLOCK_GETTIME:
     
    +    @sandbox_review(reviewed=True)
         @unwrap_spec(clk_id="c_int")
         def clock_gettime(space, clk_id):
             with lltype.scoped_alloc(rtime.TIMESPEC) as tp:
    @@ -20,6 +22,7 @@
                      float(rffi.getintfield(tp, 'c_tv_nsec')) * 0.000000001)
             return space.newfloat(t)
     
    +    @sandbox_review(reviewed=True)
         @unwrap_spec(clk_id="c_int")
         def clock_getres(space, clk_id):
             with lltype.scoped_alloc(rtime.TIMESPEC) as tp:
    diff --git a/pypy/module/_file/readinto.py b/pypy/module/_file/readinto.py
    --- a/pypy/module/_file/readinto.py
    +++ b/pypy/module/_file/readinto.py
    @@ -22,7 +22,7 @@
         fd = -1
         target_pos = 0
     
    -    if size > 64:
    +    if size > 64 and not self.space.config.translation.sandbox:
             try:
                 target_address = rwbuffer.get_raw_address()
             except ValueError:
    @@ -48,6 +48,13 @@
         else:
             # optimized case: reading more than 64 bytes into a rwbuffer
             # with a valid raw address
    +
    +        # XXX note that this is not fully safe, because we don't "lock"
    +        # the buffer so we can't in theory pass its raw address to c_read().
    +        # Another thread could cause it to be freed in parallel.
    +        # Without proper buffer locking, it's not going to be fixed, though.
    +        assert not self.space.config.translation.sandbox
    +
             self.check_readable()
     
             # first "read" the part that is already sitting in buffers, if any
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -495,6 +495,7 @@
     
     #___________________________________________________________________
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('open')
     @specialize.argtype(0)
     @enforceargs(NOT_CONSTANT, int, int, typecheck=False)
    @@ -514,6 +515,7 @@
     c_close = external(UNDERSCORE_ON_WIN32 + 'close', [rffi.INT], rffi.INT,
                        releasegil=False, save_err=rffi.RFFI_SAVE_ERRNO)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('read')
     @signature(types.int(), types.int(), returns=types.any())
     def read(fd, count):
    @@ -525,6 +527,7 @@
                 got = handle_posix_error('read', c_read(fd, void_buf, count))
                 return buf.str(got)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('write')
     @signature(types.int(), types.any(), returns=types.any())
     def write(fd, data):
    @@ -711,6 +714,7 @@
         with FdValidator(fd):
             handle_posix_error('fchdir', c_fchdir(fd))
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('access')
     @specialize.argtype(0)
     def access(path, mode):
    @@ -753,6 +757,7 @@
                          [rffi.CWCHARP, rffi.SIZE_T], rffi.CWCHARP,
                          save_err=rffi.RFFI_SAVE_ERRNO)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('getcwd')
     def getcwd():
         bufsize = 256
    @@ -773,6 +778,7 @@
         lltype.free(buf, flavor='raw')
         return result
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('getcwdu')
     def getcwdu():
         bufsize = 256
    diff --git a/rpython/rlib/rposix_environ.py b/rpython/rlib/rposix_environ.py
    --- a/rpython/rlib/rposix_environ.py
    +++ b/rpython/rlib/rposix_environ.py
    @@ -2,7 +2,7 @@
     import sys
     from rpython.annotator import model as annmodel
     from rpython.rlib._os_support import _WIN32, StringTraits, UnicodeTraits
    -from rpython.rlib.objectmodel import enforceargs
    +from rpython.rlib.objectmodel import enforceargs, sandbox_review
     # importing rposix here creates a cycle on Windows
     from rpython.rtyper.controllerentry import Controller
     from rpython.rtyper.lltypesystem import rffi, lltype
    @@ -148,6 +148,7 @@
             byname, eq = envkeepalive.bywname, u'='
             from rpython.rlib.rwin32 import lastSavedWindowsError as last_error
     
    +    @sandbox_review(reviewed=True)
         def envitems_llimpl():
             environ = get_environ()
             result = []
    @@ -162,6 +163,7 @@
                 i += 1
             return result
     
    +    @sandbox_review(reviewed=True)
         def getenv_llimpl(name):
             with traits.scoped_str2charp(name) as l_name:
                 l_result = getenv(l_name)
    diff --git a/rpython/rlib/rposix_stat.py b/rpython/rlib/rposix_stat.py
    --- a/rpython/rlib/rposix_stat.py
    +++ b/rpython/rlib/rposix_stat.py
    @@ -18,6 +18,7 @@
     
     from rpython.rlib._os_support import _preferred_traits, string_traits
     from rpython.rlib.objectmodel import specialize, we_are_translated, not_rpython
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rtyper.lltypesystem import lltype, rffi
     from rpython.translator.tool.cbuild import ExternalCompilationInfo
     from rpython.rlib.rarithmetic import intmask
    @@ -574,6 +575,7 @@
             finally:
                 lltype.free(info, flavor='raw')
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('stat')
     @specialize.argtype(0)
     def stat(path):
    @@ -587,6 +589,7 @@
             path = traits.as_str0(path)
             return win32_xstat(traits, path, traverse=True)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('lstat')
     @specialize.argtype(0)
     def lstat(path):
    diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py
    --- a/rpython/rtyper/lltypesystem/rffi.py
    +++ b/rpython/rtyper/lltypesystem/rffi.py
    @@ -346,11 +346,15 @@
             wrapper = sandbox_review(abort=True)(wrapper)
         else:
             assert isinstance(sandboxsafe, bool)
    -        wrapper = sandbox_review(reviewed=True)(wrapper)
    +        if sandboxsafe or (all(_sandbox_type_safe(ARG) for ARG in args) and
    +                           _sandbox_type_safe(result)):
    +            wrapper = sandbox_review(reviewed=True)(wrapper)
    +        else:
    +            wrapper = sandbox_review(check_caller=True)(wrapper)
         return wrapper
     
    -def sandbox_check_type(TYPE):
    -    return not isinstance(TYPE, lltype.Primitive) or TYPE == llmemory.Address
    +def _sandbox_type_safe(TYPE):
    +    return isinstance(TYPE, lltype.Primitive) and TYPE != llmemory.Address
     
     
     class CallbackHolder:
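
This changeset applies @sandbox_review(...) markers but does not show the
decorator itself (it is imported from rpython.rlib.objectmodel on this
branch). Purely as a hedged sketch of the general shape such a marker can
take, and not the actual RPython implementation: it only needs to record a
review status on the function so that a later whole-program pass (such as
graphchecker.py in this branch) can treat reviewed, check-caller and abort
functions differently.

    # Hypothetical sketch, not the real rpython.rlib.objectmodel.sandbox_review.
    def sandbox_review(reviewed=False, check_caller=False, abort=False):
        # the call sites in this changeset always pass exactly one flag
        assert sum([reviewed, check_caller, abort]) == 1
        def decorate(func):
            if reviewed:
                func._sandbox_review_ = 'reviewed'
            elif check_caller:
                func._sandbox_review_ = 'check_caller'
            else:
                func._sandbox_review_ = 'abort'
            return func
        return decorate

    @sandbox_review(reviewed=True)
    def clock_gettime_like(clk_id):
        # stand-in for a reviewed wrapper such as clock_gettime() above
        return 0.0

    print(clock_gettime_like._sandbox_review_)    # reviewed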
    
    From pypy.commits at gmail.com  Tue Aug 27 03:27:25 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Tue, 27 Aug 2019 00:27:25 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: More review of the posix modules
    Message-ID: <5d64db5d.1c69fb81.120c1.8350@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97290:76b447660dd6
    Date: 2019-08-27 09:26 +0200
    http://bitbucket.org/pypy/pypy/changeset/76b447660dd6/
    
    Log:	More review of the posix modules
    
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -395,12 +395,14 @@
                       save_err=rffi.RFFI_SAVE_ERRNO)
     c_open = external(UNDERSCORE_ON_WIN32 + 'open',
                       [rffi.CCHARP, rffi.INT, rffi.MODE_T], rffi.INT,
    -                  save_err=rffi.RFFI_SAVE_ERRNO)
    +                  save_err=rffi.RFFI_SAVE_ERRNO,
    +                  sandboxsafe="nowrite")
     
     # Win32 Unicode functions
     c_wopen = external(UNDERSCORE_ON_WIN32 + 'wopen',
                        [rffi.CWCHARP, rffi.INT, rffi.MODE_T], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO,
    +                   sandboxsafe="nowrite")
     
     #___________________________________________________________________
     # Wrappers around posix functions, that accept either strings, or
    @@ -495,7 +497,6 @@
     
     #___________________________________________________________________
     
    - at sandbox_review(reviewed=True)
     @replace_os_function('open')
     @specialize.argtype(0)
     @enforceargs(NOT_CONSTANT, int, int, typecheck=False)
    @@ -652,13 +653,13 @@
     #___________________________________________________________________
     
     c_chdir = external('chdir', [rffi.CCHARP], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_fchdir = external('fchdir', [rffi.INT], rffi.INT,
                         save_err=rffi.RFFI_SAVE_ERRNO)
     c_access = external(UNDERSCORE_ON_WIN32 + 'access',
    -                    [rffi.CCHARP, rffi.INT], rffi.INT)
    +                    [rffi.CCHARP, rffi.INT], rffi.INT, sandboxsafe="nowrite")
     c_waccess = external(UNDERSCORE_ON_WIN32 + 'waccess',
    -                     [rffi.CWCHARP, rffi.INT], rffi.INT)
    +                     [rffi.CWCHARP, rffi.INT], rffi.INT, sandboxsafe="nowrite")
     
     @replace_os_function('chdir')
     @specialize.argtype(0)
    @@ -714,7 +715,6 @@
         with FdValidator(fd):
             handle_posix_error('fchdir', c_fchdir(fd))
     
    - at sandbox_review(reviewed=True)
     @replace_os_function('access')
     @specialize.argtype(0)
     def access(path, mode):
    @@ -817,9 +817,11 @@
         DIRENT = dirent_config['DIRENT']
         DIRENTP = lltype.Ptr(DIRENT)
         c_opendir = external('opendir',
    -        [rffi.CCHARP], DIRP, save_err=rffi.RFFI_SAVE_ERRNO)
    +        [rffi.CCHARP], DIRP, save_err=rffi.RFFI_SAVE_ERRNO,
    +        sandboxsafe="nowrite")
         c_fdopendir = external('fdopendir',
    -        [rffi.INT], DIRP, save_err=rffi.RFFI_SAVE_ERRNO)
    +        [rffi.INT], DIRP, save_err=rffi.RFFI_SAVE_ERRNO,
    +        sandboxsafe="nowrite")
         c_rewinddir = external('rewinddir',
             [DIRP], lltype.Void, releasegil=False)
         # XXX macro=True is hack to make sure we get the correct kind of
    @@ -834,6 +836,7 @@
     else:
         dirent_config = {}
     
    + at sandbox_review(reviewed=True)
     def _listdir(dirp, rewind=False):
         result = []
         while True:
    @@ -853,6 +856,7 @@
         return result
     
     if not _WIN32:
    +    @sandbox_review(reviewed=True)
         def fdlistdir(dirfd):
             """
             Like listdir(), except that the directory is specified as an open
    @@ -927,17 +931,17 @@
     #___________________________________________________________________
     
     c_execv = external('execv', [rffi.CCHARP, rffi.CCHARPP], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_execve = external('execve',
                         [rffi.CCHARP, rffi.CCHARPP, rffi.CCHARPP], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_spawnv = external(UNDERSCORE_ON_WIN32 + 'spawnv',
                         [rffi.INT, rffi.CCHARP, rffi.CCHARPP], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_spawnve = external(UNDERSCORE_ON_WIN32 + 'spawnve',
                         [rffi.INT, rffi.CCHARP, rffi.CCHARPP, rffi.CCHARPP],
                          rffi.INT,
    -                     save_err=rffi.RFFI_SAVE_ERRNO)
    +                     save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
     @replace_os_function('execv')
     def execv(path, args):
    @@ -1116,6 +1120,7 @@
     c_getloadavg = external('getloadavg',
                             [rffi.CArrayPtr(lltype.Float), rffi.INT], rffi.INT)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('getlogin')
     def getlogin():
         result = c_getlogin()
    @@ -1123,6 +1128,7 @@
             raise OSError(get_saved_errno(), "getlogin failed")
         return rffi.charp2str(result)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('getloadavg')
     def getloadavg():
         load = lltype.malloc(rffi.CArrayPtr(lltype.Float).TO, 3, flavor='raw')
    @@ -1140,6 +1146,7 @@
                           [rffi.CCHARP, rffi.CCHARP, rffi.SIZE_T], rffi.SSIZE_T,
                           save_err=rffi.RFFI_SAVE_ERRNO)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('readlink')
     def readlink(path):
         path = _as_bytes0(path)
    @@ -1174,6 +1181,7 @@
                          releasegil=False,
                          save_err=rffi.RFFI_SAVE_ERRNO)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('ttyname')
     def ttyname(fd):
         l_name = c_ttyname(fd)
    @@ -1184,6 +1192,7 @@
     c_strerror = external('strerror', [rffi.INT], rffi.CCHARP,
                           releasegil=False, sandboxsafe=True)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('strerror')
     def strerror(errnum):
         res = c_strerror(errnum)
    @@ -1191,20 +1200,20 @@
             raise ValueError("os_strerror failed")
         return rffi.charp2str(res)
     
    -c_system = external('system', [rffi.CCHARP], rffi.INT)
    +c_system = external('system', [rffi.CCHARP], rffi.INT, sandboxsafe="nowrite")
     
     @replace_os_function('system')
     def system(command):
         return widen(c_system(command))
     
     c_unlink = external('unlink', [rffi.CCHARP], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_mkdir = external('mkdir', [rffi.CCHARP, rffi.MODE_T], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_rmdir = external(UNDERSCORE_ON_WIN32 + 'rmdir', [rffi.CCHARP], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_wrmdir = external(UNDERSCORE_ON_WIN32 + 'wrmdir', [rffi.CWCHARP], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
     @replace_os_function('unlink')
     @specialize.argtype(0)
    @@ -1238,11 +1247,11 @@
             handle_posix_error('rmdir', c_rmdir(_as_bytes0(path)))
     
     c_chmod = external('chmod', [rffi.CCHARP, rffi.MODE_T], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_fchmod = external('fchmod', [rffi.INT, rffi.MODE_T], rffi.INT,
                         save_err=rffi.RFFI_SAVE_ERRNO,)
     c_rename = external('rename', [rffi.CCHARP, rffi.CCHARP], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
     @replace_os_function('chmod')
     @specialize.argtype(0)
    @@ -1299,10 +1308,11 @@
     #___________________________________________________________________
     
     c_mkfifo = external('mkfifo', [rffi.CCHARP, rffi.MODE_T], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_mknod = external('mknod', [rffi.CCHARP, rffi.MODE_T, rffi.INT], rffi.INT,
     #                                           # xxx: actually ^^^ dev_t
    -                   macro=_MACRO_ON_POSIX, save_err=rffi.RFFI_SAVE_ERRNO)
    +                   macro=_MACRO_ON_POSIX, save_err=rffi.RFFI_SAVE_ERRNO,
    +                   sandboxsafe="nowrite")
     
     @replace_os_function('mkfifo')
     @specialize.argtype(0)
    @@ -1391,9 +1401,9 @@
             lltype.free(filedes, flavor='raw')
     
     c_link = external('link', [rffi.CCHARP, rffi.CCHARP], rffi.INT,
    -                  save_err=rffi.RFFI_SAVE_ERRNO,)
    +                  save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_symlink = external('symlink', [rffi.CCHARP, rffi.CCHARP], rffi.INT,
    -                     save_err=rffi.RFFI_SAVE_ERRNO)
    +                     save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
     #___________________________________________________________________
     
    @@ -1426,9 +1436,9 @@
         return widen(c_umask(newmask))
     
     c_chown = external('chown', [rffi.CCHARP, rffi.INT, rffi.INT], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_lchown = external('lchown', [rffi.CCHARP, rffi.INT, rffi.INT], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_fchown = external('fchown', [rffi.INT, rffi.INT, rffi.INT], rffi.INT,
                         save_err=rffi.RFFI_SAVE_ERRNO)
     
    @@ -1686,12 +1696,14 @@
     
     c_ctermid = external('ctermid', [rffi.CCHARP], rffi.CCHARP)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('ctermid')
     def ctermid():
         return rffi.charp2str(c_ctermid(lltype.nullptr(rffi.CCHARP.TO)))
     
     c_tmpnam = external('tmpnam', [rffi.CCHARP], rffi.CCHARP)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('tmpnam')
     def tmpnam():
         return rffi.charp2str(c_tmpnam(lltype.nullptr(rffi.CCHARP.TO)))
    @@ -1743,8 +1755,10 @@
         c_setgroups = external('setgroups', [rffi.SIZE_T, GID_GROUPS_T], rffi.INT,
                                save_err=rffi.RFFI_SAVE_ERRNO)
         c_initgroups = external('initgroups', [rffi.CCHARP, GID_T], rffi.INT,
    -                            save_err=rffi.RFFI_SAVE_ERRNO)
    +                            save_err=rffi.RFFI_SAVE_ERRNO,
    +                            sandboxsafe="nowrite")
     
+@sandbox_review(reviewed=True)
     @replace_os_function('getgroups')
     def getgroups():
         n = handle_posix_error('getgroups',
    @@ -1962,6 +1976,7 @@
     c_chroot = external('chroot', [rffi.CCHARP], rffi.INT,
                         save_err=rffi.RFFI_SAVE_ERRNO,
                         macro=_MACRO_ON_POSIX,
    +                    sandboxsafe="nowrite",
                         compilation_info=ExternalCompilationInfo(includes=['unistd.h']))
     
     @replace_os_function('chroot')
    @@ -1987,6 +2002,7 @@
                            compilation_info=CConfig._compilation_info_,
                            save_err=rffi.RFFI_SAVE_ERRNO)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('uname')
     def uname():
         l_utsbuf = lltype.malloc(UTSNAMEP.TO, flavor='raw')
    @@ -2030,7 +2046,8 @@
     c_fpathconf = external('fpathconf', [rffi.INT, rffi.INT], rffi.LONG,
                            save_err=rffi.RFFI_FULL_ERRNO_ZERO)
     c_pathconf = external('pathconf', [rffi.CCHARP, rffi.INT], rffi.LONG,
    -                      save_err=rffi.RFFI_FULL_ERRNO_ZERO)
    +                      save_err=rffi.RFFI_FULL_ERRNO_ZERO,
    +                      sandboxsafe="nowrite")
     c_confstr = external('confstr',
                          [rffi.INT, rffi.CCHARP, rffi.SIZE_T], rffi.SIZE_T,
                           save_err=rffi.RFFI_FULL_ERRNO_ZERO)
    @@ -2062,6 +2079,7 @@
                 raise OSError(errno, "pathconf failed")
         return res
     
+@sandbox_review(reviewed=True)
     @replace_os_function('confstr')
     def confstr(value):
         n = intmask(c_confstr(value, lltype.nullptr(rffi.CCHARP.TO), 0))
    @@ -2135,7 +2153,8 @@
     
     if HAVE_FACCESSAT:
         c_faccessat = external('faccessat',
    -        [rffi.INT, rffi.CCHARP, rffi.INT, rffi.INT], rffi.INT)
    +        [rffi.INT, rffi.CCHARP, rffi.INT, rffi.INT], rffi.INT,
    +        sandboxsafe="nowrite")
     
         def faccessat(pathname, mode, dir_fd=AT_FDCWD,
                 effective_ids=False, follow_symlinks=True):
    @@ -2153,7 +2172,7 @@
     if HAVE_FCHMODAT:
         c_fchmodat = external('fchmodat',
             [rffi.INT, rffi.CCHARP, rffi.INT, rffi.INT], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO,)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def fchmodat(path, mode, dir_fd=AT_FDCWD, follow_symlinks=True):
             if follow_symlinks:
    @@ -2166,7 +2185,7 @@
     if HAVE_FCHOWNAT:
         c_fchownat = external('fchownat',
             [rffi.INT, rffi.CCHARP, rffi.INT, rffi.INT, rffi.INT], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO,)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def fchownat(path, owner, group, dir_fd=AT_FDCWD,
                 follow_symlinks=True, empty_path=False):
    @@ -2181,7 +2200,7 @@
     if HAVE_FEXECVE:
         c_fexecve = external('fexecve',
             [rffi.INT, rffi.CCHARPP, rffi.CCHARPP], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def fexecve(fd, args, env):
             envstrs = []
    @@ -2202,7 +2221,7 @@
         c_linkat = external(
             'linkat',
             [rffi.INT, rffi.CCHARP, rffi.INT, rffi.CCHARP, rffi.INT], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def linkat(src, dst, src_dir_fd=AT_FDCWD, dst_dir_fd=AT_FDCWD,
                 follow_symlinks=True):
    @@ -2296,7 +2315,7 @@
     if HAVE_MKDIRAT:
         c_mkdirat = external('mkdirat',
             [rffi.INT, rffi.CCHARP, rffi.INT], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def mkdirat(pathname, mode, dir_fd=AT_FDCWD):
             error = c_mkdirat(dir_fd, pathname, mode)
    @@ -2305,7 +2324,7 @@
     if HAVE_UNLINKAT:
         c_unlinkat = external('unlinkat',
             [rffi.INT, rffi.CCHARP, rffi.INT], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def unlinkat(pathname, dir_fd=AT_FDCWD, removedir=False):
             flag = AT_REMOVEDIR if removedir else 0
    @@ -2343,7 +2362,7 @@
         c_renameat = external(
             'renameat',
             [rffi.INT, rffi.CCHARP, rffi.INT, rffi.CCHARP], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def renameat(src, dst, src_dir_fd=AT_FDCWD, dst_dir_fd=AT_FDCWD):
             error = c_renameat(src_dir_fd, src, dst_dir_fd, dst)
    @@ -2353,7 +2372,7 @@
     if HAVE_SYMLINKAT:
         c_symlinkat = external('symlinkat',
             [rffi.CCHARP, rffi.INT, rffi.CCHARP], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def symlinkat(src, dst, dir_fd=AT_FDCWD):
             error = c_symlinkat(src, dir_fd, dst)
    @@ -2362,7 +2381,7 @@
     if HAVE_OPENAT:
         c_openat = external('openat',
             [rffi.INT, rffi.CCHARP, rffi.INT, rffi.MODE_T], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         @enforceargs(s_Str0, int, int, int, typecheck=False)
         def openat(path, flags, mode, dir_fd=AT_FDCWD):
    @@ -2372,7 +2391,7 @@
     if HAVE_MKFIFOAT:
         c_mkfifoat = external('mkfifoat',
             [rffi.INT, rffi.CCHARP, rffi.MODE_T], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def mkfifoat(path, mode, dir_fd=AT_FDCWD):
             error = c_mkfifoat(dir_fd, path, mode)
    @@ -2381,7 +2400,7 @@
     if HAVE_MKNODAT:
         c_mknodat = external('mknodat',
             [rffi.INT, rffi.CCHARP, rffi.MODE_T, rffi.INT], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def mknodat(path, mode, device, dir_fd=AT_FDCWD):
             error = c_mknodat(dir_fd, path, mode, device)
    @@ -2693,29 +2712,29 @@
             [rffi.INT, rffi.CCHARP, rffi.CCHARP, rffi.SIZE_T, rffi.INT],
             rffi.INT,
             compilation_info=CConfig._compilation_info_,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
         c_setxattr = external('setxattr',
             [rffi.CCHARP, rffi.CCHARP, rffi.CCHARP, rffi.SIZE_T, rffi.INT],
             rffi.INT,
             compilation_info=CConfig._compilation_info_,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
         c_lsetxattr = external('lsetxattr',
             [rffi.CCHARP, rffi.CCHARP, rffi.CCHARP, rffi.SIZE_T, rffi.INT],
             rffi.INT,
             compilation_info=CConfig._compilation_info_,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
         c_fremovexattr = external('fremovexattr',
             [rffi.INT, rffi.CCHARP], rffi.INT,
             compilation_info=CConfig._compilation_info_,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
         c_removexattr = external('removexattr',
             [rffi.CCHARP, rffi.CCHARP], rffi.INT,
             compilation_info=CConfig._compilation_info_,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
         c_lremovexattr = external('lremovexattr',
             [rffi.CCHARP, rffi.CCHARP], rffi.INT,
             compilation_info=CConfig._compilation_info_,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
         c_flistxattr = external('flistxattr',
             [rffi.INT, rffi.CCHARP, rffi.SIZE_T], rffi.SSIZE_T,
             compilation_info=CConfig._compilation_info_,
    @@ -2730,6 +2749,7 @@
             save_err=rffi.RFFI_SAVE_ERRNO)
         buf_sizes = [256, XATTR_SIZE_MAX]
     
    +    @sandbox_review(reviewed=True)
         def fgetxattr(fd, name):
             for size in buf_sizes:
                 with rffi.scoped_alloc_buffer(size) as buf:
    @@ -2744,6 +2764,7 @@
             else:
                 raise OSError(errno.ERANGE, 'fgetxattr failed')
     
    +    @sandbox_review(reviewed=True)
         def getxattr(path, name, follow_symlinks=True):
             for size in buf_sizes:
                 with rffi.scoped_alloc_buffer(size) as buf:
    @@ -2789,6 +2810,7 @@
             del result[-1]
             return result
     
    +    @sandbox_review(reviewed=True)
         def flistxattr(fd):
             for size in buf_sizes:
                 with rffi.scoped_alloc_buffer(size) as buf:
    @@ -2802,6 +2824,7 @@
             else:
                 raise OSError(errno.ERANGE, 'flistxattr failed')
     
    +    @sandbox_review(reviewed=True)
         def listxattr(path, follow_symlinks=True):
             for size in buf_sizes:
                 with rffi.scoped_alloc_buffer(size) as buf:
    diff --git a/rpython/rlib/rposix_environ.py b/rpython/rlib/rposix_environ.py
    --- a/rpython/rlib/rposix_environ.py
    +++ b/rpython/rlib/rposix_environ.py
    @@ -198,6 +198,7 @@
         os_unsetenv = llexternal('unsetenv', [rffi.CCHARP], rffi.INT,
                                       save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         def r_unsetenv(name):
             with rffi.scoped_str2charp(name) as l_name:
                 error = rffi.cast(lltype.Signed, os_unsetenv(l_name))
    diff --git a/rpython/rlib/rposix_stat.py b/rpython/rlib/rposix_stat.py
    --- a/rpython/rlib/rposix_stat.py
    +++ b/rpython/rlib/rposix_stat.py
    @@ -535,6 +535,7 @@
                                   compilation_info=compilation_info,
                                   save_err=rffi.RFFI_SAVE_ERRNO)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('fstat')
     def fstat(fd):
         if not _WIN32:
    @@ -642,12 +643,14 @@
                 handle_posix_error('fstatat', error)
                 return build_stat_result(stresult)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('fstatvfs')
     def fstatvfs(fd):
         with lltype.scoped_alloc(STATVFS_STRUCT.TO) as stresult:
             handle_posix_error('fstatvfs', c_fstatvfs(fd, stresult))
             return build_statvfs_result(stresult)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('statvfs')
     @specialize.argtype(0)
     def statvfs(path):
    diff --git a/rpython/rlib/rtime.py b/rpython/rlib/rtime.py
    --- a/rpython/rlib/rtime.py
    +++ b/rpython/rlib/rtime.py
    @@ -236,6 +236,7 @@
             diff = a[0] - state.counter_start
         return float(diff) / state.divisor
     
+@sandbox_review(reviewed=True)
     @replace_time_function('clock')
     def clock():
         if _WIN32:
    diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py
    --- a/rpython/rtyper/lltypesystem/rffi.py
    +++ b/rpython/rtyper/lltypesystem/rffi.py
    @@ -102,7 +102,13 @@
                      is sandboxed.  If False, it will turn into a stdin/stdout
                      communication with the parent process.  If "check_caller",
                      it is like True but we call @sandbox_review(check_caller=True)
    -                 which means that we need to also check the callers.
    +                 which means that we need to also check the callers.  If
    +                 "nowrite", we don't need to check the callers.  The default
+                 of False implies either "check_caller" or "nowrite",
+                 depending on whether or not the function takes or returns
+                 pointer arguments.  Use "nowrite" only if the external
    +                 function call will only *read* from 'char *' or other data
    +                 structures passed in.
     
         calling_conv: if 'unknown' or 'win', the C function is not directly seen
                       by the JIT.  If 'c', it can be seen (depending on
    @@ -344,6 +350,8 @@
             wrapper = sandbox_review(check_caller=True)(wrapper)
         elif sandboxsafe == 'abort':
             wrapper = sandbox_review(abort=True)(wrapper)
    +    elif sandboxsafe == 'nowrite':
    +        wrapper = sandbox_review(reviewed=True)(wrapper)
         else:
             assert isinstance(sandboxsafe, bool)
             if sandboxsafe or (all(_sandbox_type_safe(ARG) for ARG in args) and
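
As a concrete illustration of the docstring above, here is a minimal sketch
of how the new "nowrite" value is meant to be used when declaring externals
with the rposix-style external() helper.  The two declarations below are
only examples and are not part of this changeset:

    # 'access' only *reads* the 'char *' path passed in, so it can be
    # declared with sandboxsafe="nowrite"; per the dispatch above, rffi
    # then wraps it with @sandbox_review(reviewed=True).
    c_access = external('access', [rffi.CCHARP, rffi.INT], rffi.INT,
                        save_err=rffi.RFFI_SAVE_ERRNO,
                        sandboxsafe="nowrite")

    # 'getcwd' *writes* into the caller-provided buffer, so it must not be
    # marked "nowrite"; leaving the default sandboxsafe=False makes rffi
    # fall back to @sandbox_review(check_caller=True) because of the
    # pointer arguments.
    c_getcwd = external('getcwd', [rffi.CCHARP, rffi.SIZE_T], rffi.CCHARP,
                        save_err=rffi.RFFI_SAVE_ERRNO)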
    
    From pypy.commits at gmail.com  Tue Aug 27 03:36:20 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Tue, 27 Aug 2019 00:36:20 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: More fixes
    Message-ID: <5d64dd74.1c69fb81.cb9b1.701a@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97291:bf7d11fce803
    Date: 2019-08-27 09:35 +0200
    http://bitbucket.org/pypy/pypy/changeset/bf7d11fce803/
    
    Log:	More fixes
    
    diff --git a/pypy/module/_io/interp_fileio.py b/pypy/module/_io/interp_fileio.py
    --- a/pypy/module/_io/interp_fileio.py
    +++ b/pypy/module/_io/interp_fileio.py
    @@ -374,7 +374,7 @@
             length = rwbuffer.getlength()
     
             target_address = lltype.nullptr(rffi.CCHARP.TO)
    -        if length > 64:
    +        if length > 64 and not space.config.translation.sandbox:
                 try:
                     target_address = rwbuffer.get_raw_address()
                 except ValueError:
    @@ -394,6 +394,13 @@
             else:
                 # optimized case: reading more than 64 bytes into a rwbuffer
                 # with a valid raw address
    +
    +            # XXX note that this is not fully safe, because we don't "lock"
    +            # the buffer so we can't in theory pass its raw address to c_read().
    +            # Another thread could cause it to be freed in parallel.
    +            # Without proper buffer locking, it's not going to be fixed, though.
    +            assert not space.config.translation.sandbox
    +
                 got = c_read(self.fd, target_address, length)
                 keepalive_until_here(rwbuffer)
                 got = rffi.cast(lltype.Signed, got)
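
For readers without the rest of interp_fileio.py at hand, a rough sketch of
the two branches the hunk above distinguishes; the copy-based fallback is
illustrative only and is not quoted from the file:

    if target_address:
        # "optimized case": read straight into the buffer's raw memory.
        # After this change, never reached on a sandboxed translation.
        got = c_read(self.fd, target_address, length)
        keepalive_until_here(rwbuffer)
        got = rffi.cast(lltype.Signed, got)
    else:
        # fallback: read into a temporary string, then copy it into the
        # app-level buffer; no raw pointer to the (unlocked) buffer is
        # ever handed to C code.  (Assumption: a plain os.read() plus
        # rwbuffer.setslice() is what the untruncated code does.)
        data = os.read(self.fd, length)
        rwbuffer.setslice(0, data)
        got = len(data)
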
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -1016,6 +1016,7 @@
             debug.debug_forked(ofs)
         return childpid
     
+@sandbox_review(reviewed=True)
     @replace_os_function('openpty')
     @jit.dont_look_inside
     def openpty():
    @@ -1353,6 +1354,7 @@
             c_pipe2 = external('pipe2', [INT_ARRAY_P, rffi.INT], rffi.INT,
                               save_err=rffi.RFFI_SAVE_ERRNO)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('pipe')
     def pipe(flags=0):
         # 'flags' might be ignored.  Check the result.
    @@ -1389,6 +1391,7 @@
             finally:
                 lltype.free(filedes, flavor='raw')
     
+@sandbox_review(reviewed=True)
     def pipe2(flags):
         # Only available if there is really a c_pipe2 function.
         # No fallback to pipe() if we get ENOSYS.
    @@ -1906,6 +1909,7 @@
         c_setresgid = external('setresgid', [GID_T] * 3, rffi.INT,
                                save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         @replace_os_function('getresuid')
         def getresuid():
             out = lltype.malloc(UID_T_P.TO, 3, flavor='raw')
    @@ -1918,6 +1922,7 @@
             finally:
                 lltype.free(out, flavor='raw')
     
    +    @sandbox_review(reviewed=True)
         @replace_os_function('getresgid')
         def getresgid():
             out = lltype.malloc(GID_T_P.TO, 3, flavor='raw')
    diff --git a/rpython/rlib/rposix_environ.py b/rpython/rlib/rposix_environ.py
    --- a/rpython/rlib/rposix_environ.py
    +++ b/rpython/rlib/rposix_environ.py
    @@ -169,6 +169,7 @@
                 l_result = getenv(l_name)
                 return traits.charp2str(l_result) if l_result else None
     
    +    @sandbox_review(reviewed=True)
         def putenv_llimpl(name, value):
             l_string = traits.str2charp(name + eq + value)
             error = rffi.cast(lltype.Signed, putenv(l_string))
    
    From pypy.commits at gmail.com  Tue Aug 27 03:39:44 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Tue, 27 Aug 2019 00:39:44 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: remove test files that no longer exist in
     cpython 3.6.9
    Message-ID: <5d64de40.1c69fb81.54359.857a@mx.google.com>
    
    Author: Matti Picus 
    Branch: py3.6
    Changeset: r97292:81a9ec3c2935
    Date: 2019-08-27 10:38 +0300
    http://bitbucket.org/pypy/pypy/changeset/81a9ec3c2935/
    
    Log:	remove test files that no longer exist in cpython 3.6.9
    
    diff too long, truncating to 2000 out of 11624 lines
    
    diff --git a/lib-python/3/ensurepip/_bundled/pip-8.1.2-py2.py3-none-any.whl b/lib-python/3/ensurepip/_bundled/pip-8.1.2-py2.py3-none-any.whl
    deleted file mode 100644
    index cc49227a0c7e13757f4863a9b7ace1eb56c3ce61..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
    GIT binary patch
    
    [cut]
    
    diff --git a/lib-python/3/ensurepip/_bundled/setuptools-21.2.1-py2.py3-none-any.whl b/lib-python/3/ensurepip/_bundled/setuptools-21.2.1-py2.py3-none-any.whl
    deleted file mode 100644
    index fe36464f79ba87960c33f3bdff817deb9e4e5f7c..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
    GIT binary patch
    
    [cut]
    
    diff --git a/lib-python/3/plat-aix4/IN.py b/lib-python/3/plat-aix4/IN.py
    deleted file mode 100644
    --- a/lib-python/3/plat-aix4/IN.py
    +++ /dev/null
    @@ -1,165 +0,0 @@
    -# Generated by h2py from /usr/include/netinet/in.h
    -
    -# Included from net/nh.h
    -
    -# Included from sys/machine.h
    -LITTLE_ENDIAN = 1234
    -BIG_ENDIAN = 4321
    -PDP_ENDIAN = 3412
    -BYTE_ORDER = BIG_ENDIAN
    -DEFAULT_GPR = 0xDEADBEEF
    -MSR_EE = 0x8000
    -MSR_PR = 0x4000
    -MSR_FP = 0x2000
    -MSR_ME = 0x1000
    -MSR_FE = 0x0800
    -MSR_FE0 = 0x0800
    -MSR_SE = 0x0400
    -MSR_BE = 0x0200
    -MSR_IE = 0x0100
    -MSR_FE1 = 0x0100
    -MSR_AL = 0x0080
    -MSR_IP = 0x0040
    -MSR_IR = 0x0020
    -MSR_DR = 0x0010
    -MSR_PM = 0x0004
    -DEFAULT_MSR = (MSR_EE | MSR_ME | MSR_AL | MSR_IR | MSR_DR)
    -DEFAULT_USER_MSR = (DEFAULT_MSR | MSR_PR)
    -CR_LT = 0x80000000
    -CR_GT = 0x40000000
    -CR_EQ = 0x20000000
    -CR_SO = 0x10000000
    -CR_FX = 0x08000000
    -CR_FEX = 0x04000000
    -CR_VX = 0x02000000
    -CR_OX = 0x01000000
    -XER_SO = 0x80000000
    -XER_OV = 0x40000000
    -XER_CA = 0x20000000
    -def XER_COMP_BYTE(xer): return ((xer >> 8) & 0x000000FF)
    -
    -def XER_LENGTH(xer): return (xer & 0x0000007F)
    -
    -DSISR_IO = 0x80000000
    -DSISR_PFT = 0x40000000
    -DSISR_LOCK = 0x20000000
    -DSISR_FPIO = 0x10000000
    -DSISR_PROT = 0x08000000
    -DSISR_LOOP = 0x04000000
    -DSISR_DRST = 0x04000000
    -DSISR_ST = 0x02000000
    -DSISR_SEGB = 0x01000000
    -DSISR_DABR = 0x00400000
    -DSISR_EAR = 0x00100000
    -SRR_IS_PFT = 0x40000000
    -SRR_IS_ISPEC = 0x20000000
    -SRR_IS_IIO = 0x10000000
    -SRR_IS_GUARD = 0x10000000
    -SRR_IS_PROT = 0x08000000
    -SRR_IS_LOOP = 0x04000000
    -SRR_PR_FPEN = 0x00100000
    -SRR_PR_INVAL = 0x00080000
    -SRR_PR_PRIV = 0x00040000
    -SRR_PR_TRAP = 0x00020000
    -SRR_PR_IMPRE = 0x00010000
    -def BUID_7F_SRVAL(raddr): return (0x87F00000 | (((uint)(raddr)) >> 28))
    -
    -BT_256M = 0x1FFC
    -BT_128M = 0x0FFC
    -BT_64M = 0x07FC
    -BT_32M = 0x03FC
    -BT_16M = 0x01FC
    -BT_8M = 0x00FC
    -BT_4M = 0x007C
    -BT_2M = 0x003C
    -BT_1M = 0x001C
    -BT_512K = 0x000C
    -BT_256K = 0x0004
    -BT_128K = 0x0000
    -BT_NOACCESS = 0x0
    -BT_RDONLY = 0x1
    -BT_WRITE = 0x2
    -BT_VS = 0x2
    -BT_VP = 0x1
    -def BAT_ESEG(dbatu): return (((uint)(dbatu) >> 28))
    -
    -MIN_BAT_SIZE = 0x00020000
    -MAX_BAT_SIZE = 0x10000000
    -def ntohl(x): return (x)
    -
    -def ntohs(x): return (x)
    -
    -def htonl(x): return (x)
    -
    -def htons(x): return (x)
    -
    -IPPROTO_IP = 0
    -IPPROTO_ICMP = 1
    -IPPROTO_IGMP = 2
    -IPPROTO_GGP = 3
    -IPPROTO_TCP = 6
    -IPPROTO_EGP = 8
    -IPPROTO_PUP = 12
    -IPPROTO_UDP = 17
    -IPPROTO_IDP = 22
    -IPPROTO_TP = 29
    -IPPROTO_LOCAL = 63
    -IPPROTO_EON = 80
    -IPPROTO_BIP = 0x53
    -IPPROTO_RAW = 255
    -IPPROTO_MAX = 256
    -IPPORT_RESERVED = 1024
    -IPPORT_USERRESERVED = 5000
    -IPPORT_TIMESERVER = 37
    -def IN_CLASSA(i): return (((int)(i) & 0x80000000) == 0)
    -
    -IN_CLASSA_NET = 0xff000000
    -IN_CLASSA_NSHIFT = 24
    -IN_CLASSA_HOST = 0x00ffffff
    -IN_CLASSA_MAX = 128
    -def IN_CLASSB(i): return (((int)(i) & 0xc0000000) == 0x80000000)
    -
    -IN_CLASSB_NET = 0xffff0000
    -IN_CLASSB_NSHIFT = 16
    -IN_CLASSB_HOST = 0x0000ffff
    -IN_CLASSB_MAX = 65536
    -def IN_CLASSC(i): return (((int)(i) & 0xe0000000) == 0xc0000000)
    -
    -IN_CLASSC_NET = 0xffffff00
    -IN_CLASSC_NSHIFT = 8
    -IN_CLASSC_HOST = 0x000000ff
    -def IN_CLASSD(i): return (((int)(i) & 0xf0000000) == 0xe0000000)
    -
    -def IN_MULTICAST(i): return IN_CLASSD(i)
    -
    -IN_CLASSD_NET = 0xf0000000
    -IN_CLASSD_NSHIFT = 28
    -IN_CLASSD_HOST = 0x0fffffff
    -INADDR_UNSPEC_GROUP = 0xe0000000
    -INADDR_ALLHOSTS_GROUP = 0xe0000001
    -INADDR_MAX_LOCAL_GROUP = 0xe00000ff
    -def IN_EXPERIMENTAL(i): return (((int)(i) & 0xe0000000) == 0xe0000000)
    -
    -def IN_BADCLASS(i): return (((int)(i) & 0xf0000000) == 0xf0000000)
    -
    -INADDR_ANY = 0x00000000
    -INADDR_BROADCAST = 0xffffffff
    -INADDR_LOOPBACK = 0x7f000001
    -INADDR_NONE = 0xffffffff
    -IN_LOOPBACKNET = 127
    -IP_OPTIONS = 1
    -IP_HDRINCL = 2
    -IP_TOS = 3
    -IP_TTL = 4
    -IP_RECVOPTS = 5
    -IP_RECVRETOPTS = 6
    -IP_RECVDSTADDR = 7
    -IP_RETOPTS = 8
    -IP_MULTICAST_IF = 9
    -IP_MULTICAST_TTL = 10
    -IP_MULTICAST_LOOP = 11
    -IP_ADD_MEMBERSHIP = 12
    -IP_DROP_MEMBERSHIP = 13
    -IP_DEFAULT_MULTICAST_TTL = 1
    -IP_DEFAULT_MULTICAST_LOOP = 1
    -IP_MAX_MEMBERSHIPS = 20
    diff --git a/lib-python/3/plat-aix4/regen b/lib-python/3/plat-aix4/regen
    deleted file mode 100755
    --- a/lib-python/3/plat-aix4/regen
    +++ /dev/null
    @@ -1,8 +0,0 @@
    -#! /bin/sh
    -case `uname -sv` in
    -'AIX 4'*)  ;;
    -*)      echo Probably not on an AIX 4 system 1>&2
    -        exit 1;;
    -esac
    -set -v
    -h2py.py -i '(u_long)' /usr/include/netinet/in.h
    diff --git a/lib-python/3/plat-darwin/IN.py b/lib-python/3/plat-darwin/IN.py
    deleted file mode 100644
    --- a/lib-python/3/plat-darwin/IN.py
    +++ /dev/null
    @@ -1,662 +0,0 @@
    -# Generated by h2py from /usr/include/netinet/in.h
    -
    -# Included from sys/appleapiopts.h
    -
    -# Included from sys/_types.h
    -
    -# Included from sys/cdefs.h
    -def __P(protos): return protos
    -
    -def __STRING(x): return #x
    -
    -def __P(protos): return ()
    -
    -def __STRING(x): return "x"
    -
    -def __attribute__(x): return
    -
    -def __COPYRIGHT(s): return __IDSTRING(copyright,s)
    -
    -def __RCSID(s): return __IDSTRING(rcsid,s)
    -
    -def __SCCSID(s): return __IDSTRING(sccsid,s)
    -
    -def __PROJECT_VERSION(s): return __IDSTRING(project_version,s)
    -
    -__DARWIN_UNIX03 = 1
    -__DARWIN_UNIX03 = 0
    -__DARWIN_UNIX03 = 0
    -__DARWIN_UNIX03 = 1
    -__DARWIN_64_BIT_INO_T = 1
    -__DARWIN_64_BIT_INO_T = 0
    -__DARWIN_64_BIT_INO_T = 0
    -__DARWIN_NON_CANCELABLE = 0
    -__DARWIN_VERS_1050 = 1
    -__DARWIN_VERS_1050 = 0
    -__DARWIN_SUF_UNIX03 = "$UNIX2003"
    -__DARWIN_SUF_UNIX03_SET = 1
    -__DARWIN_SUF_UNIX03_SET = 0
    -__DARWIN_SUF_64_BIT_INO_T = "$INODE64"
    -__DARWIN_SUF_NON_CANCELABLE = "$NOCANCEL"
    -__DARWIN_SUF_1050 = "$1050"
    -__DARWIN_SUF_UNIX03_SET = 0
    -__DARWIN_SUF_EXTSN = "$DARWIN_EXTSN"
    -__DARWIN_LONG_DOUBLE_IS_DOUBLE = 0
    -def __DARWIN_LDBL_COMPAT(x): return
    -
    -def __DARWIN_LDBL_COMPAT2(x): return
    -
    -__DARWIN_LONG_DOUBLE_IS_DOUBLE = 1
    -def __DARWIN_LDBL_COMPAT(x): return
    -
    -def __DARWIN_LDBL_COMPAT2(x): return
    -
    -__DARWIN_LONG_DOUBLE_IS_DOUBLE = 0
    -_DARWIN_FEATURE_LONG_DOUBLE_IS_DOUBLE = 1
    -_DARWIN_FEATURE_UNIX_CONFORMANCE = 3
    -_DARWIN_FEATURE_64_BIT_INODE = 1
    -
    -# Included from machine/_types.h
    -__PTHREAD_SIZE__ = 1168
    -__PTHREAD_ATTR_SIZE__ = 56
    -__PTHREAD_MUTEXATTR_SIZE__ = 8
    -__PTHREAD_MUTEX_SIZE__ = 56
    -__PTHREAD_CONDATTR_SIZE__ = 8
    -__PTHREAD_COND_SIZE__ = 40
    -__PTHREAD_ONCE_SIZE__ = 8
    -__PTHREAD_RWLOCK_SIZE__ = 192
    -__PTHREAD_RWLOCKATTR_SIZE__ = 16
    -__PTHREAD_SIZE__ = 596
    -__PTHREAD_ATTR_SIZE__ = 36
    -__PTHREAD_MUTEXATTR_SIZE__ = 8
    -__PTHREAD_MUTEX_SIZE__ = 40
    -__PTHREAD_CONDATTR_SIZE__ = 4
    -__PTHREAD_COND_SIZE__ = 24
    -__PTHREAD_ONCE_SIZE__ = 4
    -__PTHREAD_RWLOCK_SIZE__ = 124
    -__PTHREAD_RWLOCKATTR_SIZE__ = 12
    -__DARWIN_NULL = 0
    -
    -# Included from stdint.h
    -__WORDSIZE = 64
    -__WORDSIZE = 32
    -INT8_MAX = 127
    -INT16_MAX = 32767
    -INT32_MAX = 2147483647
    -INT8_MIN = -128
    -INT16_MIN = -32768
    -INT32_MIN = (-INT32_MAX-1)
    -UINT8_MAX = 255
    -UINT16_MAX = 65535
    -INT_LEAST8_MIN = INT8_MIN
    -INT_LEAST16_MIN = INT16_MIN
    -INT_LEAST32_MIN = INT32_MIN
    -INT_LEAST8_MAX = INT8_MAX
    -INT_LEAST16_MAX = INT16_MAX
    -INT_LEAST32_MAX = INT32_MAX
    -UINT_LEAST8_MAX = UINT8_MAX
    -UINT_LEAST16_MAX = UINT16_MAX
    -INT_FAST8_MIN = INT8_MIN
    -INT_FAST16_MIN = INT16_MIN
    -INT_FAST32_MIN = INT32_MIN
    -INT_FAST8_MAX = INT8_MAX
    -INT_FAST16_MAX = INT16_MAX
    -INT_FAST32_MAX = INT32_MAX
    -UINT_FAST8_MAX = UINT8_MAX
    -UINT_FAST16_MAX = UINT16_MAX
    -INTPTR_MIN = INT32_MIN
    -INTPTR_MAX = INT32_MAX
    -PTRDIFF_MIN = INT32_MIN
    -PTRDIFF_MAX = INT32_MAX
    -WCHAR_MAX = 0x7fffffff
    -WCHAR_MIN = 0
    -WCHAR_MIN = (-WCHAR_MAX-1)
    -WINT_MIN = INT32_MIN
    -WINT_MAX = INT32_MAX
    -SIG_ATOMIC_MIN = INT32_MIN
    -SIG_ATOMIC_MAX = INT32_MAX
    -def INT8_C(v): return (v)
    -
    -def INT16_C(v): return (v)
    -
    -def INT32_C(v): return (v)
    -
    -
    -# Included from sys/socket.h
    -
    -# Included from machine/_param.h
    -SOCK_STREAM = 1
    -SOCK_DGRAM = 2
    -SOCK_RAW = 3
    -SOCK_RDM = 4
    -SOCK_SEQPACKET = 5
    -SO_DEBUG = 0x0001
    -SO_ACCEPTCONN = 0x0002
    -SO_REUSEADDR = 0x0004
    -SO_KEEPALIVE = 0x0008
    -SO_DONTROUTE = 0x0010
    -SO_BROADCAST = 0x0020
    -SO_USELOOPBACK = 0x0040
    -SO_LINGER = 0x0080
    -SO_LINGER = 0x1080
    -SO_OOBINLINE = 0x0100
    -SO_REUSEPORT = 0x0200
    -SO_TIMESTAMP = 0x0400
    -SO_ACCEPTFILTER = 0x1000
    -SO_DONTTRUNC = 0x2000
    -SO_WANTMORE = 0x4000
    -SO_WANTOOBFLAG = 0x8000
    -SO_SNDBUF = 0x1001
    -SO_RCVBUF = 0x1002
    -SO_SNDLOWAT = 0x1003
    -SO_RCVLOWAT = 0x1004
    -SO_SNDTIMEO = 0x1005
    -SO_RCVTIMEO = 0x1006
    -SO_ERROR = 0x1007
    -SO_TYPE = 0x1008
    -SO_NREAD = 0x1020
    -SO_NKE = 0x1021
    -SO_NOSIGPIPE = 0x1022
    -SO_NOADDRERR = 0x1023
    -SO_NWRITE = 0x1024
    -SO_REUSESHAREUID = 0x1025
    -SO_NOTIFYCONFLICT = 0x1026
    -SO_LINGER_SEC = 0x1080
    -SO_RESTRICTIONS = 0x1081
    -SO_RESTRICT_DENYIN = 0x00000001
    -SO_RESTRICT_DENYOUT = 0x00000002
    -SO_RESTRICT_DENYSET = (-2147483648)
    -SO_LABEL = 0x1010
    -SO_PEERLABEL = 0x1011
    -SOL_SOCKET = 0xffff
    -AF_UNSPEC = 0
    -AF_UNIX = 1
    -AF_LOCAL = AF_UNIX
    -AF_INET = 2
    -AF_IMPLINK = 3
    -AF_PUP = 4
    -AF_CHAOS = 5
    -AF_NS = 6
    -AF_ISO = 7
    -AF_OSI = AF_ISO
    -AF_ECMA = 8
    -AF_DATAKIT = 9
    -AF_CCITT = 10
    -AF_SNA = 11
    -AF_DECnet = 12
    -AF_DLI = 13
    -AF_LAT = 14
    -AF_HYLINK = 15
    -AF_APPLETALK = 16
    -AF_ROUTE = 17
    -AF_LINK = 18
    -pseudo_AF_XTP = 19
    -AF_COIP = 20
    -AF_CNT = 21
    -pseudo_AF_RTIP = 22
    -AF_IPX = 23
    -AF_SIP = 24
    -pseudo_AF_PIP = 25
    -AF_NDRV = 27
    -AF_ISDN = 28
    -AF_E164 = AF_ISDN
    -pseudo_AF_KEY = 29
    -AF_INET6 = 30
    -AF_NATM = 31
    -AF_SYSTEM = 32
    -AF_NETBIOS = 33
    -AF_PPP = 34
    -AF_ATM = 30
    -pseudo_AF_HDRCMPLT = 35
    -AF_RESERVED_36 = 36
    -AF_NETGRAPH = 32
    -AF_MAX = 37
    -SOCK_MAXADDRLEN = 255
    -_SS_MAXSIZE = 128
    -PF_UNSPEC = AF_UNSPEC
    -PF_LOCAL = AF_LOCAL
    -PF_UNIX = PF_LOCAL
    -PF_INET = AF_INET
    -PF_IMPLINK = AF_IMPLINK
    -PF_PUP = AF_PUP
    -PF_CHAOS = AF_CHAOS
    -PF_NS = AF_NS
    -PF_ISO = AF_ISO
    -PF_OSI = AF_ISO
    -PF_ECMA = AF_ECMA
    -PF_DATAKIT = AF_DATAKIT
    -PF_CCITT = AF_CCITT
    -PF_SNA = AF_SNA
    -PF_DECnet = AF_DECnet
    -PF_DLI = AF_DLI
    -PF_LAT = AF_LAT
    -PF_HYLINK = AF_HYLINK
    -PF_APPLETALK = AF_APPLETALK
    -PF_ROUTE = AF_ROUTE
    -PF_LINK = AF_LINK
    -PF_XTP = pseudo_AF_XTP
    -PF_COIP = AF_COIP
    -PF_CNT = AF_CNT
    -PF_SIP = AF_SIP
    -PF_IPX = AF_IPX
    -PF_RTIP = pseudo_AF_RTIP
    -PF_PIP = pseudo_AF_PIP
    -PF_NDRV = AF_NDRV
    -PF_ISDN = AF_ISDN
    -PF_KEY = pseudo_AF_KEY
    -PF_INET6 = AF_INET6
    -PF_NATM = AF_NATM
    -PF_SYSTEM = AF_SYSTEM
    -PF_NETBIOS = AF_NETBIOS
    -PF_PPP = AF_PPP
    -PF_RESERVED_36 = AF_RESERVED_36
    -PF_ATM = AF_ATM
    -PF_NETGRAPH = AF_NETGRAPH
    -PF_MAX = AF_MAX
    -NET_MAXID = AF_MAX
    -NET_RT_DUMP = 1
    -NET_RT_FLAGS = 2
    -NET_RT_IFLIST = 3
    -NET_RT_STAT = 4
    -NET_RT_TRASH = 5
    -NET_RT_IFLIST2 = 6
    -NET_RT_DUMP2 = 7
    -NET_RT_MAXID = 8
    -SOMAXCONN = 128
    -MSG_OOB = 0x1
    -MSG_PEEK = 0x2
    -MSG_DONTROUTE = 0x4
    -MSG_EOR = 0x8
    -MSG_TRUNC = 0x10
    -MSG_CTRUNC = 0x20
    -MSG_WAITALL = 0x40
    -MSG_DONTWAIT = 0x80
    -MSG_EOF = 0x100
    -MSG_WAITSTREAM = 0x200
    -MSG_FLUSH = 0x400
    -MSG_HOLD = 0x800
    -MSG_SEND = 0x1000
    -MSG_HAVEMORE = 0x2000
    -MSG_RCVMORE = 0x4000
    -MSG_NEEDSA = 0x10000
    -CMGROUP_MAX = 16
    -SCM_RIGHTS = 0x01
    -SCM_TIMESTAMP = 0x02
    -SCM_CREDS = 0x03
    -SHUT_RD = 0
    -SHUT_WR = 1
    -SHUT_RDWR = 2
    -
    -# Included from machine/endian.h
    -
    -# Included from sys/_endian.h
    -def ntohl(x): return (x)
    -
    -def ntohs(x): return (x)
    -
    -def htonl(x): return (x)
    -
    -def htons(x): return (x)
    -
    -def NTOHL(x): return (x)
    -
    -def NTOHS(x): return (x)
    -
    -def HTONL(x): return (x)
    -
    -def HTONS(x): return (x)
    -
    -
    -# Included from libkern/_OSByteOrder.h
    -def __DARWIN_OSSwapConstInt16(x): return \
    -
    -def __DARWIN_OSSwapConstInt32(x): return \
    -
    -def __DARWIN_OSSwapConstInt64(x): return \
    -
    -
    -# Included from libkern/i386/_OSByteOrder.h
    -def __DARWIN_OSSwapInt16(x): return \
    -
    -def __DARWIN_OSSwapInt32(x): return \
    -
    -def __DARWIN_OSSwapInt64(x): return \
    -
    -def __DARWIN_OSSwapInt16(x): return _OSSwapInt16(x)
    -
    -def __DARWIN_OSSwapInt32(x): return _OSSwapInt32(x)
    -
    -def __DARWIN_OSSwapInt64(x): return _OSSwapInt64(x)
    -
    -def ntohs(x): return __DARWIN_OSSwapInt16(x)
    -
    -def htons(x): return __DARWIN_OSSwapInt16(x)
    -
    -def ntohl(x): return __DARWIN_OSSwapInt32(x)
    -
    -def htonl(x): return __DARWIN_OSSwapInt32(x)
    -
    -IPPROTO_IP = 0
    -IPPROTO_HOPOPTS = 0
    -IPPROTO_ICMP = 1
    -IPPROTO_IGMP = 2
    -IPPROTO_GGP = 3
    -IPPROTO_IPV4 = 4
    -IPPROTO_IPIP = IPPROTO_IPV4
    -IPPROTO_TCP = 6
    -IPPROTO_ST = 7
    -IPPROTO_EGP = 8
    -IPPROTO_PIGP = 9
    -IPPROTO_RCCMON = 10
    -IPPROTO_NVPII = 11
    -IPPROTO_PUP = 12
    -IPPROTO_ARGUS = 13
    -IPPROTO_EMCON = 14
    -IPPROTO_XNET = 15
    -IPPROTO_CHAOS = 16
    -IPPROTO_UDP = 17
    -IPPROTO_MUX = 18
    -IPPROTO_MEAS = 19
    -IPPROTO_HMP = 20
    -IPPROTO_PRM = 21
    -IPPROTO_IDP = 22
    -IPPROTO_TRUNK1 = 23
    -IPPROTO_TRUNK2 = 24
    -IPPROTO_LEAF1 = 25
    -IPPROTO_LEAF2 = 26
    -IPPROTO_RDP = 27
    -IPPROTO_IRTP = 28
    -IPPROTO_TP = 29
    -IPPROTO_BLT = 30
    -IPPROTO_NSP = 31
    -IPPROTO_INP = 32
    -IPPROTO_SEP = 33
    -IPPROTO_3PC = 34
    -IPPROTO_IDPR = 35
    -IPPROTO_XTP = 36
    -IPPROTO_DDP = 37
    -IPPROTO_CMTP = 38
    -IPPROTO_TPXX = 39
    -IPPROTO_IL = 40
    -IPPROTO_IPV6 = 41
    -IPPROTO_SDRP = 42
    -IPPROTO_ROUTING = 43
    -IPPROTO_FRAGMENT = 44
    -IPPROTO_IDRP = 45
    -IPPROTO_RSVP = 46
    -IPPROTO_GRE = 47
    -IPPROTO_MHRP = 48
    -IPPROTO_BHA = 49
    -IPPROTO_ESP = 50
    -IPPROTO_AH = 51
    -IPPROTO_INLSP = 52
    -IPPROTO_SWIPE = 53
    -IPPROTO_NHRP = 54
    -IPPROTO_ICMPV6 = 58
    -IPPROTO_NONE = 59
    -IPPROTO_DSTOPTS = 60
    -IPPROTO_AHIP = 61
    -IPPROTO_CFTP = 62
    -IPPROTO_HELLO = 63
    -IPPROTO_SATEXPAK = 64
    -IPPROTO_KRYPTOLAN = 65
    -IPPROTO_RVD = 66
    -IPPROTO_IPPC = 67
    -IPPROTO_ADFS = 68
    -IPPROTO_SATMON = 69
    -IPPROTO_VISA = 70
    -IPPROTO_IPCV = 71
    -IPPROTO_CPNX = 72
    -IPPROTO_CPHB = 73
    -IPPROTO_WSN = 74
    -IPPROTO_PVP = 75
    -IPPROTO_BRSATMON = 76
    -IPPROTO_ND = 77
    -IPPROTO_WBMON = 78
    -IPPROTO_WBEXPAK = 79
    -IPPROTO_EON = 80
    -IPPROTO_VMTP = 81
    -IPPROTO_SVMTP = 82
    -IPPROTO_VINES = 83
    -IPPROTO_TTP = 84
    -IPPROTO_IGP = 85
    -IPPROTO_DGP = 86
    -IPPROTO_TCF = 87
    -IPPROTO_IGRP = 88
    -IPPROTO_OSPFIGP = 89
    -IPPROTO_SRPC = 90
    -IPPROTO_LARP = 91
    -IPPROTO_MTP = 92
    -IPPROTO_AX25 = 93
    -IPPROTO_IPEIP = 94
    -IPPROTO_MICP = 95
    -IPPROTO_SCCSP = 96
    -IPPROTO_ETHERIP = 97
    -IPPROTO_ENCAP = 98
    -IPPROTO_APES = 99
    -IPPROTO_GMTP = 100
    -IPPROTO_IPCOMP = 108
    -IPPROTO_PIM = 103
    -IPPROTO_PGM = 113
    -IPPROTO_DIVERT = 254
    -IPPROTO_RAW = 255
    -IPPROTO_MAX = 256
    -IPPROTO_DONE = 257
    -__DARWIN_IPPORT_RESERVED = 1024
    -IPPORT_RESERVED = __DARWIN_IPPORT_RESERVED
    -IPPORT_USERRESERVED = 5000
    -IPPORT_HIFIRSTAUTO = 49152
    -IPPORT_HILASTAUTO = 65535
    -IPPORT_RESERVEDSTART = 600
    -def IN_CLASSA(i): return (((u_int32_t)(i) & (-2147483648)) == 0)
    -
    -IN_CLASSA_NET = (-16777216)
    -IN_CLASSA_NSHIFT = 24
    -IN_CLASSA_HOST = 0x00ffffff
    -IN_CLASSA_MAX = 128
    -def IN_CLASSB(i): return (((u_int32_t)(i) & (-1073741824)) == (-2147483648))
    -
    -IN_CLASSB_NET = (-65536)
    -IN_CLASSB_NSHIFT = 16
    -IN_CLASSB_HOST = 0x0000ffff
    -IN_CLASSB_MAX = 65536
    -def IN_CLASSC(i): return (((u_int32_t)(i) & (-536870912)) == (-1073741824))
    -
    -IN_CLASSC_NET = (-256)
    -IN_CLASSC_NSHIFT = 8
    -IN_CLASSC_HOST = 0x000000ff
    -def IN_CLASSD(i): return (((u_int32_t)(i) & (-268435456)) == (-536870912))
    -
    -IN_CLASSD_NET = (-268435456)
    -IN_CLASSD_NSHIFT = 28
    -IN_CLASSD_HOST = 0x0fffffff
    -def IN_MULTICAST(i): return IN_CLASSD(i)
    -
    -def IN_EXPERIMENTAL(i): return (((u_int32_t)(i) & (-268435456)) == (-268435456))
    -
    -def IN_BADCLASS(i): return (((u_int32_t)(i) & (-268435456)) == (-268435456))
    -
    -INADDR_NONE = (-1)
    -def IN_LINKLOCAL(i): return (((u_int32_t)(i) & IN_CLASSB_NET) == IN_LINKLOCALNETNUM)
    -
    -IN_LOOPBACKNET = 127
    -INET_ADDRSTRLEN = 16
    -IP_OPTIONS = 1
    -IP_HDRINCL = 2
    -IP_TOS = 3
    -IP_TTL = 4
    -IP_RECVOPTS = 5
    -IP_RECVRETOPTS = 6
    -IP_RECVDSTADDR = 7
    -IP_RETOPTS = 8
    -IP_MULTICAST_IF = 9
    -IP_MULTICAST_TTL = 10
    -IP_MULTICAST_LOOP = 11
    -IP_ADD_MEMBERSHIP = 12
    -IP_DROP_MEMBERSHIP = 13
    -IP_MULTICAST_VIF = 14
    -IP_RSVP_ON = 15
    -IP_RSVP_OFF = 16
    -IP_RSVP_VIF_ON = 17
    -IP_RSVP_VIF_OFF = 18
    -IP_PORTRANGE = 19
    -IP_RECVIF = 20
    -IP_IPSEC_POLICY = 21
    -IP_FAITH = 22
    -IP_STRIPHDR = 23
    -IP_RECVTTL = 24
    -IP_FW_ADD = 40
    -IP_FW_DEL = 41
    -IP_FW_FLUSH = 42
    -IP_FW_ZERO = 43
    -IP_FW_GET = 44
    -IP_FW_RESETLOG = 45
    -IP_OLD_FW_ADD = 50
    -IP_OLD_FW_DEL = 51
    -IP_OLD_FW_FLUSH = 52
    -IP_OLD_FW_ZERO = 53
    -IP_OLD_FW_GET = 54
    -IP_NAT__XXX = 55
    -IP_OLD_FW_RESETLOG = 56
    -IP_DUMMYNET_CONFIGURE = 60
    -IP_DUMMYNET_DEL = 61
    -IP_DUMMYNET_FLUSH = 62
    -IP_DUMMYNET_GET = 64
    -IP_TRAFFIC_MGT_BACKGROUND = 65
    -IP_FORCE_OUT_IFP = 69
    -TRAFFIC_MGT_SO_BACKGROUND = 0x0001
    -TRAFFIC_MGT_SO_BG_SUPPRESSED = 0x0002
    -IP_DEFAULT_MULTICAST_TTL = 1
    -IP_DEFAULT_MULTICAST_LOOP = 1
    -IP_MAX_MEMBERSHIPS = 20
    -IP_PORTRANGE_DEFAULT = 0
    -IP_PORTRANGE_HIGH = 1
    -IP_PORTRANGE_LOW = 2
    -IPPROTO_MAXID = (IPPROTO_AH + 1)
    -IPCTL_FORWARDING = 1
    -IPCTL_SENDREDIRECTS = 2
    -IPCTL_DEFTTL = 3
    -IPCTL_DEFMTU = 4
    -IPCTL_RTEXPIRE = 5
    -IPCTL_RTMINEXPIRE = 6
    -IPCTL_RTMAXCACHE = 7
    -IPCTL_SOURCEROUTE = 8
    -IPCTL_DIRECTEDBROADCAST = 9
    -IPCTL_INTRQMAXLEN = 10
    -IPCTL_INTRQDROPS = 11
    -IPCTL_STATS = 12
    -IPCTL_ACCEPTSOURCEROUTE = 13
    -IPCTL_FASTFORWARDING = 14
    -IPCTL_KEEPFAITH = 15
    -IPCTL_GIF_TTL = 16
    -IPCTL_MAXID = 17
    -
    -# Included from netinet6/in6.h
    -__KAME_VERSION = "20010528/apple-darwin"
    -IPV6PORT_RESERVED = 1024
    -IPV6PORT_ANONMIN = 49152
    -IPV6PORT_ANONMAX = 65535
    -IPV6PORT_RESERVEDMIN = 600
    -IPV6PORT_RESERVEDMAX = (IPV6PORT_RESERVED-1)
    -INET6_ADDRSTRLEN = 46
    -def IN6_IS_ADDR_UNSPECIFIED(a): return \
    -
    -def IN6_IS_ADDR_LOOPBACK(a): return \
    -
    -def IN6_IS_ADDR_V4COMPAT(a): return \
    -
    -def IN6_IS_ADDR_V4MAPPED(a): return \
    -
    -__IPV6_ADDR_SCOPE_NODELOCAL = 0x01
    -__IPV6_ADDR_SCOPE_LINKLOCAL = 0x02
    -__IPV6_ADDR_SCOPE_SITELOCAL = 0x05
    -__IPV6_ADDR_SCOPE_ORGLOCAL = 0x08
    -__IPV6_ADDR_SCOPE_GLOBAL = 0x0e
    -def IN6_IS_ADDR_LINKLOCAL(a): return \
    -
    -def IN6_IS_ADDR_SITELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_NODELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_LINKLOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_SITELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_ORGLOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_GLOBAL(a): return \
    -
    -IPV6_OPTIONS = 1
    -IPV6_RECVOPTS = 5
    -IPV6_RECVRETOPTS = 6
    -IPV6_RECVDSTADDR = 7
    -IPV6_RETOPTS = 8
    -IPV6_SOCKOPT_RESERVED1 = 3
    -IPV6_UNICAST_HOPS = 4
    -IPV6_MULTICAST_IF = 9
    -IPV6_MULTICAST_HOPS = 10
    -IPV6_MULTICAST_LOOP = 11
    -IPV6_JOIN_GROUP = 12
    -IPV6_LEAVE_GROUP = 13
    -IPV6_PORTRANGE = 14
    -ICMP6_FILTER = 18
    -IPV6_PKTINFO = 19
    -IPV6_HOPLIMIT = 20
    -IPV6_NEXTHOP = 21
    -IPV6_HOPOPTS = 22
    -IPV6_DSTOPTS = 23
    -IPV6_RTHDR = 24
    -IPV6_PKTOPTIONS = 25
    -IPV6_CHECKSUM = 26
    -IPV6_V6ONLY = 27
    -IPV6_BINDV6ONLY = IPV6_V6ONLY
    -IPV6_IPSEC_POLICY = 28
    -IPV6_FAITH = 29
    -IPV6_FW_ADD = 30
    -IPV6_FW_DEL = 31
    -IPV6_FW_FLUSH = 32
    -IPV6_FW_ZERO = 33
    -IPV6_FW_GET = 34
    -IPV6_RTHDR_LOOSE = 0
    -IPV6_RTHDR_STRICT = 1
    -IPV6_RTHDR_TYPE_0 = 0
    -IPV6_DEFAULT_MULTICAST_HOPS = 1
    -IPV6_DEFAULT_MULTICAST_LOOP = 1
    -IPV6_PORTRANGE_DEFAULT = 0
    -IPV6_PORTRANGE_HIGH = 1
    -IPV6_PORTRANGE_LOW = 2
    -IPV6PROTO_MAXID = (IPPROTO_PIM + 1)
    -IPV6CTL_FORWARDING = 1
    -IPV6CTL_SENDREDIRECTS = 2
    -IPV6CTL_DEFHLIM = 3
    -IPV6CTL_DEFMTU = 4
    -IPV6CTL_FORWSRCRT = 5
    -IPV6CTL_STATS = 6
    -IPV6CTL_MRTSTATS = 7
    -IPV6CTL_MRTPROTO = 8
    -IPV6CTL_MAXFRAGPACKETS = 9
    -IPV6CTL_SOURCECHECK = 10
    -IPV6CTL_SOURCECHECK_LOGINT = 11
    -IPV6CTL_ACCEPT_RTADV = 12
    -IPV6CTL_KEEPFAITH = 13
    -IPV6CTL_LOG_INTERVAL = 14
    -IPV6CTL_HDRNESTLIMIT = 15
    -IPV6CTL_DAD_COUNT = 16
    -IPV6CTL_AUTO_FLOWLABEL = 17
    -IPV6CTL_DEFMCASTHLIM = 18
    -IPV6CTL_GIF_HLIM = 19
    -IPV6CTL_KAME_VERSION = 20
    -IPV6CTL_USE_DEPRECATED = 21
    -IPV6CTL_RR_PRUNE = 22
    -IPV6CTL_MAPPED_ADDR = 23
    -IPV6CTL_V6ONLY = 24
    -IPV6CTL_RTEXPIRE = 25
    -IPV6CTL_RTMINEXPIRE = 26
    -IPV6CTL_RTMAXCACHE = 27
    -IPV6CTL_USETEMPADDR = 32
    -IPV6CTL_TEMPPLTIME = 33
    -IPV6CTL_TEMPVLTIME = 34
    -IPV6CTL_AUTO_LINKLOCAL = 35
    -IPV6CTL_RIP6STATS = 36
    -IPV6CTL_MAXFRAGS = 41
    -IPV6CTL_MAXID = 42
    diff --git a/lib-python/3/plat-darwin/regen b/lib-python/3/plat-darwin/regen
    deleted file mode 100755
    --- a/lib-python/3/plat-darwin/regen
    +++ /dev/null
    @@ -1,3 +0,0 @@
    -#! /bin/sh
    -set -v
    -python$EXE ../../Tools/scripts/h2py.py -i '(u_long)' /usr/include/netinet/in.h
    diff --git a/lib-python/3/plat-freebsd4/IN.py b/lib-python/3/plat-freebsd4/IN.py
    deleted file mode 100644
    --- a/lib-python/3/plat-freebsd4/IN.py
    +++ /dev/null
    @@ -1,355 +0,0 @@
    -# Generated by h2py from /usr/include/netinet/in.h
    -IPPROTO_IP = 0
    -IPPROTO_HOPOPTS = 0
    -IPPROTO_ICMP = 1
    -IPPROTO_IGMP = 2
    -IPPROTO_GGP = 3
    -IPPROTO_IPV4 = 4
    -IPPROTO_IPIP = IPPROTO_IPV4
    -IPPROTO_TCP = 6
    -IPPROTO_ST = 7
    -IPPROTO_EGP = 8
    -IPPROTO_PIGP = 9
    -IPPROTO_RCCMON = 10
    -IPPROTO_NVPII = 11
    -IPPROTO_PUP = 12
    -IPPROTO_ARGUS = 13
    -IPPROTO_EMCON = 14
    -IPPROTO_XNET = 15
    -IPPROTO_CHAOS = 16
    -IPPROTO_UDP = 17
    -IPPROTO_MUX = 18
    -IPPROTO_MEAS = 19
    -IPPROTO_HMP = 20
    -IPPROTO_PRM = 21
    -IPPROTO_IDP = 22
    -IPPROTO_TRUNK1 = 23
    -IPPROTO_TRUNK2 = 24
    -IPPROTO_LEAF1 = 25
    -IPPROTO_LEAF2 = 26
    -IPPROTO_RDP = 27
    -IPPROTO_IRTP = 28
    -IPPROTO_TP = 29
    -IPPROTO_BLT = 30
    -IPPROTO_NSP = 31
    -IPPROTO_INP = 32
    -IPPROTO_SEP = 33
    -IPPROTO_3PC = 34
    -IPPROTO_IDPR = 35
    -IPPROTO_XTP = 36
    -IPPROTO_DDP = 37
    -IPPROTO_CMTP = 38
    -IPPROTO_TPXX = 39
    -IPPROTO_IL = 40
    -IPPROTO_IPV6 = 41
    -IPPROTO_SDRP = 42
    -IPPROTO_ROUTING = 43
    -IPPROTO_FRAGMENT = 44
    -IPPROTO_IDRP = 45
    -IPPROTO_RSVP = 46
    -IPPROTO_GRE = 47
    -IPPROTO_MHRP = 48
    -IPPROTO_BHA = 49
    -IPPROTO_ESP = 50
    -IPPROTO_AH = 51
    -IPPROTO_INLSP = 52
    -IPPROTO_SWIPE = 53
    -IPPROTO_NHRP = 54
    -IPPROTO_ICMPV6 = 58
    -IPPROTO_NONE = 59
    -IPPROTO_DSTOPTS = 60
    -IPPROTO_AHIP = 61
    -IPPROTO_CFTP = 62
    -IPPROTO_HELLO = 63
    -IPPROTO_SATEXPAK = 64
    -IPPROTO_KRYPTOLAN = 65
    -IPPROTO_RVD = 66
    -IPPROTO_IPPC = 67
    -IPPROTO_ADFS = 68
    -IPPROTO_SATMON = 69
    -IPPROTO_VISA = 70
    -IPPROTO_IPCV = 71
    -IPPROTO_CPNX = 72
    -IPPROTO_CPHB = 73
    -IPPROTO_WSN = 74
    -IPPROTO_PVP = 75
    -IPPROTO_BRSATMON = 76
    -IPPROTO_ND = 77
    -IPPROTO_WBMON = 78
    -IPPROTO_WBEXPAK = 79
    -IPPROTO_EON = 80
    -IPPROTO_VMTP = 81
    -IPPROTO_SVMTP = 82
    -IPPROTO_VINES = 83
    -IPPROTO_TTP = 84
    -IPPROTO_IGP = 85
    -IPPROTO_DGP = 86
    -IPPROTO_TCF = 87
    -IPPROTO_IGRP = 88
    -IPPROTO_OSPFIGP = 89
    -IPPROTO_SRPC = 90
    -IPPROTO_LARP = 91
    -IPPROTO_MTP = 92
    -IPPROTO_AX25 = 93
    -IPPROTO_IPEIP = 94
    -IPPROTO_MICP = 95
    -IPPROTO_SCCSP = 96
    -IPPROTO_ETHERIP = 97
    -IPPROTO_ENCAP = 98
    -IPPROTO_APES = 99
    -IPPROTO_GMTP = 100
    -IPPROTO_IPCOMP = 108
    -IPPROTO_PIM = 103
    -IPPROTO_PGM = 113
    -IPPROTO_DIVERT = 254
    -IPPROTO_RAW = 255
    -IPPROTO_MAX = 256
    -IPPROTO_DONE = 257
    -IPPORT_RESERVED = 1024
    -IPPORT_USERRESERVED = 5000
    -IPPORT_HIFIRSTAUTO = 49152
    -IPPORT_HILASTAUTO = 65535
    -IPPORT_RESERVEDSTART = 600
    -def IN_CLASSA(i): return (((u_int32_t)(i) & 0x80000000) == 0)
    -
    -IN_CLASSA_NET = 0xff000000
    -IN_CLASSA_NSHIFT = 24
    -IN_CLASSA_HOST = 0x00ffffff
    -IN_CLASSA_MAX = 128
    -def IN_CLASSB(i): return (((u_int32_t)(i) & 0xc0000000) == 0x80000000)
    -
    -IN_CLASSB_NET = 0xffff0000
    -IN_CLASSB_NSHIFT = 16
    -IN_CLASSB_HOST = 0x0000ffff
    -IN_CLASSB_MAX = 65536
    -def IN_CLASSC(i): return (((u_int32_t)(i) & 0xe0000000) == 0xc0000000)
    -
    -IN_CLASSC_NET = 0xffffff00
    -IN_CLASSC_NSHIFT = 8
    -IN_CLASSC_HOST = 0x000000ff
    -def IN_CLASSD(i): return (((u_int32_t)(i) & 0xf0000000) == 0xe0000000)
    -
    -IN_CLASSD_NET = 0xf0000000
    -IN_CLASSD_NSHIFT = 28
    -IN_CLASSD_HOST = 0x0fffffff
    -def IN_MULTICAST(i): return IN_CLASSD(i)
    -
    -def IN_EXPERIMENTAL(i): return (((u_int32_t)(i) & 0xf0000000) == 0xf0000000)
    -
    -def IN_BADCLASS(i): return (((u_int32_t)(i) & 0xf0000000) == 0xf0000000)
    -
    -INADDR_NONE = 0xffffffff
    -IN_LOOPBACKNET = 127
    -INET_ADDRSTRLEN = 16
    -IP_OPTIONS = 1
    -IP_HDRINCL = 2
    -IP_TOS = 3
    -IP_TTL = 4
    -IP_RECVOPTS = 5
    -IP_RECVRETOPTS = 6
    -IP_RECVDSTADDR = 7
    -IP_RETOPTS = 8
    -IP_MULTICAST_IF = 9
    -IP_MULTICAST_TTL = 10
    -IP_MULTICAST_LOOP = 11
    -IP_ADD_MEMBERSHIP = 12
    -IP_DROP_MEMBERSHIP = 13
    -IP_MULTICAST_VIF = 14
    -IP_RSVP_ON = 15
    -IP_RSVP_OFF = 16
    -IP_RSVP_VIF_ON = 17
    -IP_RSVP_VIF_OFF = 18
    -IP_PORTRANGE = 19
    -IP_RECVIF = 20
    -IP_IPSEC_POLICY = 21
    -IP_FAITH = 22
    -IP_FW_ADD = 50
    -IP_FW_DEL = 51
    -IP_FW_FLUSH = 52
    -IP_FW_ZERO = 53
    -IP_FW_GET = 54
    -IP_FW_RESETLOG = 55
    -IP_DUMMYNET_CONFIGURE = 60
    -IP_DUMMYNET_DEL = 61
    -IP_DUMMYNET_FLUSH = 62
    -IP_DUMMYNET_GET = 64
    -IP_DEFAULT_MULTICAST_TTL = 1
    -IP_DEFAULT_MULTICAST_LOOP = 1
    -IP_MAX_MEMBERSHIPS = 20
    -IP_PORTRANGE_DEFAULT = 0
    -IP_PORTRANGE_HIGH = 1
    -IP_PORTRANGE_LOW = 2
    -IPPROTO_MAXID = (IPPROTO_AH + 1)
    -IPCTL_FORWARDING = 1
    -IPCTL_SENDREDIRECTS = 2
    -IPCTL_DEFTTL = 3
    -IPCTL_DEFMTU = 4
    -IPCTL_RTEXPIRE = 5
    -IPCTL_RTMINEXPIRE = 6
    -IPCTL_RTMAXCACHE = 7
    -IPCTL_SOURCEROUTE = 8
    -IPCTL_DIRECTEDBROADCAST = 9
    -IPCTL_INTRQMAXLEN = 10
    -IPCTL_INTRQDROPS = 11
    -IPCTL_STATS = 12
    -IPCTL_ACCEPTSOURCEROUTE = 13
    -IPCTL_FASTFORWARDING = 14
    -IPCTL_KEEPFAITH = 15
    -IPCTL_GIF_TTL = 16
    -IPCTL_MAXID = 17
    -
    -# Included from netinet6/in6.h
    -
    -# Included from sys/queue.h
    -def SLIST_HEAD_INITIALIZER(head): return \
    -
    -def SLIST_ENTRY(type): return \
    -
    -def STAILQ_HEAD_INITIALIZER(head): return \
    -
    -def STAILQ_ENTRY(type): return \
    -
    -def LIST_HEAD_INITIALIZER(head): return \
    -
    -def LIST_ENTRY(type): return \
    -
    -def TAILQ_HEAD_INITIALIZER(head): return \
    -
    -def TAILQ_ENTRY(type): return \
    -
    -def CIRCLEQ_ENTRY(type): return \
    -
    -__KAME_VERSION = "20000701/FreeBSD-current"
    -IPV6PORT_RESERVED = 1024
    -IPV6PORT_ANONMIN = 49152
    -IPV6PORT_ANONMAX = 65535
    -IPV6PORT_RESERVEDMIN = 600
    -IPV6PORT_RESERVEDMAX = (IPV6PORT_RESERVED-1)
    -INET6_ADDRSTRLEN = 46
    -IPV6_ADDR_INT32_ONE = 1
    -IPV6_ADDR_INT32_TWO = 2
    -IPV6_ADDR_INT32_MNL = 0xff010000
    -IPV6_ADDR_INT32_MLL = 0xff020000
    -IPV6_ADDR_INT32_SMP = 0x0000ffff
    -IPV6_ADDR_INT16_ULL = 0xfe80
    -IPV6_ADDR_INT16_USL = 0xfec0
    -IPV6_ADDR_INT16_MLL = 0xff02
    -IPV6_ADDR_INT32_ONE = 0x01000000
    -IPV6_ADDR_INT32_TWO = 0x02000000
    -IPV6_ADDR_INT32_MNL = 0x000001ff
    -IPV6_ADDR_INT32_MLL = 0x000002ff
    -IPV6_ADDR_INT32_SMP = 0xffff0000
    -IPV6_ADDR_INT16_ULL = 0x80fe
    -IPV6_ADDR_INT16_USL = 0xc0fe
    -IPV6_ADDR_INT16_MLL = 0x02ff
    -def IN6_IS_ADDR_UNSPECIFIED(a): return \
    -
    -def IN6_IS_ADDR_LOOPBACK(a): return \
    -
    -def IN6_IS_ADDR_V4COMPAT(a): return \
    -
    -def IN6_IS_ADDR_V4MAPPED(a): return \
    -
    -IPV6_ADDR_SCOPE_NODELOCAL = 0x01
    -IPV6_ADDR_SCOPE_LINKLOCAL = 0x02
    -IPV6_ADDR_SCOPE_SITELOCAL = 0x05
    -IPV6_ADDR_SCOPE_ORGLOCAL = 0x08
    -IPV6_ADDR_SCOPE_GLOBAL = 0x0e
    -__IPV6_ADDR_SCOPE_NODELOCAL = 0x01
    -__IPV6_ADDR_SCOPE_LINKLOCAL = 0x02
    -__IPV6_ADDR_SCOPE_SITELOCAL = 0x05
    -__IPV6_ADDR_SCOPE_ORGLOCAL = 0x08
    -__IPV6_ADDR_SCOPE_GLOBAL = 0x0e
    -def IN6_IS_ADDR_LINKLOCAL(a): return \
    -
    -def IN6_IS_ADDR_SITELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_NODELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_LINKLOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_SITELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_ORGLOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_GLOBAL(a): return \
    -
    -def IN6_IS_ADDR_MC_NODELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_LINKLOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_SITELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_ORGLOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_GLOBAL(a): return \
    -
    -def IN6_IS_SCOPE_LINKLOCAL(a): return \
    -
    -IPV6_OPTIONS = 1
    -IPV6_RECVOPTS = 5
    -IPV6_RECVRETOPTS = 6
    -IPV6_RECVDSTADDR = 7
    -IPV6_RETOPTS = 8
    -IPV6_SOCKOPT_RESERVED1 = 3
    -IPV6_UNICAST_HOPS = 4
    -IPV6_MULTICAST_IF = 9
    -IPV6_MULTICAST_HOPS = 10
    -IPV6_MULTICAST_LOOP = 11
    -IPV6_JOIN_GROUP = 12
    -IPV6_LEAVE_GROUP = 13
    -IPV6_PORTRANGE = 14
    -ICMP6_FILTER = 18
    -IPV6_PKTINFO = 19
    -IPV6_HOPLIMIT = 20
    -IPV6_NEXTHOP = 21
    -IPV6_HOPOPTS = 22
    -IPV6_DSTOPTS = 23
    -IPV6_RTHDR = 24
    -IPV6_PKTOPTIONS = 25
    -IPV6_CHECKSUM = 26
    -IPV6_BINDV6ONLY = 27
    -IPV6_IPSEC_POLICY = 28
    -IPV6_FAITH = 29
    -IPV6_FW_ADD = 30
    -IPV6_FW_DEL = 31
    -IPV6_FW_FLUSH = 32
    -IPV6_FW_ZERO = 33
    -IPV6_FW_GET = 34
    -IPV6_RTHDR_LOOSE = 0
    -IPV6_RTHDR_STRICT = 1
    -IPV6_RTHDR_TYPE_0 = 0
    -IPV6_DEFAULT_MULTICAST_HOPS = 1
    -IPV6_DEFAULT_MULTICAST_LOOP = 1
    -IPV6_PORTRANGE_DEFAULT = 0
    -IPV6_PORTRANGE_HIGH = 1
    -IPV6_PORTRANGE_LOW = 2
    -IPV6PROTO_MAXID = (IPPROTO_PIM + 1)
    -IPV6CTL_FORWARDING = 1
    -IPV6CTL_SENDREDIRECTS = 2
    -IPV6CTL_DEFHLIM = 3
    -IPV6CTL_DEFMTU = 4
    -IPV6CTL_FORWSRCRT = 5
    -IPV6CTL_STATS = 6
    -IPV6CTL_MRTSTATS = 7
    -IPV6CTL_MRTPROTO = 8
    -IPV6CTL_MAXFRAGPACKETS = 9
    -IPV6CTL_SOURCECHECK = 10
    -IPV6CTL_SOURCECHECK_LOGINT = 11
    -IPV6CTL_ACCEPT_RTADV = 12
    -IPV6CTL_KEEPFAITH = 13
    -IPV6CTL_LOG_INTERVAL = 14
    -IPV6CTL_HDRNESTLIMIT = 15
    -IPV6CTL_DAD_COUNT = 16
    -IPV6CTL_AUTO_FLOWLABEL = 17
    -IPV6CTL_DEFMCASTHLIM = 18
    -IPV6CTL_GIF_HLIM = 19
    -IPV6CTL_KAME_VERSION = 20
    -IPV6CTL_USE_DEPRECATED = 21
    -IPV6CTL_RR_PRUNE = 22
    -IPV6CTL_MAPPED_ADDR = 23
    -IPV6CTL_BINDV6ONLY = 24
    -IPV6CTL_RTEXPIRE = 25
    -IPV6CTL_RTMINEXPIRE = 26
    -IPV6CTL_RTMAXCACHE = 27
    -IPV6CTL_MAXID = 28
    diff --git a/lib-python/3/plat-freebsd4/regen b/lib-python/3/plat-freebsd4/regen
    deleted file mode 100755
    --- a/lib-python/3/plat-freebsd4/regen
    +++ /dev/null
    @@ -1,3 +0,0 @@
    -#! /bin/sh
    -set -v
    -python ../../Tools/scripts/h2py.py -i '(u_long)' /usr/include/netinet/in.h
    diff --git a/lib-python/3/plat-freebsd5/IN.py b/lib-python/3/plat-freebsd5/IN.py
    deleted file mode 100644
    --- a/lib-python/3/plat-freebsd5/IN.py
    +++ /dev/null
    @@ -1,355 +0,0 @@
    -# Generated by h2py from /usr/include/netinet/in.h
    -IPPROTO_IP = 0
    -IPPROTO_HOPOPTS = 0
    -IPPROTO_ICMP = 1
    -IPPROTO_IGMP = 2
    -IPPROTO_GGP = 3
    -IPPROTO_IPV4 = 4
    -IPPROTO_IPIP = IPPROTO_IPV4
    -IPPROTO_TCP = 6
    -IPPROTO_ST = 7
    -IPPROTO_EGP = 8
    -IPPROTO_PIGP = 9
    -IPPROTO_RCCMON = 10
    -IPPROTO_NVPII = 11
    -IPPROTO_PUP = 12
    -IPPROTO_ARGUS = 13
    -IPPROTO_EMCON = 14
    -IPPROTO_XNET = 15
    -IPPROTO_CHAOS = 16
    -IPPROTO_UDP = 17
    -IPPROTO_MUX = 18
    -IPPROTO_MEAS = 19
    -IPPROTO_HMP = 20
    -IPPROTO_PRM = 21
    -IPPROTO_IDP = 22
    -IPPROTO_TRUNK1 = 23
    -IPPROTO_TRUNK2 = 24
    -IPPROTO_LEAF1 = 25
    -IPPROTO_LEAF2 = 26
    -IPPROTO_RDP = 27
    -IPPROTO_IRTP = 28
    -IPPROTO_TP = 29
    -IPPROTO_BLT = 30
    -IPPROTO_NSP = 31
    -IPPROTO_INP = 32
    -IPPROTO_SEP = 33
    -IPPROTO_3PC = 34
    -IPPROTO_IDPR = 35
    -IPPROTO_XTP = 36
    -IPPROTO_DDP = 37
    -IPPROTO_CMTP = 38
    -IPPROTO_TPXX = 39
    -IPPROTO_IL = 40
    -IPPROTO_IPV6 = 41
    -IPPROTO_SDRP = 42
    -IPPROTO_ROUTING = 43
    -IPPROTO_FRAGMENT = 44
    -IPPROTO_IDRP = 45
    -IPPROTO_RSVP = 46
    -IPPROTO_GRE = 47
    -IPPROTO_MHRP = 48
    -IPPROTO_BHA = 49
    -IPPROTO_ESP = 50
    -IPPROTO_AH = 51
    -IPPROTO_INLSP = 52
    -IPPROTO_SWIPE = 53
    -IPPROTO_NHRP = 54
    -IPPROTO_ICMPV6 = 58
    -IPPROTO_NONE = 59
    -IPPROTO_DSTOPTS = 60
    -IPPROTO_AHIP = 61
    -IPPROTO_CFTP = 62
    -IPPROTO_HELLO = 63
    -IPPROTO_SATEXPAK = 64
    -IPPROTO_KRYPTOLAN = 65
    -IPPROTO_RVD = 66
    -IPPROTO_IPPC = 67
    -IPPROTO_ADFS = 68
    -IPPROTO_SATMON = 69
    -IPPROTO_VISA = 70
    -IPPROTO_IPCV = 71
    -IPPROTO_CPNX = 72
    -IPPROTO_CPHB = 73
    -IPPROTO_WSN = 74
    -IPPROTO_PVP = 75
    -IPPROTO_BRSATMON = 76
    -IPPROTO_ND = 77
    -IPPROTO_WBMON = 78
    -IPPROTO_WBEXPAK = 79
    -IPPROTO_EON = 80
    -IPPROTO_VMTP = 81
    -IPPROTO_SVMTP = 82
    -IPPROTO_VINES = 83
    -IPPROTO_TTP = 84
    -IPPROTO_IGP = 85
    -IPPROTO_DGP = 86
    -IPPROTO_TCF = 87
    -IPPROTO_IGRP = 88
    -IPPROTO_OSPFIGP = 89
    -IPPROTO_SRPC = 90
    -IPPROTO_LARP = 91
    -IPPROTO_MTP = 92
    -IPPROTO_AX25 = 93
    -IPPROTO_IPEIP = 94
    -IPPROTO_MICP = 95
    -IPPROTO_SCCSP = 96
    -IPPROTO_ETHERIP = 97
    -IPPROTO_ENCAP = 98
    -IPPROTO_APES = 99
    -IPPROTO_GMTP = 100
    -IPPROTO_IPCOMP = 108
    -IPPROTO_PIM = 103
    -IPPROTO_PGM = 113
    -IPPROTO_DIVERT = 254
    -IPPROTO_RAW = 255
    -IPPROTO_MAX = 256
    -IPPROTO_DONE = 257
    -IPPORT_RESERVED = 1024
    -IPPORT_USERRESERVED = 5000
    -IPPORT_HIFIRSTAUTO = 49152
    -IPPORT_HILASTAUTO = 65535
    -IPPORT_RESERVEDSTART = 600
    -def IN_CLASSA(i): return (((u_int32_t)(i) & 0x80000000) == 0)
    -
    -IN_CLASSA_NET = 0xff000000
    -IN_CLASSA_NSHIFT = 24
    -IN_CLASSA_HOST = 0x00ffffff
    -IN_CLASSA_MAX = 128
    -def IN_CLASSB(i): return (((u_int32_t)(i) & 0xc0000000) == 0x80000000)
    -
    -IN_CLASSB_NET = 0xffff0000
    -IN_CLASSB_NSHIFT = 16
    -IN_CLASSB_HOST = 0x0000ffff
    -IN_CLASSB_MAX = 65536
    -def IN_CLASSC(i): return (((u_int32_t)(i) & 0xe0000000) == 0xc0000000)
    -
    -IN_CLASSC_NET = 0xffffff00
    -IN_CLASSC_NSHIFT = 8
    -IN_CLASSC_HOST = 0x000000ff
    -def IN_CLASSD(i): return (((u_int32_t)(i) & 0xf0000000) == 0xe0000000)
    -
    -IN_CLASSD_NET = 0xf0000000
    -IN_CLASSD_NSHIFT = 28
    -IN_CLASSD_HOST = 0x0fffffff
    -def IN_MULTICAST(i): return IN_CLASSD(i)
    -
    -def IN_EXPERIMENTAL(i): return (((u_int32_t)(i) & 0xf0000000) == 0xf0000000)
    -
    -def IN_BADCLASS(i): return (((u_int32_t)(i) & 0xf0000000) == 0xf0000000)
    -
    -INADDR_NONE = 0xffffffff
    -IN_LOOPBACKNET = 127
    -INET_ADDRSTRLEN = 16
    -IP_OPTIONS = 1
    -IP_HDRINCL = 2
    -IP_TOS = 3
    -IP_TTL = 4
    -IP_RECVOPTS = 5
    -IP_RECVRETOPTS = 6
    -IP_RECVDSTADDR = 7
    -IP_RETOPTS = 8
    -IP_MULTICAST_IF = 9
    -IP_MULTICAST_TTL = 10
    -IP_MULTICAST_LOOP = 11
    -IP_ADD_MEMBERSHIP = 12
    -IP_DROP_MEMBERSHIP = 13
    -IP_MULTICAST_VIF = 14
    -IP_RSVP_ON = 15
    -IP_RSVP_OFF = 16
    -IP_RSVP_VIF_ON = 17
    -IP_RSVP_VIF_OFF = 18
    -IP_PORTRANGE = 19
    -IP_RECVIF = 20
    -IP_IPSEC_POLICY = 21
    -IP_FAITH = 22
    -IP_FW_ADD = 50
    -IP_FW_DEL = 51
    -IP_FW_FLUSH = 52
    -IP_FW_ZERO = 53
    -IP_FW_GET = 54
    -IP_FW_RESETLOG = 55
    -IP_DUMMYNET_CONFIGURE = 60
    -IP_DUMMYNET_DEL = 61
    -IP_DUMMYNET_FLUSH = 62
    -IP_DUMMYNET_GET = 64
    -IP_DEFAULT_MULTICAST_TTL = 1
    -IP_DEFAULT_MULTICAST_LOOP = 1
    -IP_MAX_MEMBERSHIPS = 20
    -IP_PORTRANGE_DEFAULT = 0
    -IP_PORTRANGE_HIGH = 1
    -IP_PORTRANGE_LOW = 2
    -IPPROTO_MAXID = (IPPROTO_AH + 1)
    -IPCTL_FORWARDING = 1
    -IPCTL_SENDREDIRECTS = 2
    -IPCTL_DEFTTL = 3
    -IPCTL_DEFMTU = 4
    -IPCTL_RTEXPIRE = 5
    -IPCTL_RTMINEXPIRE = 6
    -IPCTL_RTMAXCACHE = 7
    -IPCTL_SOURCEROUTE = 8
    -IPCTL_DIRECTEDBROADCAST = 9
    -IPCTL_INTRQMAXLEN = 10
    -IPCTL_INTRQDROPS = 11
    -IPCTL_STATS = 12
    -IPCTL_ACCEPTSOURCEROUTE = 13
    -IPCTL_FASTFORWARDING = 14
    -IPCTL_KEEPFAITH = 15
    -IPCTL_GIF_TTL = 16
    -IPCTL_MAXID = 17
    -
    -# Included from netinet6/in6.h
    -
    -# Included from sys/queue.h
    -def SLIST_HEAD_INITIALIZER(head): return \
    -
    -def SLIST_ENTRY(type): return \
    -
    -def STAILQ_HEAD_INITIALIZER(head): return \
    -
    -def STAILQ_ENTRY(type): return \
    -
    -def LIST_HEAD_INITIALIZER(head): return \
    -
    -def LIST_ENTRY(type): return \
    -
    -def TAILQ_HEAD_INITIALIZER(head): return \
    -
    -def TAILQ_ENTRY(type): return \
    -
    -def CIRCLEQ_ENTRY(type): return \
    -
    -__KAME_VERSION = "20000701/FreeBSD-current"
    -IPV6PORT_RESERVED = 1024
    -IPV6PORT_ANONMIN = 49152
    -IPV6PORT_ANONMAX = 65535
    -IPV6PORT_RESERVEDMIN = 600
    -IPV6PORT_RESERVEDMAX = (IPV6PORT_RESERVED-1)
    -INET6_ADDRSTRLEN = 46
    -IPV6_ADDR_INT32_ONE = 1
    -IPV6_ADDR_INT32_TWO = 2
    -IPV6_ADDR_INT32_MNL = 0xff010000
    -IPV6_ADDR_INT32_MLL = 0xff020000
    -IPV6_ADDR_INT32_SMP = 0x0000ffff
    -IPV6_ADDR_INT16_ULL = 0xfe80
    -IPV6_ADDR_INT16_USL = 0xfec0
    -IPV6_ADDR_INT16_MLL = 0xff02
    -IPV6_ADDR_INT32_ONE = 0x01000000
    -IPV6_ADDR_INT32_TWO = 0x02000000
    -IPV6_ADDR_INT32_MNL = 0x000001ff
    -IPV6_ADDR_INT32_MLL = 0x000002ff
    -IPV6_ADDR_INT32_SMP = 0xffff0000
    -IPV6_ADDR_INT16_ULL = 0x80fe
    -IPV6_ADDR_INT16_USL = 0xc0fe
    -IPV6_ADDR_INT16_MLL = 0x02ff
    -def IN6_IS_ADDR_UNSPECIFIED(a): return \
    -
    -def IN6_IS_ADDR_LOOPBACK(a): return \
    -
    -def IN6_IS_ADDR_V4COMPAT(a): return \
    -
    -def IN6_IS_ADDR_V4MAPPED(a): return \
    -
    -IPV6_ADDR_SCOPE_NODELOCAL = 0x01
    -IPV6_ADDR_SCOPE_LINKLOCAL = 0x02
    -IPV6_ADDR_SCOPE_SITELOCAL = 0x05
    -IPV6_ADDR_SCOPE_ORGLOCAL = 0x08
    -IPV6_ADDR_SCOPE_GLOBAL = 0x0e
    -__IPV6_ADDR_SCOPE_NODELOCAL = 0x01
    -__IPV6_ADDR_SCOPE_LINKLOCAL = 0x02
    -__IPV6_ADDR_SCOPE_SITELOCAL = 0x05
    -__IPV6_ADDR_SCOPE_ORGLOCAL = 0x08
    -__IPV6_ADDR_SCOPE_GLOBAL = 0x0e
    -def IN6_IS_ADDR_LINKLOCAL(a): return \
    -
    -def IN6_IS_ADDR_SITELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_NODELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_LINKLOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_SITELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_ORGLOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_GLOBAL(a): return \
    -
    -def IN6_IS_ADDR_MC_NODELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_LINKLOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_SITELOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_ORGLOCAL(a): return \
    -
    -def IN6_IS_ADDR_MC_GLOBAL(a): return \
    -
    -def IN6_IS_SCOPE_LINKLOCAL(a): return \
    -
    -IPV6_OPTIONS = 1
    -IPV6_RECVOPTS = 5
    -IPV6_RECVRETOPTS = 6
    -IPV6_RECVDSTADDR = 7
    -IPV6_RETOPTS = 8
    -IPV6_SOCKOPT_RESERVED1 = 3
    -IPV6_UNICAST_HOPS = 4
    -IPV6_MULTICAST_IF = 9
    -IPV6_MULTICAST_HOPS = 10
    -IPV6_MULTICAST_LOOP = 11
    -IPV6_JOIN_GROUP = 12
    -IPV6_LEAVE_GROUP = 13
    -IPV6_PORTRANGE = 14
    -ICMP6_FILTER = 18
    -IPV6_PKTINFO = 19
    -IPV6_HOPLIMIT = 20
    -IPV6_NEXTHOP = 21
    -IPV6_HOPOPTS = 22
    -IPV6_DSTOPTS = 23
    -IPV6_RTHDR = 24
    -IPV6_PKTOPTIONS = 25
    -IPV6_CHECKSUM = 26
    -IPV6_BINDV6ONLY = 27
    -IPV6_IPSEC_POLICY = 28
    -IPV6_FAITH = 29
    -IPV6_FW_ADD = 30
    -IPV6_FW_DEL = 31
    -IPV6_FW_FLUSH = 32
    -IPV6_FW_ZERO = 33
    -IPV6_FW_GET = 34
    -IPV6_RTHDR_LOOSE = 0
    -IPV6_RTHDR_STRICT = 1
    -IPV6_RTHDR_TYPE_0 = 0
    -IPV6_DEFAULT_MULTICAST_HOPS = 1
    -IPV6_DEFAULT_MULTICAST_LOOP = 1
    -IPV6_PORTRANGE_DEFAULT = 0
    -IPV6_PORTRANGE_HIGH = 1
    -IPV6_PORTRANGE_LOW = 2
    -IPV6PROTO_MAXID = (IPPROTO_PIM + 1)
    -IPV6CTL_FORWARDING = 1
    -IPV6CTL_SENDREDIRECTS = 2
    -IPV6CTL_DEFHLIM = 3
    -IPV6CTL_DEFMTU = 4
    -IPV6CTL_FORWSRCRT = 5
    -IPV6CTL_STATS = 6
    -IPV6CTL_MRTSTATS = 7
    -IPV6CTL_MRTPROTO = 8
    -IPV6CTL_MAXFRAGPACKETS = 9
    -IPV6CTL_SOURCECHECK = 10
    -IPV6CTL_SOURCECHECK_LOGINT = 11
    -IPV6CTL_ACCEPT_RTADV = 12
    -IPV6CTL_KEEPFAITH = 13
    -IPV6CTL_LOG_INTERVAL = 14
    -IPV6CTL_HDRNESTLIMIT = 15
    -IPV6CTL_DAD_COUNT = 16
    -IPV6CTL_AUTO_FLOWLABEL = 17
    -IPV6CTL_DEFMCASTHLIM = 18
    -IPV6CTL_GIF_HLIM = 19
    -IPV6CTL_KAME_VERSION = 20
    -IPV6CTL_USE_DEPRECATED = 21
    -IPV6CTL_RR_PRUNE = 22
    -IPV6CTL_MAPPED_ADDR = 23
    -IPV6CTL_BINDV6ONLY = 24
    -IPV6CTL_RTEXPIRE = 25
    -IPV6CTL_RTMINEXPIRE = 26
    -IPV6CTL_RTMAXCACHE = 27
    -IPV6CTL_MAXID = 28
    diff --git a/lib-python/3/plat-freebsd5/regen b/lib-python/3/plat-freebsd5/regen
    deleted file mode 100755
    --- a/lib-python/3/plat-freebsd5/regen
    +++ /dev/null
    @@ -1,3 +0,0 @@
    -#! /bin/sh
    -set -v
    -python ../../Tools/scripts/h2py.py -i '(u_long)' /usr/include/netinet/in.h
    diff --git a/lib-python/3/plat-freebsd6/IN.py b/lib-python/3/plat-freebsd6/IN.py
    deleted file mode 100644
    --- a/lib-python/3/plat-freebsd6/IN.py
    +++ /dev/null
    @@ -1,551 +0,0 @@
    -# Generated by h2py from /usr/include/netinet/in.h
    -
    -# Included from sys/cdefs.h
    -__GNUCLIKE_ASM = 3
    -__GNUCLIKE_ASM = 2
    -__GNUCLIKE___TYPEOF = 1
    -__GNUCLIKE___OFFSETOF = 1
    -__GNUCLIKE___SECTION = 1
    -__GNUCLIKE_ATTRIBUTE_MODE_DI = 1
    -__GNUCLIKE_CTOR_SECTION_HANDLING = 1
    -__GNUCLIKE_BUILTIN_CONSTANT_P = 1
    -__GNUCLIKE_BUILTIN_VARARGS = 1
    -__GNUCLIKE_BUILTIN_STDARG = 1
    -__GNUCLIKE_BUILTIN_VAALIST = 1
    -__GNUC_VA_LIST_COMPATIBILITY = 1
    -__GNUCLIKE_BUILTIN_NEXT_ARG = 1
    -__GNUCLIKE_BUILTIN_MEMCPY = 1
    -__CC_SUPPORTS_INLINE = 1
    -__CC_SUPPORTS___INLINE = 1
    -__CC_SUPPORTS___INLINE__ = 1
    -__CC_SUPPORTS___FUNC__ = 1
    -__CC_SUPPORTS_WARNING = 1
    -__CC_SUPPORTS_VARADIC_XXX = 1
    -__CC_SUPPORTS_DYNAMIC_ARRAY_INIT = 1
    -__CC_INT_IS_32BIT = 1
    -def __P(protos): return protos
    -
    -def __STRING(x): return #x
    -
    -def __XSTRING(x): return __STRING(x)
    -
    -def __P(protos): return ()
    -
    -def __STRING(x): return "x"
    -
    -def __aligned(x): return __attribute__((__aligned__(x)))
    -
    -def __section(x): return __attribute__((__section__(x)))
    -
    -def __aligned(x): return __attribute__((__aligned__(x)))
    -
    -def __section(x): return __attribute__((__section__(x)))
    -
    -def __nonnull(x): return __attribute__((__nonnull__(x)))
    -
    -def __predict_true(exp): return __builtin_expect((exp), 1)
    -
    -def __predict_false(exp): return __builtin_expect((exp), 0)
    -
    -def __predict_true(exp): return (exp)
    -
    -def __predict_false(exp): return (exp)
    -
    -def __format_arg(fmtarg): return __attribute__((__format_arg__ (fmtarg)))
    -
    -def __FBSDID(s): return __IDSTRING(__CONCAT(__rcsid_,__LINE__),s)
    -
    -def __RCSID(s): return __IDSTRING(__CONCAT(__rcsid_,__LINE__),s)
    -
    -def __RCSID_SOURCE(s): return __IDSTRING(__CONCAT(__rcsid_source_,__LINE__),s)
    -
    -def __SCCSID(s): return __IDSTRING(__CONCAT(__sccsid_,__LINE__),s)
    -
    -def __COPYRIGHT(s): return __IDSTRING(__CONCAT(__copyright_,__LINE__),s)
    -
    -_POSIX_C_SOURCE = 199009
    -_POSIX_C_SOURCE = 199209
    -__XSI_VISIBLE = 600
    -_POSIX_C_SOURCE = 200112
    -__XSI_VISIBLE = 500
    -_POSIX_C_SOURCE = 199506
    -_POSIX_C_SOURCE = 198808
    -__POSIX_VISIBLE = 200112
    -__ISO_C_VISIBLE = 1999
    -__POSIX_VISIBLE = 199506
    -__ISO_C_VISIBLE = 1990
    -__POSIX_VISIBLE = 199309
    -__ISO_C_VISIBLE = 1990
    -__POSIX_VISIBLE = 199209
    -__ISO_C_VISIBLE = 1990
    -__POSIX_VISIBLE = 199009
    -__ISO_C_VISIBLE = 1990
    -__POSIX_VISIBLE = 198808
    -__ISO_C_VISIBLE = 0
    -__POSIX_VISIBLE = 0
    -__XSI_VISIBLE = 0
    -__BSD_VISIBLE = 0
    -__ISO_C_VISIBLE = 1990
    -__POSIX_VISIBLE = 0
    -__XSI_VISIBLE = 0
    -__BSD_VISIBLE = 0
    -__ISO_C_VISIBLE = 1999
    -__POSIX_VISIBLE = 200112
    -__XSI_VISIBLE = 600
    -__BSD_VISIBLE = 1
    -__ISO_C_VISIBLE = 1999
    -
    -# Included from sys/_types.h
    -
    -# Included from machine/_types.h
    -
    -# Included from machine/endian.h
    -_QUAD_HIGHWORD = 1
    -_QUAD_LOWWORD = 0
    -_LITTLE_ENDIAN = 1234
    -_BIG_ENDIAN = 4321
    -_PDP_ENDIAN = 3412
    -_BYTE_ORDER = _LITTLE_ENDIAN
    -LITTLE_ENDIAN = _LITTLE_ENDIAN
    -BIG_ENDIAN = _BIG_ENDIAN
    -PDP_ENDIAN = _PDP_ENDIAN
    -BYTE_ORDER = _BYTE_ORDER
    -def __word_swap_int_var(x): return \
    -
    -def __word_swap_int_const(x): return \
    -
    -def __word_swap_int(x): return __word_swap_int_var(x)
    -
    -def __byte_swap_int_var(x): return \
    -
    -def __byte_swap_int_const(x): return \
    -
    -def __byte_swap_int(x): return __byte_swap_int_var(x)
    -
    -def __byte_swap_long_var(x): return \
    -
    -def __byte_swap_long_const(x): return \
    -
    -def __byte_swap_long(x): return __byte_swap_long_var(x)
    -
    -def __byte_swap_word_var(x): return \
    -
    -def __byte_swap_word_const(x): return \
    -
    -def __byte_swap_word(x): return __byte_swap_word_var(x)
    -
    -def __htonl(x): return __bswap32(x)
    -
    -def __htons(x): return __bswap16(x)
    -
    -def __ntohl(x): return __bswap32(x)
    -
    -def __ntohs(x): return __bswap16(x)
    -
    -IPPROTO_IP = 0
    -IPPROTO_ICMP = 1
    -IPPROTO_TCP = 6
    -IPPROTO_UDP = 17
    -def htonl(x): return __htonl(x)
    -
    -def htons(x): return __htons(x)
    -
    -def ntohl(x): return __ntohl(x)
    -
    -def ntohs(x): return __ntohs(x)
    -
    -IPPROTO_RAW = 255
    -INET_ADDRSTRLEN = 16
    -IPPROTO_HOPOPTS = 0
    -IPPROTO_IGMP = 2
    -IPPROTO_GGP = 3
    -IPPROTO_IPV4 = 4
    -IPPROTO_IPIP = IPPROTO_IPV4
    -IPPROTO_ST = 7
    -IPPROTO_EGP = 8
    -IPPROTO_PIGP = 9
    -IPPROTO_RCCMON = 10
    -IPPROTO_NVPII = 11
    -IPPROTO_PUP = 12
    -IPPROTO_ARGUS = 13
    -IPPROTO_EMCON = 14
    -IPPROTO_XNET = 15
    -IPPROTO_CHAOS = 16
    -IPPROTO_MUX = 18
    -IPPROTO_MEAS = 19
    -IPPROTO_HMP = 20
    -IPPROTO_PRM = 21
    -IPPROTO_IDP = 22
    -IPPROTO_TRUNK1 = 23
    -IPPROTO_TRUNK2 = 24
    -IPPROTO_LEAF1 = 25
    -IPPROTO_LEAF2 = 26
    -IPPROTO_RDP = 27
    -IPPROTO_IRTP = 28
    -IPPROTO_TP = 29
    -IPPROTO_BLT = 30
    -IPPROTO_NSP = 31
    -IPPROTO_INP = 32
    -IPPROTO_SEP = 33
    -IPPROTO_3PC = 34
    -IPPROTO_IDPR = 35
    -IPPROTO_XTP = 36
    -IPPROTO_DDP = 37
    -IPPROTO_CMTP = 38
    -IPPROTO_TPXX = 39
    -IPPROTO_IL = 40
    -IPPROTO_IPV6 = 41
    -IPPROTO_SDRP = 42
    -IPPROTO_ROUTING = 43
    -IPPROTO_FRAGMENT = 44
    -IPPROTO_IDRP = 45
    -IPPROTO_RSVP = 46
    -IPPROTO_GRE = 47
    -IPPROTO_MHRP = 48
    -IPPROTO_BHA = 49
    -IPPROTO_ESP = 50
    -IPPROTO_AH = 51
    -IPPROTO_INLSP = 52
    -IPPROTO_SWIPE = 53
    -IPPROTO_NHRP = 54
    -IPPROTO_MOBILE = 55
    -IPPROTO_TLSP = 56
    -IPPROTO_SKIP = 57
    -IPPROTO_ICMPV6 = 58
    -IPPROTO_NONE = 59
    -IPPROTO_DSTOPTS = 60
    -IPPROTO_AHIP = 61
    -IPPROTO_CFTP = 62
    -IPPROTO_HELLO = 63
    -IPPROTO_SATEXPAK = 64
    -IPPROTO_KRYPTOLAN = 65
    -IPPROTO_RVD = 66
    -IPPROTO_IPPC = 67
    -IPPROTO_ADFS = 68
    -IPPROTO_SATMON = 69
    -IPPROTO_VISA = 70
    -IPPROTO_IPCV = 71
    -IPPROTO_CPNX = 72
    -IPPROTO_CPHB = 73
    -IPPROTO_WSN = 74
    -IPPROTO_PVP = 75
    -IPPROTO_BRSATMON = 76
    -IPPROTO_ND = 77
    -IPPROTO_WBMON = 78
    -IPPROTO_WBEXPAK = 79
    -IPPROTO_EON = 80
    -IPPROTO_VMTP = 81
    -IPPROTO_SVMTP = 82
    -IPPROTO_VINES = 83
    -IPPROTO_TTP = 84
    -IPPROTO_IGP = 85
    -IPPROTO_DGP = 86
    -IPPROTO_TCF = 87
    -IPPROTO_IGRP = 88
    -IPPROTO_OSPFIGP = 89
    -IPPROTO_SRPC = 90
    -IPPROTO_LARP = 91
    -IPPROTO_MTP = 92
    -IPPROTO_AX25 = 93
    -IPPROTO_IPEIP = 94
    -IPPROTO_MICP = 95
    -IPPROTO_SCCSP = 96
    -IPPROTO_ETHERIP = 97
    -IPPROTO_ENCAP = 98
    -IPPROTO_APES = 99
    -IPPROTO_GMTP = 100
    -IPPROTO_IPCOMP = 108
    -IPPROTO_SCTP = 132
    -IPPROTO_PIM = 103
    -IPPROTO_CARP = 112
    -IPPROTO_PGM = 113
    -IPPROTO_PFSYNC = 240
    -IPPROTO_OLD_DIVERT = 254
    -IPPROTO_MAX = 256
    -IPPROTO_DONE = 257
    -IPPROTO_DIVERT = 258
    -IPPROTO_SPACER = 32767
    -IPPORT_RESERVED = 1024
    -IPPORT_HIFIRSTAUTO = 49152
    -IPPORT_HILASTAUTO = 65535
    -IPPORT_RESERVEDSTART = 600
    -IPPORT_MAX = 65535
    -def IN_CLASSA(i): return (((u_int32_t)(i) & 0x80000000) == 0)
    -
    -IN_CLASSA_NET = 0xff000000
    -IN_CLASSA_NSHIFT = 24
    -IN_CLASSA_HOST = 0x00ffffff
    -IN_CLASSA_MAX = 128
    -def IN_CLASSB(i): return (((u_int32_t)(i) & 0xc0000000) == 0x80000000)
    -
    -IN_CLASSB_NET = 0xffff0000
    -IN_CLASSB_NSHIFT = 16
    -IN_CLASSB_HOST = 0x0000ffff
    -IN_CLASSB_MAX = 65536
    -def IN_CLASSC(i): return (((u_int32_t)(i) & 0xe0000000) == 0xc0000000)
    -
    -IN_CLASSC_NET = 0xffffff00
    -IN_CLASSC_NSHIFT = 8
    -IN_CLASSC_HOST = 0x000000ff
    -def IN_CLASSD(i): return (((u_int32_t)(i) & 0xf0000000) == 0xe0000000)
    -
    -IN_CLASSD_NET = 0xf0000000
    -IN_CLASSD_NSHIFT = 28
    -IN_CLASSD_HOST = 0x0fffffff
    -def IN_MULTICAST(i): return IN_CLASSD(i)
    -
    -def IN_EXPERIMENTAL(i): return (((u_int32_t)(i) & 0xf0000000) == 0xf0000000)
    -
    -def IN_BADCLASS(i): return (((u_int32_t)(i) & 0xf0000000) == 0xf0000000)
    -
    -INADDR_NONE = 0xffffffff
    -IN_LOOPBACKNET = 127
    -IP_OPTIONS = 1
    -IP_HDRINCL = 2
    -IP_TOS = 3
    -IP_TTL = 4
    -IP_RECVOPTS = 5
    -IP_RECVRETOPTS = 6
    -IP_RECVDSTADDR = 7
    -IP_SENDSRCADDR = IP_RECVDSTADDR
    -IP_RETOPTS = 8
    -IP_MULTICAST_IF = 9
    -IP_MULTICAST_TTL = 10
    -IP_MULTICAST_LOOP = 11
    -IP_ADD_MEMBERSHIP = 12
    -IP_DROP_MEMBERSHIP = 13
    -IP_MULTICAST_VIF = 14
    -IP_RSVP_ON = 15
    -IP_RSVP_OFF = 16
    -IP_RSVP_VIF_ON = 17
    -IP_RSVP_VIF_OFF = 18
    -IP_PORTRANGE = 19
    -IP_RECVIF = 20
    -IP_IPSEC_POLICY = 21
    -IP_FAITH = 22
    -IP_ONESBCAST = 23
    -IP_FW_TABLE_ADD = 40
    -IP_FW_TABLE_DEL = 41
    -IP_FW_TABLE_FLUSH = 42
    -IP_FW_TABLE_GETSIZE = 43
    -IP_FW_TABLE_LIST = 44
    -IP_FW_ADD = 50
    -IP_FW_DEL = 51
    -IP_FW_FLUSH = 52
    -IP_FW_ZERO = 53
    -IP_FW_GET = 54
    -IP_FW_RESETLOG = 55
    -IP_DUMMYNET_CONFIGURE = 60
    -IP_DUMMYNET_DEL = 61
    -IP_DUMMYNET_FLUSH = 62
    -IP_DUMMYNET_GET = 64
    -IP_RECVTTL = 65
    -IP_MINTTL = 66
    -IP_DONTFRAG = 67
    -IP_DEFAULT_MULTICAST_TTL = 1
    -IP_DEFAULT_MULTICAST_LOOP = 1
    -IP_MAX_MEMBERSHIPS = 20
    -IP_PORTRANGE_DEFAULT = 0
    -IP_PORTRANGE_HIGH = 1
    -IP_PORTRANGE_LOW = 2
    -IPPROTO_MAXID = (IPPROTO_AH + 1)
    -IPCTL_FORWARDING = 1
    -IPCTL_SENDREDIRECTS = 2
    -IPCTL_DEFTTL = 3
    -IPCTL_DEFMTU = 4
    -IPCTL_RTEXPIRE = 5
    -IPCTL_RTMINEXPIRE = 6
    -IPCTL_RTMAXCACHE = 7
    -IPCTL_SOURCEROUTE = 8
    -IPCTL_DIRECTEDBROADCAST = 9
    -IPCTL_INTRQMAXLEN = 10
    -IPCTL_INTRQDROPS = 11
    -IPCTL_STATS = 12
    -IPCTL_ACCEPTSOURCEROUTE = 13
    -IPCTL_FASTFORWARDING = 14
    -IPCTL_KEEPFAITH = 15
    -IPCTL_GIF_TTL = 16
    -IPCTL_MAXID = 17
    -def in_nullhost(x): return ((x).s_addr == INADDR_ANY)
    -
    -
    -# Included from netinet6/in6.h
    -__KAME_VERSION = "FreeBSD"
    -IPV6PORT_RESERVED = 1024
    -IPV6PORT_ANONMIN = 49152
    -IPV6PORT_ANONMAX = 65535
    -IPV6PORT_RESERVEDMIN = 600
    -IPV6PORT_RESERVEDMAX = (IPV6PORT_RESERVED-1)
    -INET6_ADDRSTRLEN = 46
    -IPV6_ADDR_INT32_ONE = 1
    -IPV6_ADDR_INT32_TWO = 2
    -IPV6_ADDR_INT32_MNL = 0xff010000
    -IPV6_ADDR_INT32_MLL = 0xff020000
    -IPV6_ADDR_INT32_SMP = 0x0000ffff
    -IPV6_ADDR_INT16_ULL = 0xfe80
    -IPV6_ADDR_INT16_USL = 0xfec0
    -IPV6_ADDR_INT16_MLL = 0xff02
    -IPV6_ADDR_INT32_ONE = 0x01000000
    -IPV6_ADDR_INT32_TWO = 0x02000000
    
    From pypy.commits at gmail.com  Tue Aug 27 03:48:26 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Tue, 27 Aug 2019 00:48:26 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: Revert the changes to backendopt done
     in 906b820ecdac; instead
    Message-ID: <5d64e04a.1c69fb81.cd0d1.eca6@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97293:322c4bfc9c42
    Date: 2019-08-27 09:47 +0200
    http://bitbucket.org/pypy/pypy/changeset/322c4bfc9c42/
    
    Log:	Revert the changes to backendopt done in 906b820ecdac; instead move
    	the review-checking before running backendopt. More small fixes.
    
    diff --git a/rpython/rlib/jit.py b/rpython/rlib/jit.py
    --- a/rpython/rlib/jit.py
    +++ b/rpython/rlib/jit.py
    @@ -3,7 +3,9 @@
     import py
     
     from rpython.rlib.nonconst import NonConstant
    -from rpython.rlib.objectmodel import CDefinedIntSymbolic, keepalive_until_here, specialize, not_rpython, we_are_translated
    +from rpython.rlib.objectmodel import CDefinedIntSymbolic, keepalive_until_here
    +from rpython.rlib.objectmodel import specialize, not_rpython, we_are_translated
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rlib.unroll import unrolling_iterable
     from rpython.rtyper.extregistry import ExtRegistryEntry
     from rpython.tool.sourcetools import rpython_wrapper
    @@ -1196,6 +1198,7 @@
     def _jit_conditional_call(condition, function, *args):
         pass           # special-cased below
     
+@sandbox_review(reviewed=True)   # for the llop.jit_conditional_call
     @specialize.call_location()
     def conditional_call(condition, function, *args):
         """Does the same as:
    @@ -1217,6 +1220,7 @@
     def _jit_conditional_call_value(value, function, *args):
         return value    # special-cased below
     
+@sandbox_review(reviewed=True)   # for the llop.jit_conditional_call_value
     @specialize.call_location()
     def conditional_call_elidable(value, function, *args):
         """Does the same as:
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -1600,6 +1600,7 @@
                 lltype.Ptr(rwin32.FILETIME), lltype.Ptr(rwin32.FILETIME)],
             rwin32.BOOL, calling_conv='win')
     
+@sandbox_review(reviewed=True)
     @replace_os_function('times')
     def times():
         if not _WIN32:
    diff --git a/rpython/translator/backendopt/all.py b/rpython/translator/backendopt/all.py
    --- a/rpython/translator/backendopt/all.py
    +++ b/rpython/translator/backendopt/all.py
    @@ -113,7 +113,7 @@
         if config.profile_based_inline and not secondary:
             threshold = config.profile_based_inline_threshold
             heuristic = get_function(config.profile_based_inline_heuristic)
    -        inline.instrument_inline_candidates(translator, graphs, threshold)
    +        inline.instrument_inline_candidates(graphs, threshold)
             counters = translator.driver_instrument_result(
                 config.profile_based_inline)
             n = len(counters)
    diff --git a/rpython/translator/backendopt/inline.py b/rpython/translator/backendopt/inline.py
    --- a/rpython/translator/backendopt/inline.py
    +++ b/rpython/translator/backendopt/inline.py
    @@ -548,8 +548,7 @@
         return (0.9999 * measure_median_execution_cost(graph) +
                 count), True       # may be NaN
     
    -def inlinable_static_callers(translator, graphs, store_calls=False,
    -                             ok_to_call=None):
    +def inlinable_static_callers(graphs, store_calls=False, ok_to_call=None):
         if ok_to_call is None:
             ok_to_call = set(graphs)
         result = []
    @@ -559,7 +558,6 @@
             else:
                 result.append((parentgraph, graph))
         #
    -    dont_inline = make_dont_inline_checker(translator)
         for parentgraph in graphs:
             for block in parentgraph.iterblocks():
                 for op in block.operations:
    @@ -567,12 +565,13 @@
                         funcobj = op.args[0].value._obj
                         graph = getattr(funcobj, 'graph', None)
                         if graph is not None and graph in ok_to_call:
    -                        if dont_inline(funcobj):
    +                        if getattr(getattr(funcobj, '_callable', None),
    +                                   '_dont_inline_', False):
                                 continue
                             add(parentgraph, block, op, graph)
         return result
     
    -def instrument_inline_candidates(translator, graphs, threshold):
    +def instrument_inline_candidates(graphs, threshold):
         cache = {None: False}
         def candidate(graph):
             try:
    @@ -582,7 +581,6 @@
                 cache[graph] = res
                 return res
         n = 0
    -    dont_inline = make_dont_inline_checker(translator)
         for parentgraph in graphs:
             for block in parentgraph.iterblocks():
                 ops = block.operations
    @@ -594,7 +592,8 @@
                         funcobj = op.args[0].value._obj
                         graph = getattr(funcobj, 'graph', None)
                         if graph is not None:
    -                        if dont_inline(funcobj):
    +                        if getattr(getattr(funcobj, '_callable', None),
    +                                   '_dont_inline_', False):
                                 continue
                         if candidate(graph):
                             tag = Constant('inline', Void)
    @@ -611,17 +610,6 @@
         return (hasattr(graph, 'func') and
                 getattr(graph.func, '_always_inline_', None))
     
    -def make_dont_inline_checker(translator):
    -    sandbox = translator.config.translation.sandbox
    -
    -    def dont_inline(funcobj):
    -        func = getattr(funcobj, '_callable', None)
    -        if sandbox:
    -            if hasattr(func, '_sandbox_review_'):
    -                return True
    -        return getattr(func, '_dont_inline_', False)
    -    return dont_inline
    -
     def auto_inlining(translator, threshold=None,
                       callgraph=None,
                       call_count_pred=None,
    @@ -633,7 +621,7 @@
         callers = {}     # {graph: {graphs-that-call-it}}
         callees = {}     # {graph: {graphs-that-it-calls}}
         if callgraph is None:
    -        callgraph = inlinable_static_callers(translator, translator.graphs)
    +        callgraph = inlinable_static_callers(translator.graphs)
         for graph1, graph2 in callgraph:
             callers.setdefault(graph2, {})[graph1] = True
             callees.setdefault(graph1, {})[graph2] = True
    @@ -739,8 +727,7 @@
                                     if not hasattr(graph, 'exceptiontransformed')])
         else:
             ok_to_call = None
    -    callgraph = inlinable_static_callers(translator, graphs,
    -                                         ok_to_call=ok_to_call)
    +    callgraph = inlinable_static_callers(graphs, ok_to_call=ok_to_call)
         count = auto_inlining(translator, threshold, callgraph=callgraph,
                               heuristic=heuristic,
                               call_count_pred=call_count_pred)
    diff --git a/rpython/translator/backendopt/test/test_inline.py b/rpython/translator/backendopt/test/test_inline.py
    --- a/rpython/translator/backendopt/test/test_inline.py
    +++ b/rpython/translator/backendopt/test/test_inline.py
    @@ -100,7 +100,7 @@
             call_count_pred = None
             if call_count_check:
                 call_count_pred = lambda lbl: True
    -            instrument_inline_candidates(t, t.graphs, threshold)
    +            instrument_inline_candidates(t.graphs, threshold)
     
             if remove_same_as:
                 for graph in t.graphs:
    diff --git a/rpython/translator/driver.py b/rpython/translator/driver.py
    --- a/rpython/translator/driver.py
    +++ b/rpython/translator/driver.py
    @@ -344,6 +344,12 @@
             rtyper = self.translator.buildrtyper()
             rtyper.specialize(dont_simplify_again=True)
     
    +        # we do the sandbox review checking here, before inlining graphs
    +        # inside each other (and later generating extra graphs for the GC).
    +        if self.config.translation.sandbox:
    +            from rpython.translator.sandbox import graphchecker
    +            graphchecker.check_all_graphs(self.translator)
    +
         @taskdef([RTYPE], "JIT compiler generation")
         def task_pyjitpl_lltype(self):
             """ Generate bytecodes for JIT and flow the JIT helper functions
    @@ -412,10 +418,6 @@
             if translator.annotator is not None:
                 translator.frozen = True
     
    -        if self.config.translation.sandbox:
    -            from rpython.translator.sandbox import graphchecker
    -            graphchecker.check_all_graphs(self.translator)
    -
             standalone = self.standalone
             get_gchooks = self.extra.get('get_gchooks', lambda: None)
             gchooks = get_gchooks()
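
A minimal sketch of the _dont_inline_ flag that inlinable_static_callers() is back
to reading directly; the function names below are invented for illustration, only
the getattr() chain is taken from the diff above:

    def _slow_helper(x):              # an RPython function we never want inlined
        return x * 2
    _slow_helper._dont_inline_ = True

    def dont_inline_requested(funcobj):
        # same test as restored in rpython/translator/backendopt/inline.py
        return getattr(getattr(funcobj, '_callable', None),
                       '_dont_inline_', False)

The sandbox review-checking itself now happens at the end of the rtyping task,
right after rtyper.specialize(), and therefore before any graphs are inlined into
each other by backendopt.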
    
    From pypy.commits at gmail.com  Tue Aug 27 04:11:29 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Tue, 27 Aug 2019 01:11:29 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: pypy3 -b and pypy3 -d now exist,
     add them to the --help text
    Message-ID: <5d64e5b1.1c69fb81.6b767.602b@mx.google.com>
    
    Author: Matti Picus 
    Branch: py3.6
    Changeset: r97294:39096734e7bc
    Date: 2019-08-27 11:10 +0300
    http://bitbucket.org/pypy/pypy/changeset/39096734e7bc/
    
    Log:	pypy3 -b and pypy3 -d now exist, add them to the --help text
    
    diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py
    --- a/pypy/interpreter/app_main.py
    +++ b/pypy/interpreter/app_main.py
    @@ -2,11 +2,14 @@
     # This is pure Python code that handles the main entry point into "pypy3".
     # See test/test_app_main.
     
    -# Missing vs CPython: -b, -d, -x
    +# Missing vs CPython: -x
     USAGE1 = __doc__ = """\
     Options and arguments (and corresponding environment variables):
    +-b     : issue warnings about str(bytes_instance), str(bytearray_instance)\n\
    +         and comparing bytes/bytearray with str. (-bb: issue errors)\n\
     -B     : don't write .py[co] files on import; also PYTHONDONTWRITEBYTECODE=x
     -c cmd : program passed in as string (terminates option list)
    +-d     : debug output from parser; also PYTHONDEBUG=x\n\
     -E     : ignore PYTHON* environment variables (such as PYTHONPATH)
     -h     : print this help message and exit (also --help)
     -i     : inspect interactively after running script; forces a prompt even
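
A quick way to exercise the new -b behaviour once the flag is wired up; this is a
sketch only, and the spawned interpreter name and the exact warning text are
assumptions (CPython prints "BytesWarning: str() on a bytes instance", PyPy's
wording may differ):

    import subprocess
    # run: pypy3 -b -c "str(b'abc')" and expect a BytesWarning on stderr
    proc = subprocess.run(["pypy3", "-b", "-c", "str(b'abc')"],
                          stderr=subprocess.PIPE, universal_newlines=True)
    assert "BytesWarning" in proc.stderr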
    
    From pypy.commits at gmail.com  Tue Aug 27 04:34:24 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Tue, 27 Aug 2019 01:34:24 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: Write docs about the
     @sandbox_review() and llexternal(sandboxsafe=..).
    Message-ID: <5d64eb10.1c69fb81.af2fe.976e@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97295:f4ba641484fa
    Date: 2019-08-27 10:33 +0200
    http://bitbucket.org/pypy/pypy/changeset/f4ba641484fa/
    
    Log:	Write docs about the @sandbox_review() and
    	llexternal(sandboxsafe=..).
    
    diff --git a/rpython/translator/sandbox/graphchecker.py b/rpython/translator/sandbox/graphchecker.py
    --- a/rpython/translator/sandbox/graphchecker.py
    +++ b/rpython/translator/sandbox/graphchecker.py
    @@ -2,8 +2,92 @@
     This runs at the start of the database-c step, so it excludes the
     graphs produced later, notably for the GC.  These are "low-level"
     graphs that are assumed to be safe.
    +
    +Here are again the rules around this check.
    +
    +- any graph that contains only "safe" lloperations is itself "safe".
    +  The "safe" lloperations are the ones marked "tryfold" in
    +  rtyper.lltypesystem.lloperation, plus the ones listed explicitly below,
    +  plus a few variants of specific operations coded in graph_in_unsafe().
    +
    +- any graph decorated with @objectmodel.sandbox_review() is "safe".
    +  The different flags we can pass to @sandbox_review() are explained next,
    +  but the decorated graph is itself always "safe".
    +
    +- "unsafe" operations are all special rare operations, plus most importantly
    +  all *writes* into raw memory.  We assume that *reads* from anywhere are
    +  OK to ignore: any information that reaches the sandboxed process can be
    +  detected and used by anything that runs inside this process (i.e. there
    +  is no really "secret" data inside the sandboxed subprocess itself).
    +  At worst, random reads will lead to segfaults.  But random writes are not
    +  safe because that could corrupt memory---e.g. overwrite some GC object
    +  header, or even (although I'm not sure how) actually cause the sandboxed
    +  process to misbehave in more important ways like doing actual system calls
    +  that are supposed to be forbidden.
    +
    +- the decorator @sandbox_review(check_caller=True) means that the graph is
    +  safe, but any call to this graph from somewhere else is an unsafe operation.
    +  This forces all callers to also be reviewed and marked with some form of
    +  @sandbox_review().
    +
    +- @sandbox_review(reviewed=True) means that the graph is safe and all
    +  calls to this graph are also safe.  This should only be used on functions
    +  that do internally "unsafe" stuff like writing to raw memory but don't
    +  take arguments that could lead them to do bogus things.  A typical counter-
    +  example is a function that takes a raw pointer and that writes something to
    +  it; this should *not* be marked with reviewed=True.  On the other hand, many
    +  RPython wrappers to external C functions can be reviewed=True because
    +  they translate GC-safe information (say an RPython string) to raw memory,
    +  do the call, and translate the result back to GC-safe information.
    +
    +- @sandbox_review(abort=True) is reserved for cases where calling this
    +  function at runtime should just immediately abort the subprocess.
    +
    +Note that all flags above should be considered independently of what the
    +actual C function calls are supposed to do.  For example, the RPython
    +wrapper rposix.system() is something you definitely don't want to allow as-is,
    +but the wrapper and the call to the C function are fine.  It's up to the
    +controlling process to refuse to reply to the system() external call
    +(either by having it return ENOSYS or a similar error, or by killing the
    +sandboxed process completely).
    +
    +Like system(), all calls to external C functions are *by default* removed and
    +turned into I/O on stdin/stdout, asking the parent controlling process what
    +to do.  This is controlled in more details by rffi.llexternal().  It takes
    +its own argument "sandboxsafe", which can be one of the following values:
    +
    +- sandboxsafe=False (the default): the external C call is not done but turned
    +  into I/O on stdin/stdout.  Moreover, *if* the function takes or returns a
    +  raw pointer, then it is flagged with @sandbox_review(check_caller=True) to
    +  ensure that all callers do something sane with these raw pointers.  If
    +  the C function only takes and returns integer or float arguments, there is
    +  no real need, so in this case we flag @sandbox_review(reviewed=True) instead.
    +
    +- sandboxsafe=True: means the external call should be done straight from the
    +  sandboxed process.  Reserved for specific functions like rposix.c_strerror(),
    +  or some memory-manipulation functions used by the GC itself.
    +
    +- sandboxsafe="abort": like @sandbox_review(abort=True).
    +
    +- sandboxsafe="check_caller": forces @sandbox_review(check_caller=True).
    +  Useful for llexternal() functions that appear to return an integer but
    +  that's really some address that must be carefully managed.
    +
    +- sandboxsafe="nowrite": forces @sandbox_review(reviewed=True).  This is OK
    +  for C functions that have pointer arguments but none of them can point
    +  to anything that will be written to (hence the name).  The idea is that
    +  for the common case of a function that takes a "const char *" argument,
    +  we should just mark that function as reviewed=True, because it is safe:
    +  the controller process will at most read things from the sandboxed process,
    +  namely what the pointer points to, but it should not attempt to do any
    +  write into the sandboxed process' memory.  Typically the caller itself
    +  calls rffi.str2charp() and rffi.free_charp() around the call, but these
    +  are also @sandbox_review(reviewed=True) helpers, so such a caller doesn't
    +  need to be explicitly reviewed.
    +
     """
     
    +
     from rpython.flowspace.model import SpaceOperation, Constant
     from rpython.rtyper.rmodel import inputconst
     from rpython.rtyper.lltypesystem import lltype, llmemory, rstr
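
To make the rules above concrete, here is a minimal sketch in the style of the
rposix changes that appear further down in this digest; the wrapper name and its
body are illustrative only, and both sandbox_review and the sandboxsafe keyword
exist only on the sandbox-2 branch being documented here:

    from rpython.rtyper.lltypesystem import rffi, lltype
    from rpython.rlib.objectmodel import sandbox_review

    # chdir() only *reads* through its "const char *" argument -> "nowrite"
    c_chdir = rffi.llexternal('chdir', [rffi.CCHARP], rffi.INT,
                              sandboxsafe="nowrite")

    @sandbox_review(reviewed=True)   # does raw-memory work, but callers stay safe
    def my_chdir(path):
        buf = rffi.str2charp(path)   # RPython string -> raw buffer (reviewed helper)
        try:
            return rffi.cast(lltype.Signed, c_chdir(buf))
        finally:
            rffi.free_charp(buf)

Under sandboxsafe=False (the default) the same c_chdir() would instead be turned
into a stdin/stdout exchange with the controlling process, as described above.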
    
    From pypy.commits at gmail.com  Tue Aug 27 04:58:18 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Tue, 27 Aug 2019 01:58:18 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: fixup conftest for removed files
    Message-ID: <5d64f0aa.1c69fb81.5e3d.d6fb@mx.google.com>
    
    Author: Matti Picus 
    Branch: py3.6
    Changeset: r97296:da64fb221689
    Date: 2019-08-27 11:57 +0300
    http://bitbucket.org/pypy/pypy/changeset/da64fb221689/
    
    Log:	fixup conftest for removed files
    
    diff --git a/lib-python/conftest.py b/lib-python/conftest.py
    --- a/lib-python/conftest.py
    +++ b/lib-python/conftest.py
    @@ -333,14 +333,6 @@
         RegrTest('test_pathlib.py'),
         RegrTest('test_pdb.py'),
         RegrTest('test_peepholer.py'),
    -    RegrTest('test_pep247.py'),
    -    RegrTest('test_pep277.py'),
    -    RegrTest('test_pep3120.py'),
    -    RegrTest('test_pep3131.py'),
    -    RegrTest('test_pep3151.py'),
    -    RegrTest('test_pep352.py'),
    -    RegrTest('test_pep380.py'),
    -    RegrTest('test_pep479.py'),
         RegrTest('test_pickle.py', core=True),
         RegrTest('test_pickletools.py', core=False),
         RegrTest('test_pipes.py'),
    @@ -415,7 +407,6 @@
         RegrTest('test_string.py', core=True),
         RegrTest('test_string_literals.py'),
         RegrTest('test_stringprep.py'),
    -    RegrTest('test_strlit.py'),
         RegrTest('test_strptime.py'),
         RegrTest('test_strtod.py'),
         RegrTest('test_struct.py', usemodules='struct'),
    
    From pypy.commits at gmail.com  Tue Aug 27 06:03:39 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Tue, 27 Aug 2019 03:03:39 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6-sandbox-2: hg merge sandbox-2
    Message-ID: <5d64fffb.1c69fb81.d3f4e.fe89@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6-sandbox-2
    Changeset: r97297:dcecc1d7905b
    Date: 2019-08-27 10:38 +0200
    http://bitbucket.org/pypy/pypy/changeset/dcecc1d7905b/
    
    Log:	hg merge sandbox-2
    
    diff --git a/pypy/module/__pypy__/interp_time.py b/pypy/module/__pypy__/interp_time.py
    --- a/pypy/module/__pypy__/interp_time.py
    +++ b/pypy/module/__pypy__/interp_time.py
    @@ -6,10 +6,12 @@
     from rpython.rtyper.lltypesystem import rffi, lltype
     from rpython.rlib import rtime
     from rpython.rlib.rtime import HAS_CLOCK_GETTIME
    +from rpython.rlib.objectmodel import sandbox_review
     
     
     if HAS_CLOCK_GETTIME:
     
    +    @sandbox_review(reviewed=True)
         @unwrap_spec(clk_id="c_int")
         def clock_gettime(space, clk_id):
             with lltype.scoped_alloc(rtime.TIMESPEC) as tp:
    @@ -20,6 +22,7 @@
                      float(rffi.getintfield(tp, 'c_tv_nsec')) * 0.000000001)
             return space.newfloat(t)
     
    +    @sandbox_review(reviewed=True)
         @unwrap_spec(clk_id="c_int")
         def clock_getres(space, clk_id):
             with lltype.scoped_alloc(rtime.TIMESPEC) as tp:
    diff --git a/pypy/module/_io/interp_fileio.py b/pypy/module/_io/interp_fileio.py
    --- a/pypy/module/_io/interp_fileio.py
    +++ b/pypy/module/_io/interp_fileio.py
    @@ -458,7 +458,7 @@
             length = rwbuffer.getlength()
     
             target_address = lltype.nullptr(rffi.CCHARP.TO)
    -        if length > 64:
    +        if length > 64 and not space.config.translation.sandbox:
                 try:
                     target_address = rwbuffer.get_raw_address()
                 except ValueError:
    @@ -480,6 +480,13 @@
             else:
                 # optimized case: reading more than 64 bytes into a rwbuffer
                 # with a valid raw address
    +
    +            # XXX note that this is not fully safe, because we don't "lock"
    +            # the buffer so we can't in theory pass its raw address to c_read().
    +            # Another thread could cause it to be freed in parallel.
    +            # Without proper buffer locking, it's not going to be fixed, though.
    +            assert not space.config.translation.sandbox
    +
                 while True:
                     got = c_read(self.fd, target_address, length)
                     keepalive_until_here(rwbuffer)
    diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py
    --- a/pypy/module/posix/interp_posix.py
    +++ b/pypy/module/posix/interp_posix.py
    @@ -2227,7 +2227,13 @@
         except OSError as e:
             # 'rurandom' should catch and retry internally if it gets EINTR
             # (at least in os.read(), which is probably enough in practice)
    -        raise wrap_oserror(space, e, eintr_retry=False)
    +        #
    +        # CPython raises NotImplementedError if /dev/urandom cannot be found.
    +        # To maximize compatibility, we should also raise NotImplementedError
    +        # and not OSError (although CPython also raises OSError in case it
    +        # could open /dev/urandom but there are further problems).
    +        raise wrap_oserror(space, e, eintr_retry=False,
    +            w_exception_class=space.w_NotImplementedError)
     
     def ctermid(space):
         """ctermid() -> string
    diff --git a/rpython/rlib/jit.py b/rpython/rlib/jit.py
    --- a/rpython/rlib/jit.py
    +++ b/rpython/rlib/jit.py
    @@ -3,7 +3,9 @@
     import py
     
     from rpython.rlib.nonconst import NonConstant
    -from rpython.rlib.objectmodel import CDefinedIntSymbolic, keepalive_until_here, specialize, not_rpython, we_are_translated
    +from rpython.rlib.objectmodel import CDefinedIntSymbolic, keepalive_until_here
    +from rpython.rlib.objectmodel import specialize, not_rpython, we_are_translated
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rlib.unroll import unrolling_iterable
     from rpython.rtyper.extregistry import ExtRegistryEntry
     from rpython.tool.sourcetools import rpython_wrapper
    @@ -1196,6 +1198,7 @@
     def _jit_conditional_call(condition, function, *args):
         pass           # special-cased below
     
+@sandbox_review(reviewed=True)   # for the llop.jit_conditional_call
     @specialize.call_location()
     def conditional_call(condition, function, *args):
         """Does the same as:
    @@ -1217,6 +1220,7 @@
     def _jit_conditional_call_value(value, function, *args):
         return value    # special-cased below
     
+@sandbox_review(reviewed=True)   # for the llop.jit_conditional_call_value
     @specialize.call_location()
     def conditional_call_elidable(value, function, *args):
         """Does the same as:
    diff --git a/rpython/rlib/rarithmetic.py b/rpython/rlib/rarithmetic.py
    --- a/rpython/rlib/rarithmetic.py
    +++ b/rpython/rlib/rarithmetic.py
    @@ -878,9 +878,8 @@
         Raises ParseStringOverflowError in case the result does not fit.
         """
         from rpython.rlib.rstring import (
    -        NumberStringParser, ParseStringOverflowError, strip_spaces)
    -    s = literal = strip_spaces(s)
    -    p = NumberStringParser(s, literal, base, 'int',
    +        NumberStringParser, ParseStringOverflowError)
    +    p = NumberStringParser(s, s, base, 'int',
                                allow_underscores=allow_underscores,
                                no_implicit_octal=no_implicit_octal)
         base = p.base
    diff --git a/rpython/rlib/rbigint.py b/rpython/rlib/rbigint.py
    --- a/rpython/rlib/rbigint.py
    +++ b/rpython/rlib/rbigint.py
    @@ -296,14 +296,17 @@
         def fromstr(s, base=0, allow_underscores=False):
             """As string_to_int(), but ignores an optional 'l' or 'L' suffix
             and returns an rbigint."""
    +        from rpython.rlib.rstring import NumberStringParser
             from rpython.rlib.rstring import NumberStringParser, \
                 strip_spaces
    -        s = literal = strip_spaces(s)
    +        s = literal = strip_spaces(s) # XXX could get rid of this slice
    +        end = len(s)
             if (s.endswith('l') or s.endswith('L')) and base < 22:
                 # in base 22 and above, 'L' is a valid digit!  try: long('L',22)
    -            s = s[:-1]
    +            end -= 1
             parser = NumberStringParser(s, literal, base, 'long',
    -                                    allow_underscores=allow_underscores)
    +                                    allow_underscores=allow_underscores,
    +                                    end=end)
             return rbigint._from_numberstring_parser(parser)
     
         @staticmethod
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -395,12 +395,14 @@
                       save_err=rffi.RFFI_SAVE_ERRNO)
     c_open = external(UNDERSCORE_ON_WIN32 + 'open',
                       [rffi.CCHARP, rffi.INT, rffi.MODE_T], rffi.INT,
    -                  save_err=rffi.RFFI_SAVE_ERRNO)
    +                  save_err=rffi.RFFI_SAVE_ERRNO,
    +                  sandboxsafe="nowrite")
     
     # Win32 Unicode functions
     c_wopen = external(UNDERSCORE_ON_WIN32 + 'wopen',
                        [rffi.CWCHARP, rffi.INT, rffi.MODE_T], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO,
    +                   sandboxsafe="nowrite")
     
     #___________________________________________________________________
     # Wrappers around posix functions, that accept either strings, or
    @@ -514,6 +516,7 @@
     c_close = external(UNDERSCORE_ON_WIN32 + 'close', [rffi.INT], rffi.INT,
                        releasegil=False, save_err=rffi.RFFI_SAVE_ERRNO)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('read')
     @signature(types.int(), types.int(), returns=types.any())
     def read(fd, count):
    @@ -525,6 +528,7 @@
                 got = handle_posix_error('read', c_read(fd, void_buf, count))
                 return buf.str(got)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('write')
     @signature(types.int(), types.any(), returns=types.any())
     def write(fd, data):
    @@ -649,13 +653,13 @@
     #___________________________________________________________________
     
     c_chdir = external('chdir', [rffi.CCHARP], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_fchdir = external('fchdir', [rffi.INT], rffi.INT,
                         save_err=rffi.RFFI_SAVE_ERRNO)
     c_access = external(UNDERSCORE_ON_WIN32 + 'access',
    -                    [rffi.CCHARP, rffi.INT], rffi.INT)
    +                    [rffi.CCHARP, rffi.INT], rffi.INT, sandboxsafe="nowrite")
     c_waccess = external(UNDERSCORE_ON_WIN32 + 'waccess',
    -                     [rffi.CWCHARP, rffi.INT], rffi.INT)
    +                     [rffi.CWCHARP, rffi.INT], rffi.INT, sandboxsafe="nowrite")
     
     @replace_os_function('chdir')
     @specialize.argtype(0)
    @@ -753,6 +757,7 @@
                          [rffi.CWCHARP, rffi.SIZE_T], rffi.CWCHARP,
                          save_err=rffi.RFFI_SAVE_ERRNO)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('getcwd')
     def getcwd():
         bufsize = 256
    @@ -773,6 +778,7 @@
         lltype.free(buf, flavor='raw')
         return result
     
+@sandbox_review(reviewed=True)
     @replace_os_function('getcwdu')
     def getcwdu():
         bufsize = 256
    @@ -811,9 +817,11 @@
         DIRENT = dirent_config['DIRENT']
         DIRENTP = lltype.Ptr(DIRENT)
         c_opendir = external('opendir',
    -        [rffi.CCHARP], DIRP, save_err=rffi.RFFI_SAVE_ERRNO)
    +        [rffi.CCHARP], DIRP, save_err=rffi.RFFI_SAVE_ERRNO,
    +        sandboxsafe="nowrite")
         c_fdopendir = external('fdopendir',
    -        [rffi.INT], DIRP, save_err=rffi.RFFI_SAVE_ERRNO)
    +        [rffi.INT], DIRP, save_err=rffi.RFFI_SAVE_ERRNO,
    +        sandboxsafe="nowrite")
         c_rewinddir = external('rewinddir',
             [DIRP], lltype.Void, releasegil=False)
         # XXX macro=True is hack to make sure we get the correct kind of
    @@ -828,6 +836,7 @@
     else:
         dirent_config = {}
     
+@sandbox_review(reviewed=True)
     def _listdir(dirp, rewind=False):
         result = []
         while True:
    @@ -847,6 +856,7 @@
         return result
     
     if not _WIN32:
    +    @sandbox_review(reviewed=True)
         def fdlistdir(dirfd):
             """
             Like listdir(), except that the directory is specified as an open
    @@ -921,17 +931,17 @@
     #___________________________________________________________________
     
     c_execv = external('execv', [rffi.CCHARP, rffi.CCHARPP], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_execve = external('execve',
                         [rffi.CCHARP, rffi.CCHARPP, rffi.CCHARPP], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_spawnv = external(UNDERSCORE_ON_WIN32 + 'spawnv',
                         [rffi.INT, rffi.CCHARP, rffi.CCHARPP], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_spawnve = external(UNDERSCORE_ON_WIN32 + 'spawnve',
                         [rffi.INT, rffi.CCHARP, rffi.CCHARPP, rffi.CCHARPP],
                          rffi.INT,
    -                     save_err=rffi.RFFI_SAVE_ERRNO)
    +                     save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
     @replace_os_function('execv')
     def execv(path, args):
    @@ -1006,6 +1016,7 @@
             debug.debug_forked(ofs)
         return childpid
     
+@sandbox_review(reviewed=True)
     @replace_os_function('openpty')
     @jit.dont_look_inside
     def openpty():
    @@ -1110,6 +1121,7 @@
     c_getloadavg = external('getloadavg',
                             [rffi.CArrayPtr(lltype.Float), rffi.INT], rffi.INT)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('getlogin')
     def getlogin():
         result = c_getlogin()
    @@ -1117,6 +1129,7 @@
             raise OSError(get_saved_errno(), "getlogin failed")
         return rffi.charp2str(result)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('getloadavg')
     def getloadavg():
         load = lltype.malloc(rffi.CArrayPtr(lltype.Float).TO, 3, flavor='raw')
    @@ -1134,6 +1147,7 @@
                           [rffi.CCHARP, rffi.CCHARP, rffi.SIZE_T], rffi.SSIZE_T,
                           save_err=rffi.RFFI_SAVE_ERRNO)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('readlink')
     def readlink(path):
         path = _as_bytes0(path)
    @@ -1168,6 +1182,7 @@
                          releasegil=False,
                          save_err=rffi.RFFI_SAVE_ERRNO)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('ttyname')
     def ttyname(fd):
         l_name = c_ttyname(fd)
    @@ -1178,6 +1193,7 @@
     c_strerror = external('strerror', [rffi.INT], rffi.CCHARP,
                           releasegil=False, sandboxsafe=True)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('strerror')
     def strerror(errnum):
         res = c_strerror(errnum)
    @@ -1185,20 +1201,20 @@
             raise ValueError("os_strerror failed")
         return rffi.charp2str(res)
     
    -c_system = external('system', [rffi.CCHARP], rffi.INT)
    +c_system = external('system', [rffi.CCHARP], rffi.INT, sandboxsafe="nowrite")
     
     @replace_os_function('system')
     def system(command):
         return widen(c_system(command))
     
     c_unlink = external('unlink', [rffi.CCHARP], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_mkdir = external('mkdir', [rffi.CCHARP, rffi.MODE_T], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_rmdir = external(UNDERSCORE_ON_WIN32 + 'rmdir', [rffi.CCHARP], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_wrmdir = external(UNDERSCORE_ON_WIN32 + 'wrmdir', [rffi.CWCHARP], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
     @replace_os_function('unlink')
     @specialize.argtype(0)
    @@ -1232,11 +1248,11 @@
             handle_posix_error('rmdir', c_rmdir(_as_bytes0(path)))
     
     c_chmod = external('chmod', [rffi.CCHARP, rffi.MODE_T], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_fchmod = external('fchmod', [rffi.INT, rffi.MODE_T], rffi.INT,
                         save_err=rffi.RFFI_SAVE_ERRNO,)
     c_rename = external('rename', [rffi.CCHARP, rffi.CCHARP], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
     @replace_os_function('chmod')
     @specialize.argtype(0)
    @@ -1293,10 +1309,11 @@
     #___________________________________________________________________
     
     c_mkfifo = external('mkfifo', [rffi.CCHARP, rffi.MODE_T], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_mknod = external('mknod', [rffi.CCHARP, rffi.MODE_T, rffi.INT], rffi.INT,
     #                                           # xxx: actually ^^^ dev_t
    -                   macro=_MACRO_ON_POSIX, save_err=rffi.RFFI_SAVE_ERRNO)
    +                   macro=_MACRO_ON_POSIX, save_err=rffi.RFFI_SAVE_ERRNO,
    +                   sandboxsafe="nowrite")
     
     @replace_os_function('mkfifo')
     @specialize.argtype(0)
    @@ -1337,6 +1354,7 @@
             c_pipe2 = external('pipe2', [INT_ARRAY_P, rffi.INT], rffi.INT,
                               save_err=rffi.RFFI_SAVE_ERRNO)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('pipe')
     def pipe(flags=0):
         # 'flags' might be ignored.  Check the result.
    @@ -1373,6 +1391,7 @@
             finally:
                 lltype.free(filedes, flavor='raw')
     
+@sandbox_review(reviewed=True)
     def pipe2(flags):
         # Only available if there is really a c_pipe2 function.
         # No fallback to pipe() if we get ENOSYS.
    @@ -1385,9 +1404,9 @@
             lltype.free(filedes, flavor='raw')
     
     c_link = external('link', [rffi.CCHARP, rffi.CCHARP], rffi.INT,
    -                  save_err=rffi.RFFI_SAVE_ERRNO,)
    +                  save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_symlink = external('symlink', [rffi.CCHARP, rffi.CCHARP], rffi.INT,
    -                     save_err=rffi.RFFI_SAVE_ERRNO)
    +                     save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
     #___________________________________________________________________
     
    @@ -1420,9 +1439,9 @@
         return widen(c_umask(newmask))
     
     c_chown = external('chown', [rffi.CCHARP, rffi.INT, rffi.INT], rffi.INT,
    -                   save_err=rffi.RFFI_SAVE_ERRNO)
    +                   save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_lchown = external('lchown', [rffi.CCHARP, rffi.INT, rffi.INT], rffi.INT,
    -                    save_err=rffi.RFFI_SAVE_ERRNO)
    +                    save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     c_fchown = external('fchown', [rffi.INT, rffi.INT, rffi.INT], rffi.INT,
                         save_err=rffi.RFFI_SAVE_ERRNO)
     
    @@ -1581,6 +1600,7 @@
                 lltype.Ptr(rwin32.FILETIME), lltype.Ptr(rwin32.FILETIME)],
             rwin32.BOOL, calling_conv='win')
     
+@sandbox_review(reviewed=True)
     @replace_os_function('times')
     def times():
         if not _WIN32:
    @@ -1680,12 +1700,14 @@
     
     c_ctermid = external('ctermid', [rffi.CCHARP], rffi.CCHARP)
     
+@sandbox_review(reviewed=True)
     @replace_os_function('ctermid')
     def ctermid():
         return rffi.charp2str(c_ctermid(lltype.nullptr(rffi.CCHARP.TO)))
     
     c_tmpnam = external('tmpnam', [rffi.CCHARP], rffi.CCHARP)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('tmpnam')
     def tmpnam():
         return rffi.charp2str(c_tmpnam(lltype.nullptr(rffi.CCHARP.TO)))
    @@ -1737,8 +1759,10 @@
         c_setgroups = external('setgroups', [rffi.SIZE_T, GID_GROUPS_T], rffi.INT,
                                save_err=rffi.RFFI_SAVE_ERRNO)
         c_initgroups = external('initgroups', [rffi.CCHARP, GID_T], rffi.INT,
    -                            save_err=rffi.RFFI_SAVE_ERRNO)
    +                            save_err=rffi.RFFI_SAVE_ERRNO,
    +                            sandboxsafe="nowrite")
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('getgroups')
     def getgroups():
         n = handle_posix_error('getgroups',
    @@ -1886,6 +1910,7 @@
         c_setresgid = external('setresgid', [GID_T] * 3, rffi.INT,
                                save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         @replace_os_function('getresuid')
         def getresuid():
             out = lltype.malloc(UID_T_P.TO, 3, flavor='raw')
    @@ -1898,6 +1923,7 @@
             finally:
                 lltype.free(out, flavor='raw')
     
    +    @sandbox_review(reviewed=True)
         @replace_os_function('getresgid')
         def getresgid():
             out = lltype.malloc(GID_T_P.TO, 3, flavor='raw')
    @@ -1956,6 +1982,7 @@
     c_chroot = external('chroot', [rffi.CCHARP], rffi.INT,
                         save_err=rffi.RFFI_SAVE_ERRNO,
                         macro=_MACRO_ON_POSIX,
    +                    sandboxsafe="nowrite",
                         compilation_info=ExternalCompilationInfo(includes=['unistd.h']))
     
     @replace_os_function('chroot')
    @@ -1981,6 +2008,7 @@
                            compilation_info=CConfig._compilation_info_,
                            save_err=rffi.RFFI_SAVE_ERRNO)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('uname')
     def uname():
         l_utsbuf = lltype.malloc(UTSNAMEP.TO, flavor='raw')
    @@ -2024,7 +2052,8 @@
     c_fpathconf = external('fpathconf', [rffi.INT, rffi.INT], rffi.LONG,
                            save_err=rffi.RFFI_FULL_ERRNO_ZERO)
     c_pathconf = external('pathconf', [rffi.CCHARP, rffi.INT], rffi.LONG,
    -                      save_err=rffi.RFFI_FULL_ERRNO_ZERO)
    +                      save_err=rffi.RFFI_FULL_ERRNO_ZERO,
    +                      sandboxsafe="nowrite")
     c_confstr = external('confstr',
                          [rffi.INT, rffi.CCHARP, rffi.SIZE_T], rffi.SIZE_T,
                           save_err=rffi.RFFI_FULL_ERRNO_ZERO)
    @@ -2056,6 +2085,7 @@
                 raise OSError(errno, "pathconf failed")
         return res
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('confstr')
     def confstr(value):
         n = intmask(c_confstr(value, lltype.nullptr(rffi.CCHARP.TO), 0))
    @@ -2129,7 +2159,8 @@
     
     if HAVE_FACCESSAT:
         c_faccessat = external('faccessat',
    -        [rffi.INT, rffi.CCHARP, rffi.INT, rffi.INT], rffi.INT)
    +        [rffi.INT, rffi.CCHARP, rffi.INT, rffi.INT], rffi.INT,
    +        sandboxsafe="nowrite")
     
         def faccessat(pathname, mode, dir_fd=AT_FDCWD,
                 effective_ids=False, follow_symlinks=True):
    @@ -2147,7 +2178,7 @@
     if HAVE_FCHMODAT:
         c_fchmodat = external('fchmodat',
             [rffi.INT, rffi.CCHARP, rffi.INT, rffi.INT], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO,)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def fchmodat(path, mode, dir_fd=AT_FDCWD, follow_symlinks=True):
             if follow_symlinks:
    @@ -2160,7 +2191,7 @@
     if HAVE_FCHOWNAT:
         c_fchownat = external('fchownat',
             [rffi.INT, rffi.CCHARP, rffi.INT, rffi.INT, rffi.INT], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO,)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def fchownat(path, owner, group, dir_fd=AT_FDCWD,
                 follow_symlinks=True, empty_path=False):
    @@ -2175,7 +2206,7 @@
     if HAVE_FEXECVE:
         c_fexecve = external('fexecve',
             [rffi.INT, rffi.CCHARPP, rffi.CCHARPP], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def fexecve(fd, args, env):
             envstrs = []
    @@ -2196,7 +2227,7 @@
         c_linkat = external(
             'linkat',
             [rffi.INT, rffi.CCHARP, rffi.INT, rffi.CCHARP, rffi.INT], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def linkat(src, dst, src_dir_fd=AT_FDCWD, dst_dir_fd=AT_FDCWD,
                 follow_symlinks=True):
    @@ -2290,7 +2321,7 @@
     if HAVE_MKDIRAT:
         c_mkdirat = external('mkdirat',
             [rffi.INT, rffi.CCHARP, rffi.INT], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def mkdirat(pathname, mode, dir_fd=AT_FDCWD):
             error = c_mkdirat(dir_fd, pathname, mode)
    @@ -2299,7 +2330,7 @@
     if HAVE_UNLINKAT:
         c_unlinkat = external('unlinkat',
             [rffi.INT, rffi.CCHARP, rffi.INT], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def unlinkat(pathname, dir_fd=AT_FDCWD, removedir=False):
             flag = AT_REMOVEDIR if removedir else 0
    @@ -2337,7 +2368,7 @@
         c_renameat = external(
             'renameat',
             [rffi.INT, rffi.CCHARP, rffi.INT, rffi.CCHARP], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def renameat(src, dst, src_dir_fd=AT_FDCWD, dst_dir_fd=AT_FDCWD):
             error = c_renameat(src_dir_fd, src, dst_dir_fd, dst)
    @@ -2347,7 +2378,7 @@
     if HAVE_SYMLINKAT:
         c_symlinkat = external('symlinkat',
             [rffi.CCHARP, rffi.INT, rffi.CCHARP], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def symlinkat(src, dst, dir_fd=AT_FDCWD):
             error = c_symlinkat(src, dir_fd, dst)
    @@ -2356,7 +2387,7 @@
     if HAVE_OPENAT:
         c_openat = external('openat',
             [rffi.INT, rffi.CCHARP, rffi.INT, rffi.MODE_T], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         @enforceargs(s_Str0, int, int, int, typecheck=False)
         def openat(path, flags, mode, dir_fd=AT_FDCWD):
    @@ -2366,7 +2397,7 @@
     if HAVE_MKFIFOAT:
         c_mkfifoat = external('mkfifoat',
             [rffi.INT, rffi.CCHARP, rffi.MODE_T], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def mkfifoat(path, mode, dir_fd=AT_FDCWD):
             error = c_mkfifoat(dir_fd, path, mode)
    @@ -2375,7 +2406,7 @@
     if HAVE_MKNODAT:
         c_mknodat = external('mknodat',
             [rffi.INT, rffi.CCHARP, rffi.MODE_T, rffi.INT], rffi.INT,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
     
         def mknodat(path, mode, device, dir_fd=AT_FDCWD):
             error = c_mknodat(dir_fd, path, mode, device)
    @@ -2687,29 +2718,29 @@
             [rffi.INT, rffi.CCHARP, rffi.CCHARP, rffi.SIZE_T, rffi.INT],
             rffi.INT,
             compilation_info=CConfig._compilation_info_,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
         c_setxattr = external('setxattr',
             [rffi.CCHARP, rffi.CCHARP, rffi.CCHARP, rffi.SIZE_T, rffi.INT],
             rffi.INT,
             compilation_info=CConfig._compilation_info_,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
         c_lsetxattr = external('lsetxattr',
             [rffi.CCHARP, rffi.CCHARP, rffi.CCHARP, rffi.SIZE_T, rffi.INT],
             rffi.INT,
             compilation_info=CConfig._compilation_info_,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
         c_fremovexattr = external('fremovexattr',
             [rffi.INT, rffi.CCHARP], rffi.INT,
             compilation_info=CConfig._compilation_info_,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
         c_removexattr = external('removexattr',
             [rffi.CCHARP, rffi.CCHARP], rffi.INT,
             compilation_info=CConfig._compilation_info_,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
         c_lremovexattr = external('lremovexattr',
             [rffi.CCHARP, rffi.CCHARP], rffi.INT,
             compilation_info=CConfig._compilation_info_,
    -        save_err=rffi.RFFI_SAVE_ERRNO)
    +        save_err=rffi.RFFI_SAVE_ERRNO, sandboxsafe="nowrite")
         c_flistxattr = external('flistxattr',
             [rffi.INT, rffi.CCHARP, rffi.SIZE_T], rffi.SSIZE_T,
             compilation_info=CConfig._compilation_info_,
    @@ -2724,6 +2755,7 @@
             save_err=rffi.RFFI_SAVE_ERRNO)
         buf_sizes = [256, XATTR_SIZE_MAX]
     
    +    @sandbox_review(reviewed=True)
         def fgetxattr(fd, name):
             for size in buf_sizes:
                 with rffi.scoped_alloc_buffer(size) as buf:
    @@ -2738,6 +2770,7 @@
             else:
                 raise OSError(errno.ERANGE, 'fgetxattr failed')
     
    +    @sandbox_review(reviewed=True)
         def getxattr(path, name, follow_symlinks=True):
             for size in buf_sizes:
                 with rffi.scoped_alloc_buffer(size) as buf:
    @@ -2783,6 +2816,7 @@
             del result[-1]
             return result
     
    +    @sandbox_review(reviewed=True)
         def flistxattr(fd):
             for size in buf_sizes:
                 with rffi.scoped_alloc_buffer(size) as buf:
    @@ -2796,6 +2830,7 @@
             else:
                 raise OSError(errno.ERANGE, 'flistxattr failed')
     
    +    @sandbox_review(reviewed=True)
         def listxattr(path, follow_symlinks=True):
             for size in buf_sizes:
                 with rffi.scoped_alloc_buffer(size) as buf:
    diff --git a/rpython/rlib/rposix_environ.py b/rpython/rlib/rposix_environ.py
    --- a/rpython/rlib/rposix_environ.py
    +++ b/rpython/rlib/rposix_environ.py
    @@ -2,7 +2,7 @@
     import sys
     from rpython.annotator import model as annmodel
     from rpython.rlib._os_support import _WIN32, StringTraits, UnicodeTraits
    -from rpython.rlib.objectmodel import enforceargs
    +from rpython.rlib.objectmodel import enforceargs, sandbox_review
     # importing rposix here creates a cycle on Windows
     from rpython.rtyper.controllerentry import Controller
     from rpython.rtyper.lltypesystem import rffi, lltype
    @@ -148,6 +148,7 @@
             byname, eq = envkeepalive.bywname, u'='
             from rpython.rlib.rwin32 import lastSavedWindowsError as last_error
     
    +    @sandbox_review(reviewed=True)
         def envitems_llimpl():
             environ = get_environ()
             result = []
    @@ -162,11 +163,13 @@
                 i += 1
             return result
     
    +    @sandbox_review(reviewed=True)
         def getenv_llimpl(name):
             with traits.scoped_str2charp(name) as l_name:
                 l_result = getenv(l_name)
                 return traits.charp2str(l_result) if l_result else None
     
    +    @sandbox_review(reviewed=True)
         def putenv_llimpl(name, value):
             l_string = traits.str2charp(name + eq + value)
             error = rffi.cast(lltype.Signed, putenv(l_string))
    @@ -196,6 +199,7 @@
         os_unsetenv = llexternal('unsetenv', [rffi.CCHARP], rffi.INT,
                                       save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         def r_unsetenv(name):
             with rffi.scoped_str2charp(name) as l_name:
                 error = rffi.cast(lltype.Signed, os_unsetenv(l_name))
    diff --git a/rpython/rlib/rposix_stat.py b/rpython/rlib/rposix_stat.py
    --- a/rpython/rlib/rposix_stat.py
    +++ b/rpython/rlib/rposix_stat.py
    @@ -18,6 +18,7 @@
     
     from rpython.rlib._os_support import _preferred_traits, string_traits
     from rpython.rlib.objectmodel import specialize, we_are_translated, not_rpython
    +from rpython.rlib.objectmodel import sandbox_review
     from rpython.rtyper.lltypesystem import lltype, rffi
     from rpython.translator.tool.cbuild import ExternalCompilationInfo
     from rpython.rlib.rarithmetic import intmask
    @@ -534,6 +535,7 @@
                                   compilation_info=compilation_info,
                                   save_err=rffi.RFFI_SAVE_ERRNO)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('fstat')
     def fstat(fd):
         if not _WIN32:
    @@ -574,6 +576,7 @@
             finally:
                 lltype.free(info, flavor='raw')
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('stat')
     @specialize.argtype(0)
     def stat(path):
    @@ -587,6 +590,7 @@
             path = traits.as_str0(path)
             return win32_xstat(traits, path, traverse=True)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('lstat')
     @specialize.argtype(0)
     def lstat(path):
    @@ -639,12 +643,14 @@
                 handle_posix_error('fstatat', error)
                 return build_stat_result(stresult)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('fstatvfs')
     def fstatvfs(fd):
         with lltype.scoped_alloc(STATVFS_STRUCT.TO) as stresult:
             handle_posix_error('fstatvfs', c_fstatvfs(fd, stresult))
             return build_statvfs_result(stresult)
     
    + at sandbox_review(reviewed=True)
     @replace_os_function('statvfs')
     @specialize.argtype(0)
     def statvfs(path):
    diff --git a/rpython/rlib/rstring.py b/rpython/rlib/rstring.py
    --- a/rpython/rlib/rstring.py
    +++ b/rpython/rlib/rstring.py
    @@ -500,26 +500,34 @@
                                    (self.fname, self.original_base))
     
         def __init__(self, s, literal, base, fname, allow_underscores=False,
    -                 no_implicit_octal=False):
    +                 no_implicit_octal=False, start=0, end=-1):
             self.fname = fname
             sign = 1
    -        if s.startswith('-'):
    +        self.s = s
    +        self.start = start
    +        if end == -1:
    +            end = len(s)
    +        self.end = end
    +        self._strip_spaces()
    +        if self._startswith('-'):
                 sign = -1
    -            s = strip_spaces(s[1:])
    -        elif s.startswith('+'):
    -            s = strip_spaces(s[1:])
    +            self.start += 1
    +            self._strip_spaces()
    +        elif self._startswith('+'):
    +            self.start += 1
    +            self._strip_spaces()
             self.sign = sign
             self.original_base = base
             self.allow_underscores = allow_underscores
     
             if base == 0:
    -            if s.startswith('0x') or s.startswith('0X'):
    +            if self._startswith('0x') or self._startswith('0X'):
                     base = 16
    -            elif s.startswith('0b') or s.startswith('0B'):
    +            elif self._startswith('0b') or self._startswith('0B'):
                     base = 2
    -            elif s.startswith('0'): # also covers the '0o' case
    -                if no_implicit_octal and not (s.startswith('0o') or
    -                                              s.startswith('0O')):
    +            elif self._startswith('0'): # also covers the '0o' case
    +                if no_implicit_octal and not (self._startswith('0o') or
    +                                              self._startswith('0O')):
                         base = 1    # this makes only the digit '0' valid...
                     else:
                         base = 8
    @@ -530,30 +538,44 @@
             self.base = base
     
             # Leading underscores are not allowed
    -        if s.startswith('_'):
    +        if self._startswith('_'):
                 self.error()
     
    -        if base == 16 and (s.startswith('0x') or s.startswith('0X')):
    -            s = s[2:]
    -        if base == 8 and (s.startswith('0o') or s.startswith('0O')):
    -            s = s[2:]
    -        if base == 2 and (s.startswith('0b') or s.startswith('0B')):
    -            s = s[2:]
    -        if not s:
    +        if base == 16 and (self._startswith('0x') or self._startswith('0X')):
    +            self.start += 2
    +        if base == 8 and (self._startswith('0o') or self._startswith('0O')):
    +            self.start += 2
    +        if base == 2 and (self._startswith('0b') or self._startswith('0B')):
    +            self.start += 2
    +        if self.start == self.end:
                 self.error()
    -        self.s = s
    -        self.n = len(s)
    -        self.i = 0
    +        self.i = self.start
    +
    +    def _startswith(self, prefix):
    +        return startswith(self.s, prefix, start=self.start, end=self.end)
    +
    +    def _strip_spaces(self):
    +        # XXX this is not locale-dependent
    +        p = self.start
    +        q = self.end
    +        s = self.s
    +        while p < q and s[p] in ' \f\n\r\t\v':
    +            p += 1
    +        while p < q and s[q-1] in ' \f\n\r\t\v':
    +            q -= 1
    +        assert q >= p
    +        self.start = p
    +        self.end = q
     
         def rewind(self):
             self.i = 0
     
         def next_digit(self): # -1 => exhausted
    -        if self.i < self.n:
    +        if self.i < self.end:
                 c = self.s[self.i]
                 if self.allow_underscores and c == '_':
                     self.i += 1
    -                if self.i >= self.n:
    +                if self.i >= self.end:
                         self.error()
                     c = self.s[self.i]
                 digit = ord(c)
    @@ -576,7 +598,7 @@
             # After exhausting all n digits in next_digit(), you can walk them
             # again in reverse order by calling prev_digit() exactly n times
             i = self.i - 1
    -        assert i >= 0
    +        assert i >= self.start
             self.i = i
             c = self.s[i]
             if self.allow_underscores and c == '_':
    diff --git a/rpython/rlib/rtime.py b/rpython/rlib/rtime.py
    --- a/rpython/rlib/rtime.py
    +++ b/rpython/rlib/rtime.py
    @@ -236,6 +236,7 @@
             diff = a[0] - state.counter_start
         return float(diff) / state.divisor
     
    + at sandbox_review(reviewed=True)
     @replace_time_function('clock')
     def clock():
         if _WIN32:
    diff --git a/rpython/rlib/test/test_rarithmetic.py b/rpython/rlib/test/test_rarithmetic.py
    --- a/rpython/rlib/test/test_rarithmetic.py
    +++ b/rpython/rlib/test/test_rarithmetic.py
    @@ -337,6 +337,10 @@
             res = self.interpret(f, [123])
             assert res == 4 + 2
     
    +    def test_string_to_int_translates(self):
    +        def f(s):
    +            return string_to_int(str(s))
     +        assert self.interpret(f, [123]) == 123
     
     def test_int_real_union():
         from rpython.rtyper.lltypesystem.rffi import r_int_real
    diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py
    --- a/rpython/rtyper/lltypesystem/rffi.py
    +++ b/rpython/rtyper/lltypesystem/rffi.py
    @@ -102,7 +102,13 @@
                      is sandboxed.  If False, it will turn into a stdin/stdout
                      communication with the parent process.  If "check_caller",
                      it is like True but we call @sandbox_review(check_caller=True)
    -                 which means that we need to also check the callers.
    +                 which means that we need to also check the callers.  If
    +                 "nowrite", we don't need to check the callers.  The default
     +                 of False implies either "check_caller" or "nowrite",
     +                 depending on whether the function takes or returns pointer
     +                 arguments.  Use "nowrite" only if the external
    +                 function call will only *read* from 'char *' or other data
    +                 structures passed in.
     
         calling_conv: if 'unknown' or 'win', the C function is not directly seen
                       by the JIT.  If 'c', it can be seen (depending on
    @@ -344,13 +350,19 @@
             wrapper = sandbox_review(check_caller=True)(wrapper)
         elif sandboxsafe == 'abort':
             wrapper = sandbox_review(abort=True)(wrapper)
    +    elif sandboxsafe == 'nowrite':
    +        wrapper = sandbox_review(reviewed=True)(wrapper)
         else:
             assert isinstance(sandboxsafe, bool)
    -        wrapper = sandbox_review(reviewed=True)(wrapper)
    +        if sandboxsafe or (all(_sandbox_type_safe(ARG) for ARG in args) and
    +                           _sandbox_type_safe(result)):
    +            wrapper = sandbox_review(reviewed=True)(wrapper)
    +        else:
    +            wrapper = sandbox_review(check_caller=True)(wrapper)
         return wrapper
     
    -def sandbox_check_type(TYPE):
    -    return not isinstance(TYPE, lltype.Primitive) or TYPE == llmemory.Address
    +def _sandbox_type_safe(TYPE):
    +    return isinstance(TYPE, lltype.Primitive) and TYPE != llmemory.Address
     
     
     class CallbackHolder:
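
To illustrate the "sandboxsafe" values documented in the rffi.py docstring
above, here is a minimal, hypothetical sketch of two llexternal()
declarations.  The particular C functions chosen and the comments are
illustrative assumptions only, not code from this changeset:

    from rpython.rtyper.lltypesystem import rffi

    # access(const char *, int) only *reads* through its pointer argument,
    # so "nowrite" marks the wrapper as @sandbox_review(reviewed=True).
    c_access = rffi.llexternal('access', [rffi.CCHARP, rffi.INT], rffi.INT,
                               save_err=rffi.RFFI_SAVE_ERRNO,
                               sandboxsafe="nowrite")

    # readlink(const char *, char *, size_t) *writes* into its buffer
    # argument; with the default sandboxsafe=False and pointer arguments
    # present, the wrapper becomes @sandbox_review(check_caller=True), so
    # every caller must itself be reviewed.
    c_readlink = rffi.llexternal('readlink',
                                 [rffi.CCHARP, rffi.CCHARP, rffi.SIZE_T],
                                 rffi.SSIZE_T,
                                 save_err=rffi.RFFI_SAVE_ERRNO)
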
    diff --git a/rpython/translator/backendopt/all.py b/rpython/translator/backendopt/all.py
    --- a/rpython/translator/backendopt/all.py
    +++ b/rpython/translator/backendopt/all.py
    @@ -113,7 +113,7 @@
         if config.profile_based_inline and not secondary:
             threshold = config.profile_based_inline_threshold
             heuristic = get_function(config.profile_based_inline_heuristic)
    -        inline.instrument_inline_candidates(translator, graphs, threshold)
    +        inline.instrument_inline_candidates(graphs, threshold)
             counters = translator.driver_instrument_result(
                 config.profile_based_inline)
             n = len(counters)
    diff --git a/rpython/translator/backendopt/inline.py b/rpython/translator/backendopt/inline.py
    --- a/rpython/translator/backendopt/inline.py
    +++ b/rpython/translator/backendopt/inline.py
    @@ -548,8 +548,7 @@
         return (0.9999 * measure_median_execution_cost(graph) +
                 count), True       # may be NaN
     
    -def inlinable_static_callers(translator, graphs, store_calls=False,
    -                             ok_to_call=None):
    +def inlinable_static_callers(graphs, store_calls=False, ok_to_call=None):
         if ok_to_call is None:
             ok_to_call = set(graphs)
         result = []
    @@ -559,7 +558,6 @@
             else:
                 result.append((parentgraph, graph))
         #
    -    dont_inline = make_dont_inline_checker(translator)
         for parentgraph in graphs:
             for block in parentgraph.iterblocks():
                 for op in block.operations:
    @@ -567,12 +565,13 @@
                         funcobj = op.args[0].value._obj
                         graph = getattr(funcobj, 'graph', None)
                         if graph is not None and graph in ok_to_call:
    -                        if dont_inline(funcobj):
    +                        if getattr(getattr(funcobj, '_callable', None),
    +                                   '_dont_inline_', False):
                                 continue
                             add(parentgraph, block, op, graph)
         return result
     
    -def instrument_inline_candidates(translator, graphs, threshold):
    +def instrument_inline_candidates(graphs, threshold):
         cache = {None: False}
         def candidate(graph):
             try:
    @@ -582,7 +581,6 @@
                 cache[graph] = res
                 return res
         n = 0
    -    dont_inline = make_dont_inline_checker(translator)
         for parentgraph in graphs:
             for block in parentgraph.iterblocks():
                 ops = block.operations
    @@ -594,7 +592,8 @@
                         funcobj = op.args[0].value._obj
                         graph = getattr(funcobj, 'graph', None)
                         if graph is not None:
    -                        if dont_inline(funcobj):
    +                        if getattr(getattr(funcobj, '_callable', None),
    +                                   '_dont_inline_', False):
                                 continue
                         if candidate(graph):
                             tag = Constant('inline', Void)
    @@ -611,17 +610,6 @@
         return (hasattr(graph, 'func') and
                 getattr(graph.func, '_always_inline_', None))
     
    -def make_dont_inline_checker(translator):
    -    sandbox = translator.config.translation.sandbox
    -
    -    def dont_inline(funcobj):
    -        func = getattr(funcobj, '_callable', None)
    -        if sandbox:
    -            if hasattr(func, '_sandbox_review_'):
    -                return True
    -        return getattr(func, '_dont_inline_', False)
    -    return dont_inline
    -
     def auto_inlining(translator, threshold=None,
                       callgraph=None,
                       call_count_pred=None,
    @@ -633,7 +621,7 @@
         callers = {}     # {graph: {graphs-that-call-it}}
         callees = {}     # {graph: {graphs-that-it-calls}}
         if callgraph is None:
    -        callgraph = inlinable_static_callers(translator, translator.graphs)
    +        callgraph = inlinable_static_callers(translator.graphs)
         for graph1, graph2 in callgraph:
             callers.setdefault(graph2, {})[graph1] = True
             callees.setdefault(graph1, {})[graph2] = True
    @@ -739,8 +727,7 @@
                                     if not hasattr(graph, 'exceptiontransformed')])
         else:
             ok_to_call = None
    -    callgraph = inlinable_static_callers(translator, graphs,
    -                                         ok_to_call=ok_to_call)
    +    callgraph = inlinable_static_callers(graphs, ok_to_call=ok_to_call)
         count = auto_inlining(translator, threshold, callgraph=callgraph,
                               heuristic=heuristic,
                               call_count_pred=call_count_pred)
    diff --git a/rpython/translator/backendopt/test/test_inline.py b/rpython/translator/backendopt/test/test_inline.py
    --- a/rpython/translator/backendopt/test/test_inline.py
    +++ b/rpython/translator/backendopt/test/test_inline.py
    @@ -100,7 +100,7 @@
             call_count_pred = None
             if call_count_check:
                 call_count_pred = lambda lbl: True
    -            instrument_inline_candidates(t, t.graphs, threshold)
    +            instrument_inline_candidates(t.graphs, threshold)
     
             if remove_same_as:
                 for graph in t.graphs:
    diff --git a/rpython/translator/driver.py b/rpython/translator/driver.py
    --- a/rpython/translator/driver.py
    +++ b/rpython/translator/driver.py
    @@ -344,6 +344,12 @@
             rtyper = self.translator.buildrtyper()
             rtyper.specialize(dont_simplify_again=True)
     
    +        # we do the sandbox review checking here, before inlining graphs
    +        # inside each other (and later generating extra graphs for the GC).
    +        if self.config.translation.sandbox:
    +            from rpython.translator.sandbox import graphchecker
    +            graphchecker.check_all_graphs(self.translator)
    +
         @taskdef([RTYPE], "JIT compiler generation")
         def task_pyjitpl_lltype(self):
             """ Generate bytecodes for JIT and flow the JIT helper functions
    @@ -412,10 +418,6 @@
             if translator.annotator is not None:
                 translator.frozen = True
     
    -        if self.config.translation.sandbox:
    -            from rpython.translator.sandbox import graphchecker
    -            graphchecker.check_all_graphs(self.translator)
    -
             standalone = self.standalone
             get_gchooks = self.extra.get('get_gchooks', lambda: None)
             gchooks = get_gchooks()
    diff --git a/rpython/translator/sandbox/graphchecker.py b/rpython/translator/sandbox/graphchecker.py
    --- a/rpython/translator/sandbox/graphchecker.py
    +++ b/rpython/translator/sandbox/graphchecker.py
    @@ -2,8 +2,92 @@
     This runs at the start of the database-c step, so it excludes the
     graphs produced later, notably for the GC.  These are "low-level"
     graphs that are assumed to be safe.
    +
     +Here again are the rules around this check.
    +
    +- any graph that contains only "safe" lloperations is itself "safe".
    +  The "safe" lloperations are the ones marked "tryfold" in
    +  rtyper.lltypesystem.lloperation, plus the ones listed explicitly below,
    +  plus a few variants of specific operations coded in graph_in_unsafe().
    +
    +- any graph decorated with @objectmodel.sandbox_review() is "safe".
    +  The different flags we can pass to @sandbox_review() are explained next,
    +  but the decorated graph is itself always "safe".
    +
    +- "unsafe" operations are all special rare operations, plus most importantly
    +  all *writes* into raw memory.  We assume that *reads* from anywhere are
    +  OK to ignore: any information that reaches the sandboxed process can be
    +  detected and used by anything that runs inside this process (i.e. there
    +  is no really "secret" data inside the sandboxed subprocess itself).
    +  At worst, random reads will lead to segfaults.  But random writes are not
    +  safe because that could corrupt memory---e.g. overwrite some GC object
    +  header, or even (although I'm not sure how) actually cause the sandboxed
    +  process to misbehave in more important ways like doing actual system calls
    +  that are supposed to be forbidden.
    +
    +- the decorator @sandbox_review(check_caller=True) means that the graph is
    +  safe, but any call to this graph from somewhere else is an unsafe operation.
    +  This forces all callers to also be reviewed and marked with some form of
    +  @sandbox_review().
    +
    +- @sandbox_review(reviewed=True) means that the graph is safe and all
    +  calls to this graph are also safe.  This should only be used on functions
    +  that do internally "unsafe" stuff like writing to raw memory but don't
    +  take arguments that could lead them to do bogus things.  A typical counter-
    +  example is a function that takes a raw pointer and that writes something to
    +  it; this should *not* be marked with reviewed=True.  On the other hand, many
    +  RPython wrappers to external C functions can be reviewed=True because
    +  they translate GC-safe information (say an RPython string) to raw memory,
    +  do the call, and translate the result back to GC-safe information.
    +
    +- @sandbox_review(abort=True) is reserved for cases where calling this
    +  function at runtime should just immediately abort the subprocess.
    +
    +Note that all flags above should be considered independently of what the
    +actual C function calls are supposed to do.  For example, the RPython
    +wrapper rposix.system() is something you definitely don't want to allow as-is,
    +but the wrapper and the call to the C function are fine.  It's up to the
    +controlling process to refuse to reply to the system() external call
    +(either by having it return ENOSYS or a similar error, or by killing the
    +sandboxed process completely).
    +
    +Like system(), all calls to external C functions are *by default* removed and
    +turned into I/O on stdin/stdout, asking the parent controlling process what
     +  to do.  This is controlled in more detail by rffi.llexternal().  It takes
    +its own argument "sandboxsafe", which can be one of the following values:
    +
    +- sandboxsafe=False (the default): the external C call is not done but turned
    +  into I/O on stdin/stdout.  Moreover, *if* the function takes or returns a
    +  raw pointer, then it is flagged with @sandbox_review(check_caller=True) to
    +  ensure that all callers do something sane with these raw pointers.  If
    +  the C function only takes and returns integer or float arguments, there is
    +  no real need, so in this case we flag @sandbox_review(reviewed=True) instead.
    +
    +- sandboxsafe=True: means the external call should be done straight from the
    +  sandboxed process.  Reserved for specific functions like rposix.c_strerror(),
    +  or some memory-manipulation functions used by the GC itself.
    +
    +- sandboxsafe="abort": like @sandbox_review(abort=True).
    +
    +- sandboxsafe="check_caller": forces @sandbox_review(check_caller=True).
    +  Useful for llexternal() functions that appear to return an integer but
    +  that's really some address that must be carefully managed.
    +
    +- sandboxsafe="nowrite": forces @sandbox_review(reviewed=True).  This is OK
    +  for C functions that have pointer arguments but none of them can point
    +  to anything that will be written to (hence the name).  The idea is that
    +  for the common case of a function that takes a "const char *" argument,
    +  we should just mark that function as reviewed=True, because it is safe:
    +  the controller process will at most read things from the sandboxed process,
    +  namely what the pointer points to, but it should not attempt to do any
    +  write into the sandboxed process' memory.  Typically the caller itself
    +  calls rffi.str2charp() and rffi.free_charp() around the call, but these
    +  are also @sandbox_review(reviewed=True) helpers, so such a caller doesn't
    +  need to be explicitly reviewed.
    +
     """
     
    +
     from rpython.flowspace.model import SpaceOperation, Constant
     from rpython.rtyper.rmodel import inputconst
     from rpython.rtyper.lltypesystem import lltype, llmemory, rstr
    @@ -105,7 +189,7 @@
                 elif opname in ('cast_ptr_to_adr', 'force_cast',
                                 'cast_int_to_ptr'):
                     if is_gc_ptr(op.result.concretetype):
    -                    return "result is a GC ptr: %r" % (opname,)
    +                    return "result is a GC ptr: %r" % (op,)
     
                 else:
                     return "unsupported llop: %r" % (opname,)
    diff --git a/rpython/translator/sandbox/sandlib.py b/rpython/translator/sandbox/sandlib.py
    deleted file mode 100644
    --- a/rpython/translator/sandbox/sandlib.py
    +++ /dev/null
    @@ -1,517 +0,0 @@
    -"""
    -A Python library to execute and communicate with a subprocess that
    -was translated from RPython code with --sandbox.  This library is
    -for the outer process, which can run CPython or PyPy.
    -"""
    -
    -import sys, os, posixpath, errno, stat, time
    -import subprocess
    -from rpython.tool.killsubprocess import killsubprocess
    -from rpython.translator.sandbox.vfs import UID, GID
    -import py
    -
    -WIN32 = os.name == "nt"
    -
    -
    -def create_log():
    -    """Make and return a log for the sandbox to use, if needed."""
    -    from rpython.tool.ansi_print import AnsiLogger
    -    return AnsiLogger("sandlib")
    -
    -def write_exception(g, exception, tb=None):
    -    for i, excclass in EXCEPTION_TABLE:
    -        if isinstance(exception, excclass):
    -            write_message(g, i)
    -            if excclass is OSError:
    -                error = exception.errno
    -                if error is None:
    -                    error = errno.EPERM
    -                write_message(g, error)
    -            g.flush()
    -            break
    -    else:
    -        # just re-raise the exception
    -        raise exception.__class__, exception, tb
    -
    -def shortrepr(x):
    -    r = repr(x)
    -    if len(r) >= 80:
    -        r = r[:20] + '...' + r[-8:]
    -    return r
    -
    -def signal_name(n):
    -    import signal
    -    for key, value in signal.__dict__.items():
    -        if key.startswith('SIG') and not key.startswith('SIG_') and value == n:
    -            return key
    -    return 'signal %d' % (n,)
    -
    -
    -class SandboxedProc(object):
    -    """Base class to control a sandboxed subprocess.
    -    Inherit from this class and implement all the do_xxx() methods
    -    for the external functions xxx that you want to support.
    -    """
    -    debug = False
    -    log = None
    -    os_level_sandboxing = False   # Linux only: /proc/PID/seccomp
    -
    -    def __init__(self, args, executable=None):
    -        """'args' should a sequence of argument for the subprocess,
    -        starting with the full path of the executable.
    -        """
    -        self.popen = subprocess.Popen(args, executable=executable,
    -                                      bufsize=-1,
    -                                      stdin=subprocess.PIPE,
    -                                      stdout=subprocess.PIPE,
    -                                      close_fds=False if WIN32 else True,
    -                                      env={})
    -        self.popenlock = None
    -        self.currenttimeout = None
    -        self.currentlyidlefrom = None
    -
    -        if self.debug:
    -            self.log = create_log()
    -
    -    def withlock(self, function, *args, **kwds):
    -        lock = self.popenlock
    -        if lock is not None:
    -            lock.acquire()
    -        try:
    -            return function(*args, **kwds)
    -        finally:
    -            if lock is not None:
    -                lock.release()
    -
    -    def settimeout(self, timeout, interrupt_main=False):
    -        """Start a timeout that will kill the subprocess after the given
    -        amount of time.  Only one timeout can be active at a time.
    -        """
    -        import thread
    -
    -        def _waiting_thread():
    -            while True:
    -                while self.currentlyidlefrom is not None:
    -                    time.sleep(1)   # can't timeout while idle
    -                t = self.currenttimeout
    -                if t is None:
    -                    return  # cancelled
    -                delay = t - time.time()
    -                if delay <= 0.0:
    -                    break   # expired!
    -                time.sleep(min(delay*1.001, 1))
    -            if self.log:
    -                self.log.timeout("timeout!")
    -            self.kill()
    -            #if interrupt_main:
    -            #    if hasattr(os, 'kill'):
    -            #        import signal
    -            #        os.kill(os.getpid(), signal.SIGINT)
    -            #    else:
    -            #        thread.interrupt_main()
    -
    -        def _settimeout():
    -            need_new_thread = self.currenttimeout is None
    -            self.currenttimeout = time.time() + timeout
    -            if need_new_thread:
    -                thread.start_new_thread(_waiting_thread, ())
    -
    -        if self.popenlock is None:
    -            self.popenlock = thread.allocate_lock()
    -        self.withlock(_settimeout)
    -
    -    def canceltimeout(self):
    -        """Cancel the current timeout."""
    -        self.currenttimeout = None
    -        self.currentlyidlefrom = None
    -
    -    def enter_idle(self):
    -        self.currentlyidlefrom = time.time()
    -
    -    def leave_idle(self):
    -        def _postpone_timeout():
    -            t = self.currentlyidlefrom
    -            if t is not None and self.currenttimeout is not None:
    -                self.currenttimeout += time.time() - t
    -        try:
    -            self.withlock(_postpone_timeout)
    -        finally:
    -            self.currentlyidlefrom = None
    -
    -    def poll(self):
    -        returncode = self.withlock(self.popen.poll)
    -        if returncode is not None:
    -            self.canceltimeout()
    -        return returncode
    -
    -    def wait(self):
    -        returncode = self.withlock(self.popen.wait)
    -        if returncode is not None:
    -            self.canceltimeout()
    -        return returncode
    -
    -    def kill(self):
    -        self.withlock(killsubprocess, self.popen)
    -
    -    def handle_forever(self):
    -        returncode = self.handle_until_return()
    -        if returncode != 0:
    -            raise OSError("the sandboxed subprocess exited with code %d" % (
    -                returncode,))
    -
    -    def handle_until_return(self):
    -        child_stdin  = self.popen.stdin
    -        child_stdout = self.popen.stdout
    -        if self.os_level_sandboxing and sys.platform.startswith('linux'):
    -            # rationale: we wait until the child process started completely,
    -            # letting the C library do any system calls it wants for
    -            # initialization.  When the RPython code starts up, it quickly
    -            # does its first system call.  At this point we turn seccomp on.
    -            import select
    -            select.select([child_stdout], [], [])
    -            f = open('/proc/%d/seccomp' % self.popen.pid, 'w')
    -            print >> f, 1
    -            f.close()
    -        while True:
    -            try:
    -                fnname = read_message(child_stdout)
    -                args   = read_message(child_stdout)
    -            except EOFError as e:
    -                break
    -            if self.log and not self.is_spam(fnname, *args):
    -                self.log.call('%s(%s)' % (fnname,
    -                                     ', '.join([shortrepr(x) for x in args])))
    -            try:
    -                answer, resulttype = self.handle_message(fnname, *args)
    -            except Exception as e:
    -                tb = sys.exc_info()[2]
    -                write_exception(child_stdin, e, tb)
    -                if self.log:
    -                    if str(e):
    -                        self.log.exception('%s: %s' % (e.__class__.__name__, e))
    -                    else:
    -                        self.log.exception('%s' % (e.__class__.__name__,))
    -            else:
    -                if self.log and not self.is_spam(fnname, *args):
    -                    self.log.result(shortrepr(answer))
    -                try:
    -                    write_message(child_stdin, 0)  # error code - 0 for ok
    -                    write_message(child_stdin, answer, resulttype)
    -                    child_stdin.flush()
    -                except (IOError, OSError):
    -                    # likely cause: subprocess is dead, child_stdin closed
    -                    if self.poll() is not None:
    -                        break
    -                    else:
    -                        raise
    -        returncode = self.wait()
    -        return returncode
    -
    -    def is_spam(self, fnname, *args):
    -        # To hide the spamming amounts of reads and writes to stdin and stdout
    -        # in interactive sessions
    -        return (fnname in ('ll_os.ll_os_read', 'll_os.ll_os_write') and
    -                args[0] in (0, 1, 2))
    -
    -    def handle_message(self, fnname, *args):
    -        if '__' in fnname:
    -            raise ValueError("unsafe fnname")
    -        try:
    -            handler = getattr(self, 'do_' + fnname.replace('.', '__'))
    -        except AttributeError:
    -            raise RuntimeError("no handler for this function")
    -        resulttype = getattr(handler, 'resulttype', None)
    -        return handler(*args), resulttype
    -
    -
    -class SimpleIOSandboxedProc(SandboxedProc):
    -    """Control a sandboxed subprocess which is only allowed to read from
    -    its stdin and write to its stdout and stderr.
    -    """
    -    _input = None
    -    _output = None
    -    _error = None
    -    inputlogfile = None
    -
    -    def communicate(self, input=None):
    -        """Send data to stdin. Read data from stdout and stderr,
    -        until end-of-file is reached. Wait for process to terminate.
    -        """
    -        import cStringIO
    -        if input:
    -            if isinstance(input, str):
    -                input = cStringIO.StringIO(input)
    -            self._input = input
    -        self._output = cStringIO.StringIO()
    -        self._error = cStringIO.StringIO()
    -        self.handle_forever()
    -        output = self._output.getvalue()
    -        self._output = None
    -        error = self._error.getvalue()
    -        self._error = None
    -        return (output, error)
    -
    -    def interact(self, stdin=None, stdout=None, stderr=None):
    -        """Interact with the subprocess.  By default, stdin, stdout and
    -        stderr are set to the ones from 'sys'."""
    -        import sys
    -        self._input  = stdin  or sys.stdin
    -        self._output = stdout or sys.stdout
    -        self._error  = stderr or sys.stderr
    -        returncode = self.handle_until_return()
    -        if returncode != 0:
    -            if os.name == 'posix' and returncode < 0:
    -                print >> self._error, "[Subprocess killed by %s]" % (
    -                    signal_name(-returncode),)
    -            else:
    -                print >> self._error, "[Subprocess exit code: %d]" % (
    -                    returncode,)
    -        self._input = None
    -        self._output = None
    -        self._error = None
    -        return returncode
    -
    -    def setlogfile(self, filename):
    -        self.inputlogfile = open(filename, 'a')
    -
    -    def do_ll_os__ll_os_read(self, fd, size):
    -        if fd == 0:
    -            if self._input is None:
    -                return ""
    -            elif (getattr(self, 'virtual_console_isatty', False) or
    -                  self._input.isatty()):
    -                # don't wait for all 'size' chars if reading from a tty,
    -                # to avoid blocking.  Instead, stop after reading a line.
    -
    -                # For now, waiting at the interactive console is the
    -                # only time that counts as idle.
    -                self.enter_idle()
    -                try:
    -                    inputdata = self._input.readline(size)
    -                finally:
    -                    self.leave_idle()
    -            else:
    -                inputdata = self._input.read(size)
    -            if self.inputlogfile is not None:
    -                self.inputlogfile.write(inputdata)
    -            return inputdata
    -        raise OSError("trying to read from fd %d" % (fd,))
    -
    -    def do_ll_os__ll_os_write(self, fd, data):
    -        if fd == 1:
    -            self._output.write(data)
    -            return len(data)
    -        if fd == 2:
    -            self._error.write(data)
    -            return len(data)
    -        raise OSError("trying to write to fd %d" % (fd,))
    -
    -    # let's allow access to the real time
    -    def do_ll_time__ll_time_sleep(self, seconds):
    -        # regularly check for timeouts that could have killed the
    -        # subprocess
    -        while seconds > 5.0:
    -            time.sleep(5.0)
    -            seconds -= 5.0
    -            if self.poll() is not None:   # subprocess finished?
    -                return
    -        time.sleep(seconds)
    -
    -    def do_ll_time__ll_time_time(self):
    -        return time.time()
    -
    -    def do_ll_time__ll_time_clock(self):
    -        # measuring the CPU time of the controller process has
    -        # not much meaning, so let's emulate this and return
    -        # the real time elapsed since the first call to clock()
    -        # (this is one of the behaviors allowed by the docs)
    -        try:
    -            starttime = self.starttime
    -        except AttributeError:
    -            starttime = self.starttime = time.time()
    -        return time.time() - starttime
    -
    -class VirtualizedSandboxedProc(SandboxedProc):
    -    """Control a virtualized sandboxed process, which is given a custom
    -    view on the filesystem and a custom environment.
    -    """
    -    virtual_env = {}
    -    virtual_cwd = '/tmp'
    -    virtual_console_isatty = False
    -    virtual_fd_range = range(3, 50)
    -
    -    def __init__(self, *args, **kwds):
    -        super(VirtualizedSandboxedProc, self).__init__(*args, **kwds)
    -        self.virtual_root = self.build_virtual_root()
    -        self.open_fds = {}   # {virtual_fd: (real_file_object, node)}
    -
    -    def build_virtual_root(self):
    -        raise NotImplementedError("must be overridden")
    -
    -    def do_ll_os__ll_os_envitems(self):
    -        return self.virtual_env.items()
    -
    -    def do_ll_os__ll_os_getenv(self, name):
    -        return self.virtual_env.get(name)
    -
    -    def translate_path(self, vpath):
    -        # XXX this assumes posix vpaths for now, but os-specific real paths
    -        vpath = posixpath.normpath(posixpath.join(self.virtual_cwd, vpath))
    -        dirnode = self.virtual_root
    -        components = [component for component in vpath.split('/')]
    -        for component in components[:-1]:
    -            if component:
    -                dirnode = dirnode.join(component)
    -                if dirnode.kind != stat.S_IFDIR:
    -                    raise OSError(errno.ENOTDIR, component)
    -        return dirnode, components[-1]
    -
    -    def get_node(self, vpath):
    -        dirnode, name = self.translate_path(vpath)
    -        if name:
    -            node = dirnode.join(name)
    -        else:
    -            node = dirnode
    -        if self.log:
    -            self.log.vpath('%r => %r' % (vpath, node))
    -        return node
    -
    -    def do_ll_os__ll_os_stat(self, vpathname):
    -        node = self.get_node(vpathname)
    -        return node.stat()
    -    do_ll_os__ll_os_stat.resulttype = RESULTTYPE_STATRESULT
    -
    -    do_ll_os__ll_os_lstat = do_ll_os__ll_os_stat
    -
    -    def do_ll_os__ll_os_access(self, vpathname, mode):
    -        try:
    -            node = self.get_node(vpathname)
    -        except OSError as e:
    -            if e.errno == errno.ENOENT:
    -                return False
    -            raise
    -        return node.access(mode)
    -
    -    def do_ll_os__ll_os_isatty(self, fd):
    -        return self.virtual_console_isatty and fd in (0, 1, 2)
    -
    -    def allocate_fd(self, f, node=None):
    -        for fd in self.virtual_fd_range:
    -            if fd not in self.open_fds:
    -                self.open_fds[fd] = (f, node)
    -                return fd
    -        else:
    -            raise OSError(errno.EMFILE, "trying to open too many files")
    -
    -    def get_fd(self, fd, throw=True):
    -        """Get the objects implementing file descriptor `fd`.
    -
    -        Returns a pair, (open file, vfs node)
    -
    -        `throw`: if true, raise OSError for bad fd, else return (None, None).
    -        """
    -        try:
    -            f, node = self.open_fds[fd]
    -        except KeyError:
    -            if throw:
    -                raise OSError(errno.EBADF, "bad file descriptor")
    -            return None, None
    -        return f, node
    -
    -    def get_file(self, fd, throw=True):
    -        """Return the open file for file descriptor `fd`."""
    -        return self.get_fd(fd, throw)[0]
    -
    -    def do_ll_os__ll_os_open(self, vpathname, flags, mode):
    -        node = self.get_node(vpathname)
    -        if flags & (os.O_RDONLY|os.O_WRONLY|os.O_RDWR) != os.O_RDONLY:
    -            raise OSError(errno.EPERM, "write access denied")
    -        # all other flags are ignored
    -        f = node.open()
    -        return self.allocate_fd(f, node)
    -
    -    def do_ll_os__ll_os_close(self, fd):
    -        f = self.get_file(fd)
    -        del self.open_fds[fd]
    -        f.close()
    -
    -    def do_ll_os__ll_os_read(self, fd, size):
    -        f = self.get_file(fd, throw=False)
    -        if f is None:
    -            return super(VirtualizedSandboxedProc, self).do_ll_os__ll_os_read(
    -                fd, size)
    -        else:
    -            if not (0 <= size <= sys.maxint):
    -                raise OSError(errno.EINVAL, "invalid read size")
    -            # don't try to read more than 256KB at once here
    -            return f.read(min(size, 256*1024))
    -
    -    def do_ll_os__ll_os_fstat(self, fd):
    -        f, node = self.get_fd(fd)
    -        return node.stat()
    -    do_ll_os__ll_os_fstat.resulttype = RESULTTYPE_STATRESULT
    -
    -    def do_ll_os__ll_os_lseek(self, fd, pos, how):
    -        f = self.get_file(fd)
    -        f.seek(pos, how)
    -        return f.tell()
    -    do_ll_os__ll_os_lseek.resulttype = RESULTTYPE_LONGLONG
    -
    -    def do_ll_os__ll_os_getcwd(self):
    -        return self.virtual_cwd
    -
    -    def do_ll_os__ll_os_strerror(self, errnum):
    -        # unsure if this shouldn't be considered safeboxsafe
    -        return os.strerror(errnum) or ('Unknown error %d' % (errnum,))
    -
    -    def do_ll_os__ll_os_listdir(self, vpathname):
    -        node = self.get_node(vpathname)
    -        return node.keys()
    -
    -    def do_ll_os__ll_os_unlink(self, vpathname):
    -        raise OSError(errno.EPERM, "write access denied")
    -
    -    def do_ll_os__ll_os_mkdir(self, vpathname, mode=None):
    -        raise OSError(errno.EPERM, "write access denied")
    -
    -    def do_ll_os__ll_os_getuid(self):
    -        return UID
    -    do_ll_os__ll_os_geteuid = do_ll_os__ll_os_getuid
    -
    -    def do_ll_os__ll_os_getgid(self):
    -        return GID
    -    do_ll_os__ll_os_getegid = do_ll_os__ll_os_getgid
    -
    -
    -class VirtualizedSocketProc(VirtualizedSandboxedProc):
    -    """ Extends VirtualizedSandboxProc with socket
    -    options, ie tcp://host:port as args to os.open
    -    """
    -    def __init__(self, *args, **kwds):
    -        super(VirtualizedSocketProc, self).__init__(*args, **kwds)
    -        self.sockets = {}
    -
    -    def do_ll_os__ll_os_open(self, name, flags, mode):
    -        if not name.startswith("tcp://"):
    -            return super(VirtualizedSocketProc, self).do_ll_os__ll_os_open(
    -                name, flags, mode)
    -        import socket
    -        host, port = name[6:].split(":")
    -        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    -        sock.connect((host, int(port)))
    -        fd = self.allocate_fd(sock)
    -        self.sockets[fd] = True
    -        return fd
    -
    -    def do_ll_os__ll_os_read(self, fd, size):
    -        if fd in self.sockets:
    -            return self.get_file(fd).recv(size)
    -        return super(VirtualizedSocketProc, self).do_ll_os__ll_os_read(
    -            fd, size)
    -
    -    def do_ll_os__ll_os_write(self, fd, data):
    -        if fd in self.sockets:
    -            return self.get_file(fd).send(data)
    -        return super(VirtualizedSocketProc, self).do_ll_os__ll_os_write(
    -            fd, data)
    -
    diff --git a/rpython/translator/sandbox/test/test_graphchecker.py b/rpython/translator/sandbox/test/test_graphchecker.py
    --- a/rpython/translator/sandbox/test/test_graphchecker.py
    +++ b/rpython/translator/sandbox/test/test_graphchecker.py
    @@ -52,7 +52,11 @@
                 return llop.force_cast(lltype.Signed, x)
             self.check_safe(f, [float])
             self.check_safe(f, [lltype.Ptr(SRAW)])
    -        self.check_unsafe("argument is a GC ptr", f, [lltype.Ptr(SGC)])
    +        self.check_safe(f, [lltype.Ptr(SGC)])
    +        #
    +        def g(x):
    +            return llop.force_cast(lltype.Ptr(SGC), x)
    +        self.check_unsafe("result is a GC ptr", g, [int])
     
         def test_direct_call_to_check_caller(self):
             @sandbox_review(check_caller=True)
    diff --git a/rpython/translator/sandbox/vfs.py b/rpython/translator/sandbox/vfs.py
    deleted file mode 100644
    --- a/rpython/translator/sandbox/vfs.py
    +++ /dev/null
    @@ -1,137 +0,0 @@
    -import os
    -import stat, errno
    -
    -UID = 1000
    -GID = 1000
    -ATIME = MTIME = CTIME = 0
    -INO_COUNTER = 0
    -
    -
    -class FSObject(object):
    -    read_only = True
    -
    -    def stat(self):
    -        try:
    -            st_ino = self._st_ino
    -        except AttributeError:
    -            global INO_COUNTER
    -            INO_COUNTER += 1
    -            st_ino = self._st_ino = INO_COUNTER
    -        st_dev = 1
    -        st_nlink = 1
    -        st_size = self.getsize()
    -        st_mode = self.kind
    -        st_mode |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
    -        if stat.S_ISDIR(self.kind):
    -            st_mode |= stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    -        if self.read_only:
    -            st_uid = 0       # read-only files are virtually owned by root
    -            st_gid = 0
    -        else:
    -            st_uid = UID     # read-write files are owned by this virtual user
    -            st_gid = GID
    -        st_atime = ATIME
    -        st_mtime = MTIME
    -        st_ctime = CTIME
    -        return os.stat_result(
    -            (st_mode, st_ino, st_dev, st_nlink, st_uid, st_gid,
    -             st_size, st_atime, st_mtime, st_ctime))
    -
    -    def access(self, mode):
    -        s = self.stat()
    -        e_mode = s.st_mode & stat.S_IRWXO
    -        if UID == s.st_uid:
    -            e_mode |= (s.st_mode & stat.S_IRWXU) >> 6
    -        if GID == s.st_gid:
    -            e_mode |= (s.st_mode & stat.S_IRWXG) >> 3
    -        return (e_mode & mode) == mode
    -
    -    def keys(self):
    -        raise OSError(errno.ENOTDIR, self)
    -
    -    def open(self):
    -        raise OSError(errno.EACCES, self)
    -
    -    def getsize(self):
    -        return 0
    -
    -
    -class Dir(FSObject):
    -    kind = stat.S_IFDIR
    -    def __init__(self, entries={}):
    -        self.entries = entries
    -    def keys(self):
    -        return self.entries.keys()
    -    def join(self, name):
    -        try:
    -            return self.entries[name]
    -        except KeyError:
    -            raise OSError(errno.ENOENT, name)
    -
    -class RealDir(Dir):
    -    # If show_dotfiles=False, we pretend that all files whose name starts
    -    # with '.' simply don't exist.  If follow_links=True, then symlinks are
    -    # transparently followed (they look like a regular file or directory to
    -    # the sandboxed process).  If follow_links=False, the subprocess is
    -    # not allowed to access them at all.  Finally, exclude is a list of
    -    # file endings that we filter out (note that we also filter out files
    -    # with the same ending but a different case, to be safe).
    -    def __init__(self, path, show_dotfiles=False, follow_links=False,
    -                 exclude=[]):
    -        self.path = path
    -        self.show_dotfiles = show_dotfiles
    -        self.follow_links  = follow_links
    -        self.exclude       = [excl.lower() for excl in exclude]
    -    def __repr__(self):
-        return '<RealDir %s>' % (self.path,)
    -    def keys(self):
    -        names = os.listdir(self.path)
    -        if not self.show_dotfiles:
    -            names = [name for name in names if not name.startswith('.')]
    -        for excl in self.exclude:
    -            names = [name for name in names if not name.lower().endswith(excl)]
    -        return names
    -    def join(self, name):
    -        if name.startswith('.') and not self.show_dotfiles:
    -            raise OSError(errno.ENOENT, name)
    -        for excl in self.exclude:
    -            if name.lower().endswith(excl):
    -                raise OSError(errno.ENOENT, name)
    -        path = os.path.join(self.path, name)
    -        if self.follow_links:
    -            st = os.stat(path)
    -        else:
    -            st = os.lstat(path)
    -        if stat.S_ISDIR(st.st_mode):
    -            return RealDir(path, show_dotfiles = self.show_dotfiles,
    -                                 follow_links  = self.follow_links,
    -                                 exclude       = self.exclude)
    -        elif stat.S_ISREG(st.st_mode):
    -            return RealFile(path)
    -        else:
    -            # don't allow access to symlinks and other special files
    -            raise OSError(errno.EACCES, path)
    -
    -class File(FSObject):
    -    kind = stat.S_IFREG
    -    def __init__(self, data=''):
    -        self.data = data
    -    def getsize(self):
    -        return len(self.data)
    -    def open(self):
    -        import cStringIO
    -        return cStringIO.StringIO(self.data)
    -
    -class RealFile(File):
    -    def __init__(self, path, mode=0):
    -        self.path = path
    -        self.kind |= mode
    -    def __repr__(self):
-        return '<RealFile %s>' % (self.path,)
    -    def getsize(self):
    -        return os.stat(self.path).st_size
    -    def open(self):
    -        try:
    -            return open(self.path, "rb")
    -        except IOError as e:
    -            raise OSError(e.errno, "open failed")
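
For context on the virtual-filesystem API deleted above, here is a minimal sketch of how its classes were typically composed into a read-only tree for the sandboxed process; the helper name and the concrete layout are invented for illustration, and the import only works on revisions that still ship vfs.py:

    from rpython.translator.sandbox.vfs import Dir, File, RealDir

    def build_virtual_root(real_lib_dir):
        # everything below is read-only from the sandboxed process' point of view
        return Dir({
            'lib': RealDir(real_lib_dir, exclude=['.pyc']),   # mirror of a real dir
            'tmp': Dir({}),                                   # empty virtual dir
            'etc': Dir({'motd': File('hello from the vfs\n')}),
        })
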
    
    From pypy.commits at gmail.com  Tue Aug 27 06:03:41 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Tue, 27 Aug 2019 03:03:41 -0700 (PDT)
    Subject: [pypy-commit] pypy sandbox-2: More sandbox_review in rlib for py3.6
    Message-ID: <5d64fffd.1c69fb81.54359.c1a5@mx.google.com>
    
    Author: Armin Rigo 
    Branch: sandbox-2
    Changeset: r97298:a066296c84d2
    Date: 2019-08-27 11:25 +0200
    http://bitbucket.org/pypy/pypy/changeset/a066296c84d2/
    
    Log:	More sandbox_review in rlib for py3.6
    
    diff --git a/rpython/rlib/rfile.py b/rpython/rlib/rfile.py
    --- a/rpython/rlib/rfile.py
    +++ b/rpython/rlib/rfile.py
    @@ -5,7 +5,7 @@
     
     import os, stat, errno, sys
     from rpython.rlib import rposix, rgc
    -from rpython.rlib.objectmodel import enforceargs
    +from rpython.rlib.objectmodel import enforceargs, sandbox_review
     from rpython.rlib.rarithmetic import intmask
     from rpython.rlib.rstring import StringBuilder
     from rpython.rtyper.lltypesystem import rffi, lltype
    @@ -242,6 +242,7 @@
         _newlinetypes = NEWLINE_UNKNOWN
         _skipnextlf = False
     
    +    @sandbox_review(check_caller=True)
         def __init__(self, ll_file, mode=None, close2=_fclose2):
             self._ll_file = ll_file
             if mode is not None:
    @@ -548,6 +549,7 @@
                     c_ungetc(c, self._ll_file)
             return res
     
    +    @sandbox_review(reviewed=True)
         def fileno(self):
             self._check_closed()
             return intmask(c_fileno(self._ll_file))
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -570,6 +570,7 @@
                            save_err=rffi.RFFI_SAVE_ERRNO)
     
         @enforceargs(int, int, None)
    +    @sandbox_review(reviewed=True)
         def pread(fd, count, offset):
             if count < 0:
                 raise OSError(errno.EINVAL, None)
    @@ -578,6 +579,7 @@
                 return buf.str(handle_posix_error('pread', c_pread(fd, void_buf, count, offset)))
     
         @enforceargs(int, None, None)
    +    @sandbox_review(reviewed=True)
         def pwrite(fd, data, offset):
             count = len(data)
             with rffi.scoped_nonmovingbuffer(data) as buf:
    @@ -2343,6 +2345,7 @@
             [rffi.INT, rffi.CCHARP, rffi.CCHARP, rffi.SIZE_T], rffi.SSIZE_T,
             save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         def readlinkat(pathname, dir_fd=AT_FDCWD):
             pathname = _as_bytes0(pathname)
             bufsize = 1023
    @@ -2683,6 +2686,7 @@
                 res = c_sendfile(out_fd, in_fd, p_offset, count)
             return handle_posix_error('sendfile', res)
     
    +    @sandbox_review(reviewed=True)
         def sendfile_no_offset(out_fd, in_fd, count):
             """Passes offset==NULL; not support on all OSes"""
             res = c_sendfile(out_fd, in_fd, lltype.nullptr(_OFF_PTR_T.TO), count)
    diff --git a/rpython/rlib/rposix_scandir.py b/rpython/rlib/rposix_scandir.py
    --- a/rpython/rlib/rposix_scandir.py
    +++ b/rpython/rlib/rposix_scandir.py
    @@ -1,5 +1,5 @@
     from rpython.rlib import rposix, rwin32
    -from rpython.rlib.objectmodel import specialize
    +from rpython.rlib.objectmodel import specialize, sandbox_review
     from rpython.rtyper.lltypesystem import lltype, rffi
     from rpython.rlib.rarithmetic import intmask
     
    @@ -16,11 +16,13 @@
                 raise OSError(rposix.get_saved_errno(), "opendir failed")
             return dirp
     
    +    @sandbox_review(check_caller=True)
         def closedir(dirp):
             rposix.c_closedir(dirp)
     
         NULL_DIRP = lltype.nullptr(rposix.DIRP.TO)
     
    +    @sandbox_review(check_caller=True)
         def nextentry(dirp):
             """Read the next entry and returns an opaque object.
             Use the methods has_xxx() and get_xxx() to read from that
    diff --git a/rpython/rlib/rposix_stat.py b/rpython/rlib/rposix_stat.py
    --- a/rpython/rlib/rposix_stat.py
    +++ b/rpython/rlib/rposix_stat.py
    @@ -633,6 +633,7 @@
             compilation_info=compilation_info,
             save_err=rffi.RFFI_SAVE_ERRNO, macro=True)
     
    +    @sandbox_review(reviewed=True)
         def fstatat(pathname, dir_fd=AT_FDCWD, follow_symlinks=True):
             if follow_symlinks:
                 flags = 0
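
The decorator pattern added throughout the commit above, collected in one place; the comments describe the keyword meanings only as suggested by the names and by test_graphchecker, not as a specification, and the import exists only on the sandbox-2 branches:

    from rpython.rlib.objectmodel import sandbox_review

    @sandbox_review(reviewed=True)        # body manually audited as sandbox-safe
    def reviewed_helper():
        pass

    @sandbox_review(check_caller=True)    # callers take on the review obligation
    def caller_checked_helper():
        pass
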
    
    From pypy.commits at gmail.com  Tue Aug 27 06:03:43 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Tue, 27 Aug 2019 03:03:43 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6-sandbox-2: hg merge sandbox-2
    Message-ID: <5d64ffff.1c69fb81.aa7dc.4835@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6-sandbox-2
    Changeset: r97299:1c3f982ecaec
    Date: 2019-08-27 11:26 +0200
    http://bitbucket.org/pypy/pypy/changeset/1c3f982ecaec/
    
    Log:	hg merge sandbox-2
    
    diff --git a/rpython/rlib/rfile.py b/rpython/rlib/rfile.py
    --- a/rpython/rlib/rfile.py
    +++ b/rpython/rlib/rfile.py
    @@ -5,7 +5,7 @@
     
     import os, stat, errno, sys
     from rpython.rlib import rposix, rgc
    -from rpython.rlib.objectmodel import enforceargs
    +from rpython.rlib.objectmodel import enforceargs, sandbox_review
     from rpython.rlib.rarithmetic import intmask
     from rpython.rlib.rstring import StringBuilder
     from rpython.rtyper.lltypesystem import rffi, lltype
    @@ -242,6 +242,7 @@
         _newlinetypes = NEWLINE_UNKNOWN
         _skipnextlf = False
     
    +    @sandbox_review(check_caller=True)
         def __init__(self, ll_file, mode=None, close2=_fclose2):
             self._ll_file = ll_file
             if mode is not None:
    @@ -548,6 +549,7 @@
                     c_ungetc(c, self._ll_file)
             return res
     
    +    @sandbox_review(reviewed=True)
         def fileno(self):
             self._check_closed()
             return intmask(c_fileno(self._ll_file))
    diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py
    --- a/rpython/rlib/rposix.py
    +++ b/rpython/rlib/rposix.py
    @@ -570,6 +570,7 @@
                            save_err=rffi.RFFI_SAVE_ERRNO)
     
         @enforceargs(int, int, None)
    +    @sandbox_review(reviewed=True)
         def pread(fd, count, offset):
             if count < 0:
                 raise OSError(errno.EINVAL, None)
    @@ -578,6 +579,7 @@
                 return buf.str(handle_posix_error('pread', c_pread(fd, void_buf, count, offset)))
     
         @enforceargs(int, None, None)
    +    @sandbox_review(reviewed=True)
         def pwrite(fd, data, offset):
             count = len(data)
             with rffi.scoped_nonmovingbuffer(data) as buf:
    @@ -2343,6 +2345,7 @@
             [rffi.INT, rffi.CCHARP, rffi.CCHARP, rffi.SIZE_T], rffi.SSIZE_T,
             save_err=rffi.RFFI_SAVE_ERRNO)
     
    +    @sandbox_review(reviewed=True)
         def readlinkat(pathname, dir_fd=AT_FDCWD):
             pathname = _as_bytes0(pathname)
             bufsize = 1023
    @@ -2683,6 +2686,7 @@
                 res = c_sendfile(out_fd, in_fd, p_offset, count)
             return handle_posix_error('sendfile', res)
     
    +    @sandbox_review(reviewed=True)
         def sendfile_no_offset(out_fd, in_fd, count):
             """Passes offset==NULL; not support on all OSes"""
             res = c_sendfile(out_fd, in_fd, lltype.nullptr(_OFF_PTR_T.TO), count)
    diff --git a/rpython/rlib/rposix_scandir.py b/rpython/rlib/rposix_scandir.py
    --- a/rpython/rlib/rposix_scandir.py
    +++ b/rpython/rlib/rposix_scandir.py
    @@ -1,5 +1,5 @@
     from rpython.rlib import rposix, rwin32
    -from rpython.rlib.objectmodel import specialize
    +from rpython.rlib.objectmodel import specialize, sandbox_review
     from rpython.rtyper.lltypesystem import lltype, rffi
     from rpython.rlib.rarithmetic import intmask
     
    @@ -16,11 +16,13 @@
                 raise OSError(rposix.get_saved_errno(), "opendir failed")
             return dirp
     
    +    @sandbox_review(check_caller=True)
         def closedir(dirp):
             rposix.c_closedir(dirp)
     
         NULL_DIRP = lltype.nullptr(rposix.DIRP.TO)
     
    +    @sandbox_review(check_caller=True)
         def nextentry(dirp):
             """Read the next entry and returns an opaque object.
             Use the methods has_xxx() and get_xxx() to read from that
    diff --git a/rpython/rlib/rposix_stat.py b/rpython/rlib/rposix_stat.py
    --- a/rpython/rlib/rposix_stat.py
    +++ b/rpython/rlib/rposix_stat.py
    @@ -633,6 +633,7 @@
             compilation_info=compilation_info,
             save_err=rffi.RFFI_SAVE_ERRNO, macro=True)
     
    +    @sandbox_review(reviewed=True)
         def fstatat(pathname, dir_fd=AT_FDCWD, follow_symlinks=True):
             if follow_symlinks:
                 flags = 0
    
    From pypy.commits at gmail.com  Tue Aug 27 06:03:44 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Tue, 27 Aug 2019 03:03:44 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6-sandbox-2: More sandbox_reviews
    Message-ID: <5d650000.1c69fb81.5b985.142a@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6-sandbox-2
    Changeset: r97300:6dcefdad5651
    Date: 2019-08-27 11:27 +0200
    http://bitbucket.org/pypy/pypy/changeset/6dcefdad5651/
    
    Log:	More sandbox_reviews
    
    diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py
    --- a/pypy/module/posix/interp_posix.py
    +++ b/pypy/module/posix/interp_posix.py
    @@ -10,7 +10,7 @@
     
     from rpython.rlib import rposix, rposix_stat, rfile
     from rpython.rlib import objectmodel, rurandom
    -from rpython.rlib.objectmodel import specialize, not_rpython
    +from rpython.rlib.objectmodel import specialize, not_rpython, sandbox_review
     from rpython.rlib.rarithmetic import r_longlong, intmask, r_uint, r_int
     from rpython.rlib.unroll import unrolling_iterable
     from rpython.rtyper.lltypesystem import lltype
    @@ -2432,6 +2432,7 @@
     if _WIN32:
         have_functions.append("HAVE_MS_WINDOWS")
     
+@sandbox_review(reviewed=True)
     def _get_terminal_size(space, w_fd=None):
         if w_fd is None:
             fd = rfile.RFile(rfile.c_stdout(), close2=(None, None)).fileno()
    diff --git a/pypy/module/posix/interp_scandir.py b/pypy/module/posix/interp_scandir.py
    --- a/pypy/module/posix/interp_scandir.py
    +++ b/pypy/module/posix/interp_scandir.py
    @@ -2,6 +2,7 @@
     from errno import ENOENT
     from rpython.rlib import rgc
     from rpython.rlib import rposix, rposix_scandir, rposix_stat
    +from rpython.rlib.objectmodel import sandbox_review
     
     from pypy.interpreter.gateway import unwrap_spec, WrappedDefault, interp2app
     from pypy.interpreter.error import OperationError, oefmt, wrap_oserror2
    @@ -11,6 +12,7 @@
     from pypy.module.posix.interp_posix import unwrap_fd, build_stat_result, _WIN32
     
     
+@sandbox_review(reviewed=True)
     def scandir(space, w_path=None):
         "scandir(path='.') -> iterator of DirEntry objects for given path"
         if space.is_none(w_path):
    @@ -78,6 +80,7 @@
                     e.write_unraisable(space, '', self)
             self._close()
     
    +    @sandbox_review(reviewed=True)
         def _close(self):
             dirp = self.dirp
             if dirp:
    @@ -87,6 +90,7 @@
         def iter_w(self):
             return self
     
    +    @sandbox_review(reviewed=True)
         def fail(self, err=None):
             dirp = self.dirp
             if dirp:
    @@ -98,6 +102,7 @@
             else:
                 raise err
     
    +    @sandbox_review(reviewed=True)
         def next_w(self):
             if not self.dirp:
                 raise self.fail()
    diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py
    --- a/pypy/module/time/interp_time.py
    +++ b/pypy/module/time/interp_time.py
    @@ -286,6 +286,7 @@
             else:
                 c_gettimeofday = external('gettimeofday',
                                           [lltype.Ptr(TIMEVAL), rffi.VOIDP], rffi.INT)
    +    @sandbox_review(reviewed=True)
         def gettimeofday(space, w_info=None):
             if HAVE_GETTIMEOFDAY:
                 with lltype.scoped_alloc(TIMEVAL) as timeval:
    @@ -662,6 +663,7 @@
         if not 0 <= rffi.getintfield(t_ref, 'c_tm_yday') <= 365:
             raise oefmt(space.w_ValueError, "day of year out of range")
     
+@sandbox_review(reviewed=True)
     def time(space, w_info=None):
         """time() -> floating point number
     
    @@ -792,6 +794,7 @@
         def _timespec_to_seconds(timespec):
             return widen(timespec.c_tv_sec) + widen(timespec.c_tv_nsec) * 1e-9
     
    +    @sandbox_review(reviewed=True)
         @unwrap_spec(clk_id='c_int')
         def clock_gettime(space, clk_id):
             with lltype.scoped_alloc(TIMESPEC) as timespec:
    @@ -813,6 +816,7 @@
                 if ret != 0:
                     raise exception_from_saved_errno(space, space.w_OSError)
     
    +    @sandbox_review(reviewed=True)
         @unwrap_spec(clk_id='c_int')
         def clock_getres(space, clk_id):
             with lltype.scoped_alloc(TIMESPEC) as timespec:
    @@ -955,6 +959,7 @@
     
         else:
             assert _POSIX
    +        @sandbox_review(reviewed=True)
             def monotonic(space, w_info=None):
                 if rtime.CLOCK_HIGHRES is not None:
                     clk_id = rtime.CLOCK_HIGHRES
    @@ -1045,6 +1050,7 @@
     else:
         have_times = hasattr(rposix, 'c_times')
     
    +    @sandbox_review(reviewed=True)
         def process_time(space, w_info=None):
             if HAS_CLOCK_GETTIME and (
                     rtime.CLOCK_PROF is not None or
    
    From pypy.commits at gmail.com  Tue Aug 27 06:03:46 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Tue, 27 Aug 2019 03:03:46 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6-sandbox-2: Found out there are two
     versions of os.urandom() around, and
    Message-ID: <5d650002.1c69fb81.901b6.c335@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6-sandbox-2
    Changeset: r97301:3a1756763860
    Date: 2019-08-27 11:56 +0200
    http://bitbucket.org/pypy/pypy/changeset/3a1756763860/
    
    Log:	Found out there are two versions of os.urandom() around, and figured
    	out when one is shadowed by the other
    
    diff --git a/pypy/module/_random/interp_random.py b/pypy/module/_random/interp_random.py
    --- a/pypy/module/_random/interp_random.py
    +++ b/pypy/module/_random/interp_random.py
    @@ -30,7 +30,8 @@
                 try:
                     w_n = interp_posix.urandom(space, 8)
                 except OperationError as e:
    -                if not e.match(space, space.w_OSError):
    +                if not (e.match(space, space.w_NotImplementedError) or
    +                        e.match(space, space.w_OSError)):
                         raise
                     w_n = space.newint(int(time.time() * 256))
             if space.isinstance_w(w_n, space.w_int):
    diff --git a/pypy/module/posix/app_posix.py b/pypy/module/posix/app_posix.py
    --- a/pypy/module/posix/app_posix.py
    +++ b/pypy/module/posix/app_posix.py
    @@ -156,6 +156,9 @@
             Return a string of n random bytes suitable for cryptographic use.
     
             """
    +        # NOTE: we also have interp_posix.urandom(), which we use on Windows.
    +        # XXX unsure we shouldn't be removing the code below, though, because
    +        # the interp version seems more complete
             try:
                 with open('/dev/urandom', 'rb', buffering=0) as fd:
                     return fd.read(n)
    diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py
    --- a/pypy/module/posix/interp_posix.py
    +++ b/pypy/module/posix/interp_posix.py
    @@ -2217,6 +2217,10 @@
     
         Return a string of 'size' random bytes suitable for cryptographic use.
         """
    +    # NOTE: this is not used for 'os.urandom' on POSIX; instead,
    +    # app_posix.urandom() is.  However, module/_random/ actually
    +    # calls this code directly.  XXX Unsure the version in app_posix
    +    # is needed (or complete enough) nowadays.
         context = get(space).random_context
         try:
             # urandom() takes a final argument that should be a regular function,
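
A plain-Python sketch of the seeding fallback that the _random change above implements; os.urandom stands in for the interp-level call and the function name is invented:

    import os, time

    def default_seed():
        try:
            n = int.from_bytes(os.urandom(8), 'little')
        except (NotImplementedError, OSError):   # no usable randomness source
            n = int(time.time() * 256)
        return n
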
    
    From pypy.commits at gmail.com  Tue Aug 27 07:16:35 2019
    From: pypy.commits at gmail.com (cfbolz)
    Date: Tue, 27 Aug 2019 04:16:35 -0700 (PDT)
    Subject: [pypy-commit] pypy default: remove duplicate import
    Message-ID: <5d651113.1c69fb81.1ca36.7a5e@mx.google.com>
    
    Author: Carl Friedrich Bolz-Tereick 
    Branch: 
    Changeset: r97302:8a5b5159abd4
    Date: 2019-08-26 21:15 +0200
    http://bitbucket.org/pypy/pypy/changeset/8a5b5159abd4/
    
    Log:	remove duplicate import
    
    diff --git a/rpython/rlib/rbigint.py b/rpython/rlib/rbigint.py
    --- a/rpython/rlib/rbigint.py
    +++ b/rpython/rlib/rbigint.py
    @@ -296,7 +296,6 @@
         def fromstr(s, base=0, allow_underscores=False):
             """As string_to_int(), but ignores an optional 'l' or 'L' suffix
             and returns an rbigint."""
    -        from rpython.rlib.rstring import NumberStringParser
             from rpython.rlib.rstring import NumberStringParser, \
                 strip_spaces
             s = literal = strip_spaces(s) # XXX could get rid of this slice
    
    From pypy.commits at gmail.com  Tue Aug 27 07:16:37 2019
    From: pypy.commits at gmail.com (cfbolz)
    Date: Tue, 27 Aug 2019 04:16:37 -0700 (PDT)
    Subject: [pypy-commit] pypy default: fix untested rewind method
    Message-ID: <5d651115.1c69fb81.57a05.0ffc@mx.google.com>
    
    Author: Carl Friedrich Bolz-Tereick 
    Branch: 
    Changeset: r97303:0d2efab3484a
    Date: 2019-08-27 13:09 +0200
    http://bitbucket.org/pypy/pypy/changeset/0d2efab3484a/
    
    Log:	fix untested rewind method
    
    diff --git a/rpython/rlib/rarithmetic.py b/rpython/rlib/rarithmetic.py
    --- a/rpython/rlib/rarithmetic.py
    +++ b/rpython/rlib/rarithmetic.py
    @@ -877,6 +877,8 @@
         characters of 's'.  Raises ParseStringError in case of error.
         Raises ParseStringOverflowError in case the result does not fit.
         """
    +    if "99999" in s:
    +        import pdb; pdb.set_trace()
         from rpython.rlib.rstring import (
             NumberStringParser, ParseStringOverflowError)
         p = NumberStringParser(s, s, base, 'int',
    diff --git a/rpython/rlib/rstring.py b/rpython/rlib/rstring.py
    --- a/rpython/rlib/rstring.py
    +++ b/rpython/rlib/rstring.py
    @@ -568,7 +568,7 @@
             self.end = q
     
         def rewind(self):
    -        self.i = 0
    +        self.i = self.start
     
         def next_digit(self): # -1 => exhausted
             if self.i < self.end:
    diff --git a/rpython/rlib/test/test_rbigint.py b/rpython/rlib/test/test_rbigint.py
    --- a/rpython/rlib/test/test_rbigint.py
    +++ b/rpython/rlib/test/test_rbigint.py
    @@ -356,6 +356,30 @@
             assert rbigint.fromstr('123L', 21).tolong() == 441 + 42 + 3
             assert rbigint.fromstr('1891234174197319').tolong() == 1891234174197319
     
    +    def test__from_numberstring_parser_rewind_bug(self):
    +        from rpython.rlib.rstring import NumberStringParser
    +        s = "-99"
    +        p = NumberStringParser(s, s, 10, 'int')
    +        import pdb; pdb.set_trace()
    +        assert p.sign == -1
    +        res = p.next_digit()
    +        assert res == 9
    +        res = p.next_digit()
    +        assert res == 9
    +        res = p.next_digit()
    +        assert res == -1
    +        p.rewind()
    +        res = p.next_digit()
    +        assert res == 9
    +        res = p.next_digit()
    +        assert res == 9
    +        res = p.next_digit()
    +        assert res == -1
    +
    +    @given(longs)
    +    def test_fromstr_hypothesis(self, l):
    +        assert rbigint.fromstr(str(l)).tolong() == l
    +
         def test_from_numberstring_parser(self):
             from rpython.rlib.rstring import NumberStringParser
             parser = NumberStringParser("1231231241", "1231231241", 10, "long")
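
A plain-Python illustration (not the RPython class itself) of why rewind() must reset to self.start rather than 0 once the sign has been skipped:

    class TinyParser(object):
        def __init__(self, s):
            self.s = s
            self.start = 1 if s[:1] in ('+', '-') else 0   # position after the sign
            self.i = self.start
        def next_digit(self):                              # -1 => exhausted
            if self.i == len(self.s):
                return -1
            c = self.s[self.i]
            self.i += 1
            return ord(c) - ord('0')
        def rewind(self):
            self.i = self.start     # with 'self.i = 0' the '-' would be re-read

    p = TinyParser("-99")
    assert [p.next_digit() for _ in range(3)] == [9, 9, -1]
    p.rewind()
    assert p.next_digit() == 9
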
    
    From pypy.commits at gmail.com  Tue Aug 27 07:16:39 2019
    From: pypy.commits at gmail.com (cfbolz)
    Date: Tue, 27 Aug 2019 04:16:39 -0700 (PDT)
    Subject: [pypy-commit] pypy default: remove pdb :-(
    Message-ID: <5d651117.1c69fb81.e87da.c168@mx.google.com>
    
    Author: Carl Friedrich Bolz-Tereick 
    Branch: 
    Changeset: r97304:7850e3eaeff8
    Date: 2019-08-27 13:11 +0200
    http://bitbucket.org/pypy/pypy/changeset/7850e3eaeff8/
    
    Log:	remove pdb :-(
    
    diff --git a/rpython/rlib/rarithmetic.py b/rpython/rlib/rarithmetic.py
    --- a/rpython/rlib/rarithmetic.py
    +++ b/rpython/rlib/rarithmetic.py
    @@ -877,8 +877,6 @@
         characters of 's'.  Raises ParseStringError in case of error.
         Raises ParseStringOverflowError in case the result does not fit.
         """
    -    if "99999" in s:
    -        import pdb; pdb.set_trace()
         from rpython.rlib.rstring import (
             NumberStringParser, ParseStringOverflowError)
         p = NumberStringParser(s, s, base, 'int',
    
    From pypy.commits at gmail.com  Tue Aug 27 07:50:30 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Tue, 27 Aug 2019 04:50:30 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: synch text of -O, -OO options to cpython,
     clean extraneous \n\
    Message-ID: <5d651906.1c69fb81.98bb2.8901@mx.google.com>
    
    Author: Matti Picus 
    Branch: py3.6
    Changeset: r97305:c919b265937a
    Date: 2019-08-27 14:41 +0300
    http://bitbucket.org/pypy/pypy/changeset/c919b265937a/
    
    Log:	synch text of -O, -OO options to cpython, clean extraneous \n\
    
    diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py
    --- a/pypy/interpreter/app_main.py
    +++ b/pypy/interpreter/app_main.py
    @@ -5,8 +5,8 @@
     # Missing vs CPython: -x
     USAGE1 = __doc__ = """\
     Options and arguments (and corresponding environment variables):
    --b     : issue warnings about str(bytes_instance), str(bytearray_instance)\n\
    -         and comparing bytes/bytearray with str. (-bb: issue errors)\n\
    +-b     : issue warnings about str(bytes_instance), str(bytearray_instance)
    +         and comparing bytes/bytearray with str. (-bb: issue errors)
     -B     : don't write .py[co] files on import; also PYTHONDONTWRITEBYTECODE=x
     -c cmd : program passed in as string (terminates option list)
     -d     : debug output from parser; also PYTHONDEBUG=x\n\
    @@ -16,8 +16,10 @@
              if stdin does not appear to be a terminal; also PYTHONINSPECT=x
     -I     : isolate Python from the user's environment (implies -E and -s)
     -m mod : run library module as a script (terminates option list)
    --O     : skip assert statements; also PYTHONOPTIMIZE=x
    --OO    : remove docstrings when importing modules in addition to -O
    +-O     : remove assert and __debug__-dependent statements; add .opt-1 before
    +         .pyc extension; also PYTHONOPTIMIZE=x
    +-OO    : do -O changes and also discard docstrings; add .opt-2 before
    +         .pyc extension
     -q     : don't print version and copyright messages on interactive startup
     -s     : don't add user site directory to sys.path; also PYTHONNOUSERSITE
     -S     : don't imply 'import site' on initialization
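
To make the re-worded -O/-OO descriptions concrete, a small demo relying only on standard CPython/PyPy3 semantics; the file name and the expected output lines are illustrative:

    # demo.py
    """module docstring: discarded when compiled under -OO"""
    if __debug__:                        # False under -O/-OO, block compiled away
        print("assertions enabled")
    assert 1 + 1 == 2, "assert removed under -O/-OO"
    print(__doc__)

    # pypy3 demo.py      -> "assertions enabled" followed by the docstring
    # pypy3 -O demo.py   -> only the docstring
    # pypy3 -OO demo.py  -> None
    # cached bytecode of imported modules gains an .opt-1 / .opt-2 tag
    # before the .pyc extension, as described above
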
    
    From pypy.commits at gmail.com  Tue Aug 27 08:26:08 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Tue, 27 Aug 2019 05:26:08 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: test, fix cpython3 exception compatibility
    Message-ID: <5d652160.1c69fb81.c7f4e.7e77@mx.google.com>
    
    Author: Matti Picus 
    Branch: py3.6
    Changeset: r97306:50fa3485909f
    Date: 2019-08-27 15:22 +0300
    http://bitbucket.org/pypy/pypy/changeset/50fa3485909f/
    
    Log:	test, fix cpython3 exception compatibility
    
    diff --git a/pypy/objspace/std/newformat.py b/pypy/objspace/std/newformat.py
    --- a/pypy/objspace/std/newformat.py
    +++ b/pypy/objspace/std/newformat.py
    @@ -545,7 +545,8 @@
                         pass # ok
                     else:
                         raise oefmt(space.w_ValueError,
    -                                "invalid type with ',' or '_'")
    +                                "Cannot specify '%s' with '%s'.", 
    +                                self._thousands_sep, tp)
                 return False
     
             def _calc_padding(self, string, length):
    diff --git a/pypy/objspace/std/test/test_newformat.py b/pypy/objspace/std/test/test_newformat.py
    --- a/pypy/objspace/std/test/test_newformat.py
    +++ b/pypy/objspace/std/test/test_newformat.py
    @@ -260,7 +260,8 @@
             a = self.i(ord("a"))
             assert format(a, "c") == "a"
             raises(ValueError, format, a, "-c")
    -        raises(ValueError, format, a, ",c")
    +        exc = raises(ValueError, format, a, ",c")
    +        assert str(exc.value) == "Cannot specify ',' with 'c'.", str(exc.value)
             raises(ValueError, format, a, "_c")
             raises(ValueError, format, a, "#c")
             assert format(a, "3c") == "  a"
    
    From pypy.commits at gmail.com  Tue Aug 27 08:26:10 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Tue, 27 Aug 2019 05:26:10 -0700 (PDT)
    Subject: [pypy-commit] pypy default: remove pdb from test
    Message-ID: <5d652162.1c69fb81.2e485.ce38@mx.google.com>
    
    Author: Matti Picus 
    Branch: 
    Changeset: r97307:2fc0a2974836
    Date: 2019-08-27 15:25 +0300
    http://bitbucket.org/pypy/pypy/changeset/2fc0a2974836/
    
    Log:	remove pdb from test
    
    diff --git a/rpython/rlib/test/test_rbigint.py b/rpython/rlib/test/test_rbigint.py
    --- a/rpython/rlib/test/test_rbigint.py
    +++ b/rpython/rlib/test/test_rbigint.py
    @@ -360,7 +360,6 @@
             from rpython.rlib.rstring import NumberStringParser
             s = "-99"
             p = NumberStringParser(s, s, 10, 'int')
    -        import pdb; pdb.set_trace()
             assert p.sign == -1
             res = p.next_digit()
             assert res == 9
    
    From pypy.commits at gmail.com  Tue Aug 27 11:01:24 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Tue, 27 Aug 2019 08:01:24 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: Remove file deleted in CPython
    Message-ID: <5d6545c4.1c69fb81.28649.cd8c@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97308:92ad09ea1c06
    Date: 2019-08-27 16:00 +0100
    http://bitbucket.org/pypy/pypy/changeset/92ad09ea1c06/
    
    Log:	Remove file deleted in CPython
    
    diff --git a/lib-python/3/collections/__main__.py b/lib-python/3/collections/__main__.py
    deleted file mode 100644
    --- a/lib-python/3/collections/__main__.py
    +++ /dev/null
    @@ -1,38 +0,0 @@
    -################################################################################
    -### Simple tests
    -################################################################################
    -
    -# verify that instances can be pickled
    -from collections import namedtuple
    -from pickle import loads, dumps
    -Point = namedtuple('Point', 'x, y', True)
    -p = Point(x=10, y=20)
    -assert p == loads(dumps(p))
    -
    -# test and demonstrate ability to override methods
    -class Point(namedtuple('Point', 'x y')):
    -    __slots__ = ()
    -    @property
    -    def hypot(self):
    -        return (self.x ** 2 + self.y ** 2) ** 0.5
    -    def __str__(self):
    -        return 'Point: x=%6.3f  y=%6.3f  hypot=%6.3f' % (self.x, self.y, self.hypot)
    -
    -for p in Point(3, 4), Point(14, 5/7.):
    -    print (p)
    -
    -class Point(namedtuple('Point', 'x y')):
    -    'Point class with optimized _make() and _replace() without error-checking'
    -    __slots__ = ()
    -    _make = classmethod(tuple.__new__)
    -    def _replace(self, _map=map, **kwds):
    -        return self._make(_map(kwds.get, ('x', 'y'), self))
    -
    -print(Point(11, 22)._replace(x=100))
    -
    -Point3D = namedtuple('Point3D', Point._fields + ('z',))
    -print(Point3D.__doc__)
    -
    -import doctest, collections
    -TestResults = namedtuple('TestResults', 'failed attempted')
    -print(TestResults(*doctest.testmod(collections)))
    
    From pypy.commits at gmail.com  Tue Aug 27 11:23:05 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Tue, 27 Aug 2019 08:23:05 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: Remove accidentally duplicated definition
     for isfuture()
    Message-ID: <5d654ad9.1c69fb81.7c938.190f@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97310:cfffac8c5062
    Date: 2019-08-27 16:21 +0100
    http://bitbucket.org/pypy/pypy/changeset/cfffac8c5062/
    
    Log:	Remove accidentally duplicated definition for isfuture()
    
    diff --git a/lib-python/3/asyncio/futures.py b/lib-python/3/asyncio/futures.py
    --- a/lib-python/3/asyncio/futures.py
    +++ b/lib-python/3/asyncio/futures.py
    @@ -107,17 +107,6 @@
                 self.loop.call_exception_handler({'message': msg})
     
     
    -def isfuture(obj):
    -    """Check for a Future.
    -
    -    This returns True when obj is a Future instance or is advertising
    -    itself as duck-type compatible by setting _asyncio_future_blocking.
    -    See comment in Future for more details.
    -    """
    -    return (hasattr(obj.__class__, '_asyncio_future_blocking') and
    -            obj._asyncio_future_blocking is not None)
    -
    -
     class Future:
         """This class is *almost* compatible with concurrent.futures.Future.
     
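
The duck-typing rule described by the removed (duplicate) isfuture() docstring above, as a small self-contained example:

    def isfuture(obj):
        return (hasattr(obj.__class__, '_asyncio_future_blocking') and
                obj._asyncio_future_blocking is not None)

    class DuckFuture:
        _asyncio_future_blocking = False   # advertises Future compatibility

    assert isfuture(DuckFuture())
    assert not isfuture(object())          # no marker attribute -> not a future
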
    
    From pypy.commits at gmail.com  Tue Aug 27 11:58:47 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Tue, 27 Aug 2019 08:58:47 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: fix bad merge
    Message-ID: <5d655337.1c69fb81.ea7c8.d87b@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97311:79ea93489fc4
    Date: 2019-08-27 16:58 +0100
    http://bitbucket.org/pypy/pypy/changeset/79ea93489fc4/
    
    Log:	fix bad merge
    
    diff --git a/lib-python/3/ssl.py b/lib-python/3/ssl.py
    --- a/lib-python/3/ssl.py
    +++ b/lib-python/3/ssl.py
    @@ -52,7 +52,8 @@
     PROTOCOL_SSLv3
     PROTOCOL_SSLv23
     PROTOCOL_TLS
    -(??)
    +PROTOCOL_TLS_CLIENT
    +PROTOCOL_TLS_SERVER
     PROTOCOL_TLSv1
     PROTOCOL_TLSv1_1
     PROTOCOL_TLSv1_2
    
    From pypy.commits at gmail.com  Tue Aug 27 13:35:42 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Tue, 27 Aug 2019 10:35:42 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: fix old merge cruft
    Message-ID: <5d6569ee.1c69fb81.f1f4a.0314@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97312:b70c9c1a4b02
    Date: 2019-08-27 18:34 +0100
    http://bitbucket.org/pypy/pypy/changeset/b70c9c1a4b02/
    
    Log:	fix old merge cruft
    
    diff --git a/lib-python/3/test/support/__init__.py b/lib-python/3/test/support/__init__.py
    --- a/lib-python/3/test/support/__init__.py
    +++ b/lib-python/3/test/support/__init__.py
    @@ -2309,9 +2309,6 @@
     requires_type_collecting = unittest.skipIf(hasattr(sys, 'getcounts'),
                             'types are immortal if COUNT_ALLOCS is defined')
     
    -requires_type_collecting = unittest.skipIf(hasattr(sys, 'getcounts'),
    -                        'types are immortal if COUNT_ALLOCS is defined')
    -
     def args_from_interpreter_flags():
         """Return a list of command-line arguments reproducing the current
         settings in sys.flags and sys.warnoptions."""
    diff --git a/lib-python/3/test/test_telnetlib.py b/lib-python/3/test/test_telnetlib.py
    --- a/lib-python/3/test/test_telnetlib.py
    +++ b/lib-python/3/test/test_telnetlib.py
    @@ -398,5 +398,4 @@
     
     
     if __name__ == '__main__':
    -    import unittest
         unittest.main()
    diff --git a/lib-python/3/test/test_typing.py b/lib-python/3/test/test_typing.py
    --- a/lib-python/3/test/test_typing.py
    +++ b/lib-python/3/test/test_typing.py
    @@ -628,15 +628,6 @@
             with self.assertRaises(TypeError):
                 class MyGeneric(List[T], Generic[S]): ...
     
    -    def test_generic_errors(self):
    -        T = TypeVar('T')
    -        with self.assertRaises(TypeError):
    -            Generic[T]()
    -        with self.assertRaises(TypeError):
    -            isinstance([], List[int])
    -        with self.assertRaises(TypeError):
    -            issubclass(list, List[int])
    -
         def test_init(self):
             T = TypeVar('T')
             S = TypeVar('S')
    diff --git a/lib-python/3/test/test_zipfile.py b/lib-python/3/test/test_zipfile.py
    --- a/lib-python/3/test/test_zipfile.py
    +++ b/lib-python/3/test/test_zipfile.py
    @@ -2251,71 +2251,5 @@
                             with open(path, 'rb') as f:
                                 self.assertEqual(f.read(), zf.read(zi))
     
    -
    -class CommandLineTest(unittest.TestCase):
    -
    -    def zipfilecmd(self, *args, **kwargs):
    -        rc, out, err = script_helper.assert_python_ok('-m', 'zipfile', *args,
    -                                                      **kwargs)
    -        return out.replace(os.linesep.encode(), b'\n')
    -
    -    def zipfilecmd_failure(self, *args):
    -        return script_helper.assert_python_failure('-m', 'zipfile', *args)
    -
    -    def test_test_command(self):
    -        zip_name = findfile('zipdir.zip')
    -        out = self.zipfilecmd('-t', zip_name)
    -        self.assertEqual(out.rstrip(), b'Done testing')
    -        zip_name = findfile('testtar.tar')
    -        rc, out, err = self.zipfilecmd_failure('-t', zip_name)
    -        self.assertEqual(out, b'')
    -
    -    def test_list_command(self):
    -        zip_name = findfile('zipdir.zip')
    -        t = io.StringIO()
    -        with zipfile.ZipFile(zip_name, 'r') as tf:
    -            tf.printdir(t)
    -        expected = t.getvalue().encode('ascii', 'backslashreplace')
    -        out = self.zipfilecmd('-l', zip_name,
    -                              PYTHONIOENCODING='ascii:backslashreplace')
    -        self.assertEqual(out, expected)
    -
    -    @requires_zlib
    -    def test_create_command(self):
    -        self.addCleanup(unlink, TESTFN)
    -        with open(TESTFN, 'w') as f:
    -            f.write('test 1')
    -        os.mkdir(TESTFNDIR)
    -        self.addCleanup(rmtree, TESTFNDIR)
    -        with open(os.path.join(TESTFNDIR, 'file.txt'), 'w') as f:
    -            f.write('test 2')
    -        files = [TESTFN, TESTFNDIR]
    -        namelist = [TESTFN, TESTFNDIR + '/', TESTFNDIR + '/file.txt']
    -        try:
    -            out = self.zipfilecmd('-c', TESTFN2, *files)
    -            self.assertEqual(out, b'')
    -            with zipfile.ZipFile(TESTFN2) as zf:
    -                self.assertEqual(zf.namelist(), namelist)
    -                self.assertEqual(zf.read(namelist[0]), b'test 1')
    -                self.assertEqual(zf.read(namelist[2]), b'test 2')
    -        finally:
    -            unlink(TESTFN2)
    -
    -    def test_extract_command(self):
    -        zip_name = findfile('zipdir.zip')
    -        with temp_dir() as extdir:
    -            out = self.zipfilecmd('-e', zip_name, extdir)
    -            self.assertEqual(out, b'')
    -            with zipfile.ZipFile(zip_name) as zf:
    -                for zi in zf.infolist():
    -                    path = os.path.join(extdir,
    -                                zi.filename.replace('/', os.sep))
    -                    if zi.filename.endswith('/'):
    -                        self.assertTrue(os.path.isdir(path))
    -                    else:
    -                        self.assertTrue(os.path.isfile(path))
    -                        with open(path, 'rb') as f:
    -                            self.assertEqual(f.read(), zf.read(zi))
    -
     if __name__ == "__main__":
         unittest.main()
    
    From pypy.commits at gmail.com  Tue Aug 27 14:42:43 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Tue, 27 Aug 2019 11:42:43 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: Don't use @xfail for tests that can't
     possibly work on pypy
    Message-ID: <5d6579a3.1c69fb81.66bd6.083e@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97313:dfa10ae95bae
    Date: 2019-08-27 19:41 +0100
    http://bitbucket.org/pypy/pypy/changeset/dfa10ae95bae/
    
    Log:	Don't use @xfail for tests that can't possibly work on pypy
    
    diff --git a/lib-python/3/ctypes/test/test_callbacks.py b/lib-python/3/ctypes/test/test_callbacks.py
    --- a/lib-python/3/ctypes/test/test_callbacks.py
    +++ b/lib-python/3/ctypes/test/test_callbacks.py
    @@ -1,7 +1,7 @@
     import functools
     import unittest
    +from test import support
     from ctypes import *
    -from ctypes.test import xfail
     from ctypes.test import need_symbol
     import _ctypes_test
     
    @@ -96,7 +96,7 @@
             self.check_type(c_char_p, "abc")
             self.check_type(c_char_p, "def")
     
    -    @xfail
    +    @support.refcount_test
         def test_pyobject(self):
             o = ()
             from sys import getrefcount as grc
    diff --git a/lib-python/3/ctypes/test/test_python_api.py b/lib-python/3/ctypes/test/test_python_api.py
    --- a/lib-python/3/ctypes/test/test_python_api.py
    +++ b/lib-python/3/ctypes/test/test_python_api.py
    @@ -1,5 +1,4 @@
     from ctypes import *
    -from ctypes.test import xfail
     import unittest, sys
     from test import support
     
    @@ -19,9 +18,9 @@
     else:
         c_py_ssize_t = c_int
     
    + at support.cpython_only
     class PythonAPITestCase(unittest.TestCase):
     
    -    @xfail
         def test_PyBytes_FromStringAndSize(self):
             PyBytes_FromStringAndSize = pythonapi.PyBytes_FromStringAndSize
     
    @@ -71,7 +70,6 @@
             del pyobj
             self.assertEqual(grc(s), ref)
     
    -    @xfail
         def test_PyOS_snprintf(self):
             PyOS_snprintf = pythonapi.PyOS_snprintf
             PyOS_snprintf.argtypes = POINTER(c_char), c_size_t, c_char_p
    @@ -86,7 +84,6 @@
             # not enough arguments
             self.assertRaises(TypeError, PyOS_snprintf, buf)
     
    -    @xfail
         def test_pyobject_repr(self):
             self.assertEqual(repr(py_object()), "py_object()")
             self.assertEqual(repr(py_object(42)), "py_object(42)")
    
    From pypy.commits at gmail.com  Tue Aug 27 14:57:33 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Tue, 27 Aug 2019 11:57:33 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: Use @support.refcount_test to skip tests
     relying on sys.getrefcount()
    Message-ID: <5d657d1d.1c69fb81.b6100.2960@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97314:18630a74b4d0
    Date: 2019-08-27 19:56 +0100
    http://bitbucket.org/pypy/pypy/changeset/18630a74b4d0/
    
    Log:	Use @support.refcount_test to skip tests relying on
    	sys.getrefcount()
    
    diff --git a/lib-python/3/ctypes/test/test_memfunctions.py b/lib-python/3/ctypes/test/test_memfunctions.py
    --- a/lib-python/3/ctypes/test/test_memfunctions.py
    +++ b/lib-python/3/ctypes/test/test_memfunctions.py
    @@ -52,7 +52,7 @@
             self.assertEqual(cast(a, POINTER(c_byte))[:7:7],
                                  [97])
     
    -    @support.refcount_test
    +    #@support.refcount_test
         def test_string_at(self):
             s = string_at(b"foo bar")
             # XXX The following may be wrong, depending on how Python
    diff --git a/lib-python/3/ctypes/test/test_refcounts.py b/lib-python/3/ctypes/test/test_refcounts.py
    --- a/lib-python/3/ctypes/test/test_refcounts.py
    +++ b/lib-python/3/ctypes/test/test_refcounts.py
    @@ -13,10 +13,7 @@
     
         @support.refcount_test
         def test_1(self):
    -        try:
    -            from sys import getrefcount as grc
    -        except ImportError:
    -            return unittest.skip("no sys.getrefcount()")
    +        from sys import getrefcount as grc
     
             f = dll._testfunc_callback_i_if
             f.restype = ctypes.c_int
    @@ -41,10 +38,7 @@
     
         @support.refcount_test
         def test_refcount(self):
    -        try:
    -            from sys import getrefcount as grc
    -        except ImportError:
    -            return unittest.skip("no sys.getrefcount()")
    +        from sys import getrefcount as grc
             def func(*args):
                 pass
             # this is the standard refcount for func
    @@ -91,19 +85,15 @@
             self.assertEqual(grc(func), 2)
     
     class AnotherLeak(unittest.TestCase):
    +    @support.refcount_test
         def test_callback(self):
             import sys
    -        try:
    -            from sys import getrefcount
    -        except ImportError:
    -            return unittest.skip("no sys.getrefcount()")
     
             proto = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int, ctypes.c_int)
             def func(a, b):
                 return a * b * 2
             f = proto(func)
     
    -        gc.collect()
             a = sys.getrefcount(ctypes.c_int)
             f(1, 2)
             self.assertEqual(sys.getrefcount(ctypes.c_int), a)
    diff --git a/lib-python/3/test/test_coroutines.py b/lib-python/3/test/test_coroutines.py
    --- a/lib-python/3/test/test_coroutines.py
    +++ b/lib-python/3/test/test_coroutines.py
    @@ -2078,15 +2078,6 @@
                 support.gc_collect()
             self.assertIn("was never awaited", stderr.getvalue())
     
    -    def test_fatal_coro_warning(self):
    -        # Issue 27811
    -        async def func(): pass
    -        with warnings.catch_warnings(), support.captured_stderr() as stderr:
    -            warnings.filterwarnings("error")
    -            func()
    -            support.gc_collect()
    -        self.assertIn("was never awaited", stderr.getvalue())
    -
     
     class CoroAsyncIOCompatTest(unittest.TestCase):
     
    diff --git a/lib-python/3/test/test_socket.py b/lib-python/3/test/test_socket.py
    --- a/lib-python/3/test/test_socket.py
    +++ b/lib-python/3/test/test_socket.py
    @@ -4218,12 +4218,12 @@
             self.write_file.write(self.write_msg)
             self.write_file.flush()
     
    +    @support.refcount_test
         def testMakefileCloseSocketDestroy(self):
    -        if hasattr(sys, "getrefcount"):
    -            refcount_before = sys.getrefcount(self.cli_conn)
    -            self.read_file.close()
    -            refcount_after = sys.getrefcount(self.cli_conn)
    -            self.assertEqual(refcount_before - 1, refcount_after)
    +        refcount_before = sys.getrefcount(self.cli_conn)
    +        self.read_file.close()
    +        refcount_after = sys.getrefcount(self.cli_conn)
    +        self.assertEqual(refcount_before - 1, refcount_after)
     
         def _testMakefileCloseSocketDestroy(self):
             pass
    diff --git a/lib-python/3/unittest/test/test_case.py b/lib-python/3/unittest/test/test_case.py
    --- a/lib-python/3/unittest/test/test_case.py
    +++ b/lib-python/3/unittest/test/test_case.py
    @@ -1287,8 +1287,7 @@
             with self.assertRaises(TypeError):
                 self.assertRaises((ValueError, object))
     
    -    @unittest.skipUnless(hasattr(sys, 'getrefcount'),
    -                         'test needs sys.getrefcount()')
    +    @support.refcount_test
         def testAssertRaisesRefcount(self):
             # bpo-23890: assertRaises() must not keep objects alive longer
             # than expected
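
Roughly what a refcount_test-style decorator does; the real helper in CPython's test.support also accounts for tracing, so treat this as an approximation:

    import functools, sys, unittest

    def refcount_test(test):
        @functools.wraps(test)
        def wrapper(*args, **kwargs):
            if not hasattr(sys, 'getrefcount'):          # true on PyPy
                raise unittest.SkipTest('needs sys.getrefcount()')
            return test(*args, **kwargs)
        return wrapper
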
    
    From pypy.commits at gmail.com  Wed Aug 28 03:06:50 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Wed, 28 Aug 2019 00:06:50 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: merge default into py3.6
    Message-ID: <5d66280a.1c69fb81.e54dc.5b36@mx.google.com>
    
    Author: Matti Picus 
    Branch: py3.6
    Changeset: r97317:1b696e1a6058
    Date: 2019-08-28 09:50 +0300
    http://bitbucket.org/pypy/pypy/changeset/1b696e1a6058/
    
    Log:	merge default into py3.6
    
    diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py
    --- a/pypy/module/posix/interp_posix.py
    +++ b/pypy/module/posix/interp_posix.py
    @@ -2225,9 +2225,12 @@
             _sigcheck.space = space
             return space.newbytes(rurandom.urandom(context, size, _signal_checker))
         except OSError as e:
    -        # 'rurandom' should catch and retry internally if it gets EINTR
    -        # (at least in os.read(), which is probably enough in practice)
    -        raise wrap_oserror(space, e, eintr_retry=False)
    +        # CPython raises NotImplementedError if /dev/urandom cannot be found.
    +        # To maximize compatibility, we should also raise NotImplementedError
    +        # and not OSError (although CPython also raises OSError in case it
    +        # could open /dev/urandom but there are further problems).
    +        raise wrap_oserror(space, e,
    +            w_exception_class=space.w_NotImplementedError, einter_retry=False)
     
     def ctermid(space):
         """ctermid() -> string
    diff --git a/rpython/rlib/rarithmetic.py b/rpython/rlib/rarithmetic.py
    --- a/rpython/rlib/rarithmetic.py
    +++ b/rpython/rlib/rarithmetic.py
    @@ -878,9 +878,8 @@
         Raises ParseStringOverflowError in case the result does not fit.
         """
         from rpython.rlib.rstring import (
    -        NumberStringParser, ParseStringOverflowError, strip_spaces)
    -    s = literal = strip_spaces(s)
    -    p = NumberStringParser(s, literal, base, 'int',
    +        NumberStringParser, ParseStringOverflowError)
    +    p = NumberStringParser(s, s, base, 'int',
                                allow_underscores=allow_underscores,
                                no_implicit_octal=no_implicit_octal)
         base = p.base
    diff --git a/rpython/rlib/rbigint.py b/rpython/rlib/rbigint.py
    --- a/rpython/rlib/rbigint.py
    +++ b/rpython/rlib/rbigint.py
    @@ -298,12 +298,14 @@
             and returns an rbigint."""
             from rpython.rlib.rstring import NumberStringParser, \
                 strip_spaces
    -        s = literal = strip_spaces(s)
    +        s = literal = strip_spaces(s) # XXX could get rid of this slice
    +        end = len(s)
             if (s.endswith('l') or s.endswith('L')) and base < 22:
                 # in base 22 and above, 'L' is a valid digit!  try: long('L',22)
    -            s = s[:-1]
    +            end -= 1
             parser = NumberStringParser(s, literal, base, 'long',
    -                                    allow_underscores=allow_underscores)
    +                                    allow_underscores=allow_underscores,
    +                                    end=end)
             return rbigint._from_numberstring_parser(parser)
     
         @staticmethod
    diff --git a/rpython/rlib/rstring.py b/rpython/rlib/rstring.py
    --- a/rpython/rlib/rstring.py
    +++ b/rpython/rlib/rstring.py
    @@ -500,26 +500,34 @@
                                    (self.fname, self.original_base))
     
         def __init__(self, s, literal, base, fname, allow_underscores=False,
    -                 no_implicit_octal=False):
    +                 no_implicit_octal=False, start=0, end=-1):
             self.fname = fname
             sign = 1
    -        if s.startswith('-'):
    +        self.s = s
    +        self.start = start
    +        if end == -1:
    +            end = len(s)
    +        self.end = end
    +        self._strip_spaces()
    +        if self._startswith('-'):
                 sign = -1
    -            s = strip_spaces(s[1:])
    -        elif s.startswith('+'):
    -            s = strip_spaces(s[1:])
    +            self.start += 1
    +            self._strip_spaces()
    +        elif self._startswith('+'):
    +            self.start += 1
    +            self._strip_spaces()
             self.sign = sign
             self.original_base = base
             self.allow_underscores = allow_underscores
     
             if base == 0:
    -            if s.startswith('0x') or s.startswith('0X'):
    +            if self._startswith('0x') or self._startswith('0X'):
                     base = 16
    -            elif s.startswith('0b') or s.startswith('0B'):
    +            elif self._startswith('0b') or self._startswith('0B'):
                     base = 2
    -            elif s.startswith('0'): # also covers the '0o' case
    -                if no_implicit_octal and not (s.startswith('0o') or
    -                                              s.startswith('0O')):
    +            elif self._startswith('0'): # also covers the '0o' case
    +                if no_implicit_octal and not (self._startswith('0o') or
    +                                              self._startswith('0O')):
                         base = 1    # this makes only the digit '0' valid...
                     else:
                         base = 8
    @@ -530,30 +538,44 @@
             self.base = base
     
             # Leading underscores are not allowed
    -        if s.startswith('_'):
    +        if self._startswith('_'):
                 self.error()
     
    -        if base == 16 and (s.startswith('0x') or s.startswith('0X')):
    -            s = s[2:]
    -        if base == 8 and (s.startswith('0o') or s.startswith('0O')):
    -            s = s[2:]
    -        if base == 2 and (s.startswith('0b') or s.startswith('0B')):
    -            s = s[2:]
    -        if not s:
    +        if base == 16 and (self._startswith('0x') or self._startswith('0X')):
    +            self.start += 2
    +        if base == 8 and (self._startswith('0o') or self._startswith('0O')):
    +            self.start += 2
    +        if base == 2 and (self._startswith('0b') or self._startswith('0B')):
    +            self.start += 2
    +        if self.start == self.end:
                 self.error()
    -        self.s = s
    -        self.n = len(s)
    -        self.i = 0
    +        self.i = self.start
    +
    +    def _startswith(self, prefix):
    +        return startswith(self.s, prefix, start=self.start, end=self.end)
    +
    +    def _strip_spaces(self):
    +        # XXX this is not locale-dependent
    +        p = self.start
    +        q = self.end
    +        s = self.s
    +        while p < q and s[p] in ' \f\n\r\t\v':
    +            p += 1
    +        while p < q and s[q-1] in ' \f\n\r\t\v':
    +            q -= 1
    +        assert q >= p
    +        self.start = p
    +        self.end = q
     
         def rewind(self):
    -        self.i = 0
    +        self.i = self.start
     
         def next_digit(self): # -1 => exhausted
    -        if self.i < self.n:
    +        if self.i < self.end:
                 c = self.s[self.i]
                 if self.allow_underscores and c == '_':
                     self.i += 1
    -                if self.i >= self.n:
    +                if self.i >= self.end:
                         self.error()
                     c = self.s[self.i]
                 digit = ord(c)
    @@ -576,7 +598,7 @@
             # After exhausting all n digits in next_digit(), you can walk them
             # again in reverse order by calling prev_digit() exactly n times
             i = self.i - 1
    -        assert i >= 0
    +        assert i >= self.start
             self.i = i
             c = self.s[i]
             if self.allow_underscores and c == '_':
    diff --git a/rpython/rlib/test/test_rarithmetic.py b/rpython/rlib/test/test_rarithmetic.py
    --- a/rpython/rlib/test/test_rarithmetic.py
    +++ b/rpython/rlib/test/test_rarithmetic.py
    @@ -337,6 +337,10 @@
             res = self.interpret(f, [123])
             assert res == 4 + 2
     
    +    def test_string_to_int_translates(self):
    +        def f(s):
    +            return string_to_int(str(s))
     +        assert self.interpret(f, [123]) == 123
     
     def test_int_real_union():
         from rpython.rtyper.lltypesystem.rffi import r_int_real
    diff --git a/rpython/rlib/test/test_rbigint.py b/rpython/rlib/test/test_rbigint.py
    --- a/rpython/rlib/test/test_rbigint.py
    +++ b/rpython/rlib/test/test_rbigint.py
    @@ -356,6 +356,29 @@
             assert rbigint.fromstr('123L', 21).tolong() == 441 + 42 + 3
             assert rbigint.fromstr('1891234174197319').tolong() == 1891234174197319
     
    +    def test__from_numberstring_parser_rewind_bug(self):
    +        from rpython.rlib.rstring import NumberStringParser
    +        s = "-99"
    +        p = NumberStringParser(s, s, 10, 'int')
    +        assert p.sign == -1
    +        res = p.next_digit()
    +        assert res == 9
    +        res = p.next_digit()
    +        assert res == 9
    +        res = p.next_digit()
    +        assert res == -1
    +        p.rewind()
    +        res = p.next_digit()
    +        assert res == 9
    +        res = p.next_digit()
    +        assert res == 9
    +        res = p.next_digit()
    +        assert res == -1
    +
    +    @given(longs)
    +    def test_fromstr_hypothesis(self, l):
    +        assert rbigint.fromstr(str(l)).tolong() == l
    +
         def test_from_numberstring_parser(self):
             from rpython.rlib.rstring import NumberStringParser
             parser = NumberStringParser("1231231241", "1231231241", 10, "long")
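
     For illustration, a minimal sketch (not part of the changeset) of how the new
     start/end arguments let NumberStringParser work on a slice of a larger string
     without copying it first; the input values here are made up:

         from rpython.rlib.rstring import NumberStringParser

         s = "value=  -42  ;"
         # parse only s[6:13], i.e. "  -42  ", without slicing the string
         p = NumberStringParser(s, s, 10, 'int', start=6, end=13)
         assert p.sign == -1
         digits = []
         d = p.next_digit()
         while d != -1:          # -1 means the digits are exhausted
             digits.append(d)
             d = p.next_digit()
         assert digits == [4, 2]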
    
    From pypy.commits at gmail.com  Wed Aug 28 03:53:35 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Wed, 28 Aug 2019 00:53:35 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: typo
    Message-ID: <5d6632ff.1c69fb81.ca20f.f3a6@mx.google.com>
    
    Author: Matti Picus 
    Branch: py3.6
    Changeset: r97318:f922a8f2c952
    Date: 2019-08-28 10:52 +0300
    http://bitbucket.org/pypy/pypy/changeset/f922a8f2c952/
    
    Log:	typo
    
    diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py
    --- a/pypy/module/posix/interp_posix.py
    +++ b/pypy/module/posix/interp_posix.py
    @@ -2230,7 +2230,7 @@
             # and not OSError (although CPython also raises OSError in case it
             # could open /dev/urandom but there are further problems).
             raise wrap_oserror(space, e,
    -            w_exception_class=space.w_NotImplementedError, einter_retry=False)
    +            w_exception_class=space.w_NotImplementedError, eintr_retry=False)
     
     def ctermid(space):
         """ctermid() -> string
    
    From pypy.commits at gmail.com  Wed Aug 28 07:55:40 2019
    From: pypy.commits at gmail.com (stevie_92)
    Date: Wed, 28 Aug 2019 04:55:40 -0700 (PDT)
    Subject: [pypy-commit] pypy cpyext-gc-cycle: Fixed bug when traversing rrc
     lists in gc
    Message-ID: <5d666bbc.1c69fb81.44941.dcb7@mx.google.com>
    
    Author: Stefan Beyer 
    Branch: cpyext-gc-cycle
    Changeset: r97319:d326d15810a4
    Date: 2019-08-28 13:54 +0200
    http://bitbucket.org/pypy/pypy/changeset/d326d15810a4/
    
    Log:	Fixed bug when traversing rrc lists in gc
    
    diff --git a/pypy/module/cpyext/state.py b/pypy/module/cpyext/state.py
    --- a/pypy/module/cpyext/state.py
    +++ b/pypy/module/cpyext/state.py
    @@ -192,7 +192,7 @@
     
                         # special traverse for list
                         if self.C._PyList_CheckExact(pyobj) != 0:
    -                        if pyobj.c_ob_pypy_link != 0:
    +                        if pyobj.c_ob_pypy_link != 0: # actually a refcount now
                                 w_obj = from_ref(space, pyobj)
                                 if w_obj:
                                     debug_print('rrc list traverse ', pyobj)
    diff --git a/rpython/memory/gc/rrc/mark.py b/rpython/memory/gc/rrc/mark.py
    --- a/rpython/memory/gc/rrc/mark.py
    +++ b/rpython/memory/gc/rrc/mark.py
    @@ -17,7 +17,7 @@
     
             # Only trace and mark rawrefcounted object if we are not doing
             # something special, like building gc.garbage.
    -        if (self.state == self.STATE_MARKING and self.cycle_enabled):
    +        if self.state == self.STATE_MARKING and self.cycle_enabled:
                 merged_old_list = False
                 # check objects with finalizers from last collection cycle
                 if not self._gc_list_is_empty(self.pyobj_old_list):
    @@ -58,10 +58,19 @@
             self.refcnt_dict.foreach(self._fix_refcnt_back, None)
             self.refcnt_dict.delete()
             self.refcnt_dict = self.gc.AddressDict()
    +        self.use_refcntdict = False
     
             self.state = self.STATE_DEFAULT
             return True
     
    +    def to_obj(self, pyobject):
    +        if self.use_refcntdict:
    +            obj = self.refcnt_dict.get(pyobject)
    +        else:
    +            obj = llmemory.cast_int_to_adr(
    +                self._pyobj(pyobject).c_ob_pypy_link)
    +        return llmemory.cast_adr_to_ptr(obj, llmemory.GCREF)
    +
         def _collect_roots(self, pygclist):
             # Initialize the cyclic refcount with the real refcount.
             self._collect_roots_init_list(pygclist)
    @@ -69,6 +78,7 @@
             # Save the real refcount of objects at border
             self.p_list_old.foreach(self._obj_save_refcnt, None)
             self.o_list_old.foreach(self._obj_save_refcnt, None)
    +        self.use_refcntdict = True
     
             # Subtract all internal refcounts from the cyclic refcount
             # of rawrefcounted objects
    
    From pypy.commits at gmail.com  Wed Aug 28 08:36:18 2019
    From: pypy.commits at gmail.com (stevie_92)
    Date: Wed, 28 Aug 2019 05:36:18 -0700 (PDT)
    Subject: [pypy-commit] pypy cpyext-gc-cycle: Adapted rrc gc to support
     incremental collections (still need to reduce pauses)
    Message-ID: <5d667542.1c69fb81.f2d5b.c40a@mx.google.com>
    
    Author: Stefan Beyer 
    Branch: cpyext-gc-cycle
    Changeset: r97320:adc05b1fd46b
    Date: 2019-08-28 14:35 +0200
    http://bitbucket.org/pypy/pypy/changeset/adc05b1fd46b/
    
    Log:	Adapted rrc gc to support incremental collections (still need to
    	reduce pauses)
    
    diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py
    --- a/rpython/memory/gc/incminimark.py
    +++ b/rpython/memory/gc/incminimark.py
    @@ -2402,9 +2402,12 @@
                     self.visit_all_objects()
                     #
                     # If enabled, do a major collection step for rrc objects.
    +                # TODO: move up before "if remaining >= estimate // 2" to
    +                #       improve pause times, issues:
    +                #         - (non-inc) mark expects all objects to be marked
    +                #         - both do not rescan nonstack-roots
                     if self.rrc_enabled:
    -                    while not rrc_finished: # TODO: remove this line to do incremental collection
    -                        rrc_finished = self.rrc_gc.major_collection_trace_step()
    +                    rrc_finished = self.rrc_gc.major_collection_trace_step()
                     else:
                         rrc_finished = True
     
    diff --git a/rpython/memory/gc/rrc/incmark.py b/rpython/memory/gc/rrc/incmark.py
    --- a/rpython/memory/gc/rrc/incmark.py
    +++ b/rpython/memory/gc/rrc/incmark.py
    @@ -12,9 +12,9 @@
                 self._debug_check_consistency(print_label="end-mark")
                 return True
     
    -        elif self.state == self.STATE_DEFAULT:
    -            # First, untrack all tuples with only non-gc rrc objects and promote
    -            # all other tuples to the pyobj_list
    +        if self.state == self.STATE_DEFAULT:
    +            # First, untrack all tuples with only non-gc rrc objects and
    +            # promote all other tuples to the pyobj_list
                 self._untrack_tuples()
     
                 merged_old_list = False
    @@ -47,17 +47,25 @@
                 self.state = self.STATE_MARKING
                 return False
     
    -        elif self.state == self.STATE_MARKING:
    +        if self.state == self.STATE_MARKING:
                 # mark all objects reachable from rawrefcounted roots
    -            self._mark_rawrefcount()
    +            all_rrc_marked = self._mark_rawrefcount()
     
    -            self._debug_check_consistency(print_label="before-fin")
    -            self.state = self.STATE_GARBAGE_MARKING
    +            if (all_rrc_marked and not self.gc.objects_to_trace.non_empty() and
    +                    not self.gc.more_objects_to_trace.non_empty()):
    +                # all objects have been marked, dead objects will stay dead
    +                self._debug_check_consistency(print_label="before-fin")
    +                self.state = self.STATE_GARBAGE_MARKING
    +
                 return False
     
    -        # now move all dead objs still in pyob_list to garbage
    -        # dead -> pyobj_old_list
    -        # live -> set cyclic refcount to > 0
    +        # we are finished with marking, now finish things up
    +        ll_assert(self.state == self.STATE_GARBAGE_MARKING, "invalid state")
    +
    +        # sync snapshot with pyob_list:
    +        #  * move all dead objs still in pyob_list to pyobj_old_list
    +        #  * for all other objects (in snapshot and new),
    +        #    set their cyclic refcount to > 0, to mark them as live
             pygchdr = self.pyobj_list.c_gc_next
             while pygchdr <> self.pyobj_list:
                 next_old = pygchdr.c_gc_next
    @@ -75,13 +83,15 @@
                     pygchdr.c_gc_refs = 1 # new object, keep alive
                 pygchdr = next_old
     
    -        if self._find_garbage(False):  # handle legacy finalizers
    +        # handle legacy finalizers (assumption: not a lot of legacy finalizers,
    +        # so no need to do it incrementally)
    +        if self._find_garbage(False):
                 self._mark_garbage(False)
                 self._debug_check_consistency(print_label="end-legacy-fin")
             self.state = self.STATE_DEFAULT
     
    -        # We are finished with marking, now finish things up
    -        found_finalizer = self._find_finalizer()  # modern finalizers
    +        # handle modern finalizers
    +        found_finalizer = self._find_finalizer()
             if found_finalizer:
                 self._gc_list_move(self.pyobj_old_list,
                                    self.pyobj_isolate_list)
    @@ -114,19 +124,22 @@
             # as long as new objects with cyclic a refcount > 0 or alive border
             # objects are found, increment the refcount of all referenced objects
             # of those newly found objects
    +        reached_limit = False
             found_alive = True
    +        simple_limit = 0
             #
    -        while found_alive: # TODO: working set to improve performance?
    +        while found_alive and not reached_limit: # TODO: working set to improve performance?
                 found_alive = False
                 for i in range(0, self.total_objs):
                     obj = self.snapshot_objs[i]
                     found_alive |= self._mark_rawrefcount_obj(obj)
    -        #
    -        # now all rawrefcounted objects, which are alive, have a cyclic
    -        # refcount > 0 or are marked
    +            simple_limit += 1
    +            if simple_limit > 3:
     +                reached_limit = True
    +        return not reached_limit # are there any objects left?
     
         def _mark_rawrefcount_obj(self, snapobj):
    -        if snapobj.status == 0:
    +        if snapobj.status == 0: # already processed
                 return False
     
             alive = snapobj.refcnt_external > 0
    @@ -149,7 +162,7 @@
                     intobj = snapobj.pypy_link
                     obj = llmemory.cast_int_to_adr(intobj)
                     self.gc.objects_to_trace.append(obj)
    -                self.gc.visit_all_objects()
    +                self.gc.visit_all_objects()  # TODO: remove to improve pause times
                 # mark as processed
                 snapobj.status = 0
             return alive
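
     Schematically, the driver change in incminimark.py above amounts to the
     following (a simplified illustrative sketch, not the actual gc code): each
     major-collection increment now performs a single bounded trace step, and the
     rrc phase only ends once that step reports completion.

         def rrc_major_collection_step(gc):
             # one increment of rrc marking; returns True when the phase is done
             if gc.rrc_enabled:
                 return gc.rrc_gc.major_collection_trace_step()
             return True

         # previously the whole rrc marking ran to completion inside one step:
         #     while not rrc_finished:
         #         rrc_finished = gc.rrc_gc.major_collection_trace_step()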
    
    From pypy.commits at gmail.com  Wed Aug 28 10:03:38 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Wed, 28 Aug 2019 07:03:38 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: fix typo
    Message-ID: <5d6689ba.1c69fb81.2c325.5136@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97321:c0d46759b1dc
    Date: 2019-08-28 15:02 +0100
    http://bitbucket.org/pypy/pypy/changeset/c0d46759b1dc/
    
    Log:	fix typo
    
    diff --git a/lib-python/3/test/test_class.py b/lib-python/3/test/test_class.py
    --- a/lib-python/3/test/test_class.py
    +++ b/lib-python/3/test/test_class.py
    @@ -588,7 +588,7 @@
             self.assertEqual(A() + 1, 'summa')
     
             name2 = str(b'__add__', 'ascii')
    -        if support.check_impl_detail()::
    +        if support.check_impl_detail():
                 self.assertIsNot(name2, '__add__')
                 self.assertIsNot(name2, name)
             type.__delattr__(A, name2)
    
    From pypy.commits at gmail.com  Wed Aug 28 10:42:59 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Wed, 28 Aug 2019 07:42:59 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: Fix error handler calls when encoding to
     utf-16
    Message-ID: <5d6692f3.1c69fb81.89bc9.9a7e@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97322:7ace73ce896a
    Date: 2019-08-28 15:41 +0100
    http://bitbucket.org/pypy/pypy/changeset/7ace73ce896a/
    
    Log:	Fix error handler calls when encoding to utf-16
    
    diff --git a/pypy/interpreter/unicodehelper.py b/pypy/interpreter/unicodehelper.py
    --- a/pypy/interpreter/unicodehelper.py
    +++ b/pypy/interpreter/unicodehelper.py
    @@ -1215,21 +1215,22 @@
         while pos < size:
             try:
                 cp = rutf8.codepoint_at_pos(s, pos)
    +            pos = rutf8.next_codepoint_pos(s, pos)
             except IndexError:
                 # malformed codepoint, blindly use ch
                 pos += 1
                 if errorhandler:
                     r, newindex, rettype = errorhandler(
                         errors, public_encoding_name, 'malformed unicode',
    -                    s, pos - 1, pos)
    +                    s, index, index + 1)
                     if rettype == 'u':
                         for cp in rutf8.Utf8StringIterator(r):
                             if cp < 0xD800:
                                 _STORECHAR(result, cp, byteorder)
                             else:
    -                            errorhandler('strict', public_encoding_name,
    -                                         'malformed unicode',
    -                                     s, pos-1, pos)
    +                            errorhandler(
    +                                'strict', public_encoding_name,
    +                                'malformed unicode', s, index, index + 1)
                     else:
                         for ch in r:
                             cp = ord(ch)
    @@ -1238,7 +1239,7 @@
                             else:
                                 errorhandler('strict', public_encoding_name,
                                              'malformed unicode',
    -                                     s, pos-1, pos)
    +                                     s, index, index + 1)
                 else:
                     cp = ord(s[pos])
                     _STORECHAR(result, cp, byteorder)
    @@ -1253,7 +1254,7 @@
             else:
                 r, newindex, rettype = errorhandler(
                     errors, public_encoding_name, 'surrogates not allowed',
    -                s, pos, pos+1)
    +                s, index, index+1)
                 if rettype == 'u':
                     for cp in rutf8.Utf8StringIterator(r):
                         if cp < 0xD800 or allow_surrogates:
    @@ -1261,7 +1262,7 @@
                         else:
                             errorhandler('strict', public_encoding_name,
                                          'surrogates not allowed',
    -                                     s, pos, pos+1)
    +                                     s, index, index+1)
                 else:
                     for ch in r:
                         cp = ord(ch)
    @@ -1270,13 +1271,11 @@
                         else:
                             errorhandler('strict', public_encoding_name,
                                          'surrogates not allowed',
    -                                 s, pos, pos+1)
    +                                 s, index, index+1)
                 if index != newindex:  # Should be uncommon
                     index = newindex
                     pos = rutf8._pos_at_index(s, newindex)
                 continue
    -
    -        pos = rutf8.next_codepoint_pos(s, pos)
             index += 1
     
         return result.build()
    @@ -1516,19 +1515,19 @@
         return result.build()
     
     def utf8_encode_utf_32(s, errors,
    -                          errorhandler=None, allow_surrogates=True):
    +                       errorhandler=None, allow_surrogates=True):
         return utf8_encode_utf_32_helper(s, errors, errorhandler,
                                             allow_surrogates, "native",
                                             'utf-32-' + BYTEORDER2)
     
     def utf8_encode_utf_32_be(s, errors,
    -                                  errorhandler=None, allow_surrogates=True):
    +                          errorhandler=None, allow_surrogates=True):
         return utf8_encode_utf_32_helper(s, errors, errorhandler,
                                             allow_surrogates, "big",
                                             'utf-32-be')
     
     def utf8_encode_utf_32_le(s, errors,
    -                                  errorhandler=None, allow_surrogates=True):
    +                          errorhandler=None, allow_surrogates=True):
         return utf8_encode_utf_32_helper(s, errors, errorhandler,
                                             allow_surrogates, "little",
                                             'utf-32-le')
    diff --git a/pypy/module/_codecs/test/test_codecs.py b/pypy/module/_codecs/test/test_codecs.py
    --- a/pypy/module/_codecs/test/test_codecs.py
    +++ b/pypy/module/_codecs/test/test_codecs.py
    @@ -271,10 +271,10 @@
                 assert 'unexpected end of data' in str(exc.value)
                 useq = bseq.decode('utf-8', 'replace')
                 assert  useq == u'\ufffd', (bseq, useq)
    -            assert ((b'aaaa' + bseq + b'bbbb').decode('utf-8', 'replace') == 
    +            assert ((b'aaaa' + bseq + b'bbbb').decode('utf-8', 'replace') ==
                         u'aaaa\ufffdbbbb')
                 assert bseq.decode('utf-8', 'ignore') == ''
    -            assert ((b'aaaa' + bseq + b'bbbb').decode('utf-8', 'ignore') == 
    +            assert ((b'aaaa' + bseq + b'bbbb').decode('utf-8', 'ignore') ==
                         u'aaaabbbb')
     
         def test_invalid_cb_for_3bytes_seq(self):
    @@ -337,7 +337,7 @@
                 exc = raises(UnicodeDecodeError, seq.decode, 'utf-8')
                 assert err in str(exc.value)
                 assert seq.decode('utf-8', 'replace') == res
    -            assert ((b'aaaa' + seq + b'bbbb').decode('utf-8', 'replace') == 
    +            assert ((b'aaaa' + seq + b'bbbb').decode('utf-8', 'replace') ==
                              'aaaa' + res + 'bbbb')
                 res = res.replace('\ufffd', '')
                 assert seq.decode('utf-8', 'ignore') == res
    @@ -425,7 +425,7 @@
                 exc = raises(UnicodeDecodeError, seq.decode, 'utf-8')
                 assert err in str(exc.value)
                 assert seq.decode('utf-8', 'replace') == res
    -            assert ((b'aaaa' + seq + b'bbbb').decode('utf-8', 'replace') == 
    +            assert ((b'aaaa' + seq + b'bbbb').decode('utf-8', 'replace') ==
                              'aaaa' + res + 'bbbb')
                 res = res.replace('\ufffd', '')
                 assert seq.decode('utf-8', 'ignore') == res
    @@ -1148,6 +1148,11 @@
                     '[]'.encode(encoding))
                 assert (u'[\udc80]'.encode(encoding, "replace") ==
                     '[?]'.encode(encoding))
    +            # surrogate sequences
    +            assert (u'[\ud800\udc80]'.encode(encoding, "ignore") ==
    +                '[]'.encode(encoding))
    +            assert (u'[\ud800\udc80]'.encode(encoding, "replace") ==
    +                '[??]'.encode(encoding))
             for encoding, ill_surrogate in [('utf-8', b'\xed\xb2\x80'),
                                             ('utf-16-le', b'\x80\xdc'),
                                             ('utf-16-be', b'\xdc\x80'),
    @@ -1167,7 +1172,7 @@
                     assert test_string.encode(encoding, 'surrogatepass') == test_sequence
                     assert test_sequence.decode(encoding, 'surrogatepass') == test_string
                     assert test_sequence.decode(encoding, 'ignore') == before + after
    -                assert test_sequence.decode(encoding, 'replace') == (before + 
    +                assert test_sequence.decode(encoding, 'replace') == (before +
                                                     ill_formed_sequence_replace + after), str(
                     (encoding, test_sequence, before + ill_formed_sequence_replace + after))
                     backslashreplace = ''.join('\\x%02x' % b for b in ill_surrogate)
    @@ -1388,7 +1393,7 @@
             # from stdlib tests, bad byte: \xa5 is unmapped in iso-8859-3
             assert (b"foo\xa5bar".decode("iso-8859-3", "surrogateescape") ==
                          "foo\udca5bar")
    -        assert ("foo\udca5bar".encode("iso-8859-3", "surrogateescape") == 
    +        assert ("foo\udca5bar".encode("iso-8859-3", "surrogateescape") ==
                              b"foo\xa5bar")
     
         def test_warn_escape_decode(self):
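
     At app level the start/end values passed to an error handler are codepoint
     indices into the unicode string, which is what the fix above restores for the
     utf-16 encoder; a small illustrative example (the handler name is made up):

         import codecs

         def show_and_replace(err):
             # err.start/err.end index codepoints, not utf-8 byte positions
             print(err.reason, err.start, err.end)
             return (u'?', err.end)

         codecs.register_error('show-and-replace', show_and_replace)
         u'ab\udc80cd'.encode('utf-16-le', 'show-and-replace')
         # prints: surrogates not allowed 2 3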
    
    From pypy.commits at gmail.com  Wed Aug 28 11:36:44 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Wed, 28 Aug 2019 08:36:44 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: Remove confusing and unused error path
     for invalid-at-interplevel input in encoders
    Message-ID: <5d669f8c.1c69fb81.1244b.274b@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97323:9abe3f6c09a4
    Date: 2019-08-28 16:26 +0100
    http://bitbucket.org/pypy/pypy/changeset/9abe3f6c09a4/
    
    Log:	Remove confusing and unused error path for invalid-at-interplevel
    	input in encoders
    
    diff --git a/pypy/interpreter/unicodehelper.py b/pypy/interpreter/unicodehelper.py
    --- a/pypy/interpreter/unicodehelper.py
    +++ b/pypy/interpreter/unicodehelper.py
    @@ -1213,37 +1213,7 @@
         pos = 0
         index = 0
         while pos < size:
    -        try:
    -            cp = rutf8.codepoint_at_pos(s, pos)
    -            pos = rutf8.next_codepoint_pos(s, pos)
    -        except IndexError:
    -            # malformed codepoint, blindly use ch
    -            pos += 1
    -            if errorhandler:
    -                r, newindex, rettype = errorhandler(
    -                    errors, public_encoding_name, 'malformed unicode',
    -                    s, index, index + 1)
    -                if rettype == 'u':
    -                    for cp in rutf8.Utf8StringIterator(r):
    -                        if cp < 0xD800:
    -                            _STORECHAR(result, cp, byteorder)
    -                        else:
    -                            errorhandler(
    -                                'strict', public_encoding_name,
    -                                'malformed unicode', s, index, index + 1)
    -                else:
    -                    for ch in r:
    -                        cp = ord(ch)
    -                        if cp < 0xD800:
    -                            _STORECHAR(result, cp, byteorder)
    -                        else:
    -                            errorhandler('strict', public_encoding_name,
    -                                         'malformed unicode',
    -                                     s, index, index + 1)
    -            else:
    -                cp = ord(s[pos])
    -                _STORECHAR(result, cp, byteorder)
    -            continue
    +        cp = rutf8.codepoint_at_pos(s, pos)
             if cp < 0xD800:
                 _STORECHAR(result, cp, byteorder)
             elif cp >= 0x10000:
    @@ -1276,6 +1246,7 @@
                     index = newindex
                     pos = rutf8._pos_at_index(s, newindex)
                 continue
    +        pos = rutf8.next_codepoint_pos(s, pos)
             index += 1
     
         return result.build()
    @@ -1450,40 +1421,7 @@
         pos = 0
         index = 0
         while pos < size:
    -        try:
    -            ch = rutf8.codepoint_at_pos(s, pos)
    -            pos = rutf8.next_codepoint_pos(s, pos)
    -        except IndexError:
    -            # malformed codepoint, blindly use ch
    -            ch = ord(s[pos])
    -            pos += 1
    -            if errorhandler:
    -                r, newindex, rettype = errorhandler(
    -                    errors, public_encoding_name, 'malformed unicode',
    -                    s, index, index+1)
    -                if rettype == 'u' and r:
    -                    for cp in rutf8.Utf8StringIterator(r):
    -                        if cp < 0xD800:
    -                            _STORECHAR32(result, cp, byteorder)
    -                        else:
    -                            errorhandler('strict', public_encoding_name,
    -                                     'malformed unicode',
    -                                 s, index, index+1)
    -                elif r:
    -                    for ch in r:
    -                        cp = ord(ch)
    -                        if cp < 0xD800:
    -                            _STORECHAR32(result, cp, byteorder)
    -                        else:
    -                            errorhandler('strict', public_encoding_name,
    -                                     'malformed unicode',
    -                                 s, index, index+1)
    -                else:
    -                    _STORECHAR32(result, ch, byteorder)
    -            else:
    -                _STORECHAR32(result, ch, byteorder)
    -            index += 1
    -            continue
    +        ch = rutf8.codepoint_at_pos(s, pos)
             if not allow_surrogates and 0xD800 <= ch < 0xE000:
                 r, newindex, rettype = errorhandler(
                     errors, public_encoding_name, 'surrogates not allowed',
    @@ -1509,6 +1447,7 @@
                     index = newindex
                     pos = rutf8._pos_at_index(s, newindex)
                 continue
    +        pos = rutf8.next_codepoint_pos(s, pos)
             _STORECHAR32(result, ch, byteorder)
             index += 1
     
    
    From pypy.commits at gmail.com  Wed Aug 28 15:32:47 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Wed, 28 Aug 2019 12:32:47 -0700 (PDT)
    Subject: [pypy-commit] pypy default: CPython compatibility (bpo-31285)
    Message-ID: <5d66d6df.1c69fb81.9d380.0bfc@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: 
    Changeset: r97325:578667b3fef9
    Date: 2019-08-28 20:32 +0100
    http://bitbucket.org/pypy/pypy/changeset/578667b3fef9/
    
    Log:	CPython compatibility (bpo-31285)
    
    diff --git a/pypy/module/_warnings/interp_warnings.py b/pypy/module/_warnings/interp_warnings.py
    --- a/pypy/module/_warnings/interp_warnings.py
    +++ b/pypy/module/_warnings/interp_warnings.py
    @@ -354,7 +354,7 @@
             return None
     
         # Split the source into lines.
    -    w_source_list = space.call_method(w_source, "splitlines")
    +    w_source_list = space.call_method(space.w_text, "splitlines", w_source)
     
         # Get the source line.
         w_source_line = space.getitem(w_source_list, space.newint(lineno - 1))
    diff --git a/pypy/module/_warnings/test/test_warnings.py b/pypy/module/_warnings/test/test_warnings.py
    --- a/pypy/module/_warnings/test/test_warnings.py
    +++ b/pypy/module/_warnings/test/test_warnings.py
    @@ -89,3 +89,20 @@
                                  u':831: UserWarning: \u1234\u5678\n')
             finally:
                 sys.stderr = old
    +
    +    def test_issue31285(self):
    +        import _warnings
    +        def get_bad_loader(splitlines_ret_val):
    +            class BadLoader:
    +                def get_source(self, fullname):
    +                    class BadSource(str):
    +                        def splitlines(self):
    +                            return splitlines_ret_val
    +                    return BadSource('spam')
    +            return BadLoader()
    +        # does not raise:
    +        _warnings.warn_explicit(
    +            'eggs', UserWarning, 'bar', 1,
    +            module_globals={'__loader__': get_bad_loader(42),
    +                            '__name__': 'foobar'})
    +
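
     The point of calling splitlines through the str type is that a str subclass
     (like BadSource above) can no longer substitute an arbitrary return value;
     roughly, at app level:

         class BadSource(str):
             def splitlines(self):
                 return 42

         s = BadSource('spam\neggs')
         s.splitlines()        # 42 -- the override wins
         str.splitlines(s)     # ['spam', 'eggs'] -- what warn_explicit now does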
    
    From pypy.commits at gmail.com  Thu Aug 29 04:08:10 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Thu, 29 Aug 2019 01:08:10 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: Merged in
     lunixbochs/pypy-1/Ryan-Hileman/add-support-for-zipfile-stdlib-1562420744699
     (pull request #648)
    Message-ID: <5d6787ea.1c69fb81.78d4a.b678@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6
    Changeset: r97327:5b42890d48c3
    Date: 2019-08-29 08:07 +0000
    http://bitbucket.org/pypy/pypy/changeset/5b42890d48c3/
    
    Log:	Merged in lunixbochs/pypy-1/Ryan-Hileman/add-support-for-zipfile-
    	stdlib-1562420744699 (pull request #648)
    
    	add support for zipfile stdlib
    
    diff --git a/pypy/module/sys/initpath.py b/pypy/module/sys/initpath.py
    --- a/pypy/module/sys/initpath.py
    +++ b/pypy/module/sys/initpath.py
    @@ -148,10 +148,14 @@
         OSError.
         """
         from pypy.module.sys.version import CPYTHON_VERSION
    -    dirname = '%d' % CPYTHON_VERSION[0]
    -    lib_python = os.path.join(prefix, 'lib-python')
    -    python_std_lib = os.path.join(lib_python, dirname)
    -    _checkdir(python_std_lib)
    +    lib_pyzip = os.path.join(prefix, 'python%d%d.zip' % CPYTHON_VERSION[:2])
    +    if os.path.isfile(lib_pyzip):
    +        python_std_lib = lib_pyzip
    +    else:
    +        dirname = '%d' % CPYTHON_VERSION[0]
    +        lib_python = os.path.join(prefix, 'lib-python')
    +        python_std_lib = os.path.join(lib_python, dirname)
    +        _checkdir(python_std_lib)
     
         lib_pypy = os.path.join(prefix, 'lib_pypy')
         _checkdir(lib_pypy)
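
     With this change a self-contained layout can ship the stdlib as a single
     archive named python<major><minor>.zip (e.g. python36.zip) inside the prefix
     directory; a rough sketch of how such an archive could be built (illustrative
     only, paths are examples):

         import os, zipfile

         def make_stdlib_zip(libdir, target):
             # pack the .py files with paths relative to libdir so that
             # zipimport can locate the modules under their usual names
             with zipfile.ZipFile(target, 'w', zipfile.ZIP_DEFLATED) as zf:
                 for root, dirs, files in os.walk(libdir):
                     for name in files:
                         if name.endswith('.py'):
                             full = os.path.join(root, name)
                             zf.write(full, os.path.relpath(full, libdir))

         # e.g. make_stdlib_zip('lib-python/3', 'python36.zip')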
    
    From pypy.commits at gmail.com  Thu Aug 29 04:08:20 2019
    From: pypy.commits at gmail.com (lunixbochs)
    Date: Thu, 29 Aug 2019 01:08:20 -0700 (PDT)
    Subject: [pypy-commit] pypy
     Ryan-Hileman/add-support-for-zipfile-stdlib-1562420744699: add support for
     zipfile stdlib
    Message-ID: <5d6787f4.1c69fb81.1061d.f060@mx.google.com>
    
    Author: Ryan Hileman 
    Branch: Ryan-Hileman/add-support-for-zipfile-stdlib-1562420744699
    Changeset: r97326:870bee284470
    Date: 2019-07-06 13:48 +0000
    http://bitbucket.org/pypy/pypy/changeset/870bee284470/
    
    Log:	add support for zipfile stdlib
    
    diff --git a/pypy/module/sys/initpath.py b/pypy/module/sys/initpath.py
    --- a/pypy/module/sys/initpath.py
    +++ b/pypy/module/sys/initpath.py
    @@ -148,10 +148,14 @@
         OSError.
         """
         from pypy.module.sys.version import CPYTHON_VERSION
    -    dirname = '%d' % CPYTHON_VERSION[0]
    -    lib_python = os.path.join(prefix, 'lib-python')
    -    python_std_lib = os.path.join(lib_python, dirname)
    -    _checkdir(python_std_lib)
    +    lib_pyzip = os.path.join(prefix, 'python%d%d.zip' % CPYTHON_VERSION[:2])
    +    if os.path.isfile(lib_pyzip):
    +        python_std_lib = lib_pyzip
    +    else:
    +        dirname = '%d' % CPYTHON_VERSION[0]
    +        lib_python = os.path.join(prefix, 'lib-python')
    +        python_std_lib = os.path.join(lib_python, dirname)
    +        _checkdir(python_std_lib)
     
         lib_pypy = os.path.join(prefix, 'lib_pypy')
         _checkdir(lib_pypy)
    
    From pypy.commits at gmail.com  Thu Aug 29 09:17:57 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Thu, 29 Aug 2019 06:17:57 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: hg merge default
    Message-ID: <5d67d085.1c69fb81.1859.8631@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97328:d041869f8bd4
    Date: 2019-08-29 14:16 +0100
    http://bitbucket.org/pypy/pypy/changeset/d041869f8bd4/
    
    Log:	hg merge default
    
    diff --git a/pypy/module/_warnings/interp_warnings.py b/pypy/module/_warnings/interp_warnings.py
    --- a/pypy/module/_warnings/interp_warnings.py
    +++ b/pypy/module/_warnings/interp_warnings.py
    @@ -400,7 +400,7 @@
             return None
     
         # Split the source into lines.
    -    w_source_list = space.call_method(w_source, "splitlines")
    +    w_source_list = space.call_method(space.w_text, "splitlines", w_source)
     
         # Get the source line.
         w_source_line = space.getitem(w_source_list, space.newint(lineno - 1))
    diff --git a/pypy/module/_warnings/test/test_warnings.py b/pypy/module/_warnings/test/test_warnings.py
    --- a/pypy/module/_warnings/test/test_warnings.py
    +++ b/pypy/module/_warnings/test/test_warnings.py
    @@ -107,3 +107,20 @@
                 except UnicodeEncodeError:
                     continue
                 _warnings.warn_explicit("text", UserWarning, filename, 1)
    +
    +    def test_issue31285(self):
    +        import _warnings
    +        def get_bad_loader(splitlines_ret_val):
    +            class BadLoader:
    +                def get_source(self, fullname):
    +                    class BadSource(str):
    +                        def splitlines(self):
    +                            return splitlines_ret_val
    +                    return BadSource('spam')
    +            return BadLoader()
    +        # does not raise:
    +        _warnings.warn_explicit(
    +            'eggs', UserWarning, 'bar', 1,
    +            module_globals={'__loader__': get_bad_loader(42),
    +                            '__name__': 'foobar'})
    +
    
    From pypy.commits at gmail.com  Thu Aug 29 10:15:37 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Thu, 29 Aug 2019 07:15:37 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: Update Python version numbers
    Message-ID: <5d67de09.1c69fb81.58d92.81e2@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97330:0354a250d371
    Date: 2019-08-29 15:13 +0100
    http://bitbucket.org/pypy/pypy/changeset/0354a250d371/
    
    Log:	Update Python version numbers
    
    diff --git a/pypy/module/cpyext/include/patchlevel.h b/pypy/module/cpyext/include/patchlevel.h
    --- a/pypy/module/cpyext/include/patchlevel.h
    +++ b/pypy/module/cpyext/include/patchlevel.h
    @@ -20,13 +20,13 @@
     
     /* Version parsed out into numeric values */
     #define PY_MAJOR_VERSION	3
    -#define PY_MINOR_VERSION	6
    -#define PY_MICRO_VERSION	9
    +#define PY_MINOR_VERSION	7
    +#define PY_MICRO_VERSION	4
     #define PY_RELEASE_LEVEL	PY_RELEASE_LEVEL_FINAL
     #define PY_RELEASE_SERIAL	0
     
     /* Version as a string */
    -#define PY_VERSION		"3.6.9"
    +#define PY_VERSION		"3.7.4"
     
     /* PyPy version as a string: make sure to keep this in sync with:
      *     module/sys/version.py
    diff --git a/pypy/module/sys/version.py b/pypy/module/sys/version.py
    --- a/pypy/module/sys/version.py
    +++ b/pypy/module/sys/version.py
    @@ -6,7 +6,7 @@
     from pypy.interpreter import gateway
     
     #XXX # the release serial 42 is not in range(16)
    -CPYTHON_VERSION            = (3, 6, 9, "final", 0)
    +CPYTHON_VERSION            = (3, 7, 4, "final", 0)
     #XXX # sync CPYTHON_VERSION with patchlevel.h, package.py
     CPYTHON_API_VERSION        = 1013   #XXX # sync with include/modsupport.h
     
    
    From pypy.commits at gmail.com  Thu Aug 29 12:04:16 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Thu, 29 Aug 2019 09:04:16 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: Update stdlib to 3.7.4
    Message-ID: <5d67f780.1c69fb81.78d4a.74d5@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97331:c252447e48e9
    Date: 2019-08-29 17:03 +0100
    http://bitbucket.org/pypy/pypy/changeset/c252447e48e9/
    
    Log:	Update stdlib to 3.7.4
    
    diff too long, truncating to 2000 out of 107794 lines
    
    diff --git a/lib-python/3/__future__.py b/lib-python/3/__future__.py
    --- a/lib-python/3/__future__.py
    +++ b/lib-python/3/__future__.py
    @@ -57,13 +57,14 @@
         "unicode_literals",
         "barry_as_FLUFL",
         "generator_stop",
    +    "annotations",
     ]
     
     __all__ = ["all_feature_names"] + all_feature_names
     
    -# The CO_xxx symbols are defined here under the same names used by
    -# compile.h, so that an editor search will find them here.  However,
    -# they're not exported in __all__, because they don't really belong to
    +# The CO_xxx symbols are defined here under the same names defined in
    +# code.h and used by compile.h, so that an editor search will find them here.
    +# However, they're not exported in __all__, because they don't really belong to
     # this module.
     CO_NESTED            = 0x0010   # nested_scopes
     CO_GENERATOR_ALLOWED = 0        # generators (obsolete, was 0x1000)
    @@ -74,6 +75,7 @@
     CO_FUTURE_UNICODE_LITERALS = 0x20000 # unicode string literals
     CO_FUTURE_BARRY_AS_BDFL = 0x40000
     CO_FUTURE_GENERATOR_STOP  = 0x80000 # StopIteration becomes RuntimeError in generators
    +CO_FUTURE_ANNOTATIONS     = 0x100000  # annotations become strings at runtime
     
     class _Feature:
         def __init__(self, optionalRelease, mandatoryRelease, compiler_flag):
    @@ -132,9 +134,13 @@
                                 CO_FUTURE_UNICODE_LITERALS)
     
     barry_as_FLUFL = _Feature((3, 1, 0, "alpha", 2),
    -                         (3, 9, 0, "alpha", 0),
    -                         CO_FUTURE_BARRY_AS_BDFL)
    +                          (3, 9, 0, "alpha", 0),
    +                          CO_FUTURE_BARRY_AS_BDFL)
     
     generator_stop = _Feature((3, 5, 0, "beta", 1),
    -                         (3, 7, 0, "alpha", 0),
    -                         CO_FUTURE_GENERATOR_STOP)
    +                          (3, 7, 0, "alpha", 0),
    +                          CO_FUTURE_GENERATOR_STOP)
    +
    +annotations = _Feature((3, 7, 0, "beta", 1),
    +                       (4, 0, 0, "alpha", 0),
    +                       CO_FUTURE_ANNOTATIONS)
    diff --git a/lib-python/3/_bootlocale.py b/lib-python/3/_bootlocale.py
    --- a/lib-python/3/_bootlocale.py
    +++ b/lib-python/3/_bootlocale.py
    @@ -9,19 +9,31 @@
     
     if sys.platform.startswith("win"):
         def getpreferredencoding(do_setlocale=True):
    +        if sys.flags.utf8_mode:
    +            return 'UTF-8'
             return _locale._getdefaultlocale()[1]
     else:
         try:
             _locale.CODESET
         except AttributeError:
    -        def getpreferredencoding(do_setlocale=True):
    -            # This path for legacy systems needs the more complex
    -            # getdefaultlocale() function, import the full locale module.
    -            import locale
    -            return locale.getpreferredencoding(do_setlocale)
    +        if hasattr(sys, 'getandroidapilevel'):
    +            # On Android langinfo.h and CODESET are missing, and UTF-8 is
    +            # always used in mbstowcs() and wcstombs().
    +            def getpreferredencoding(do_setlocale=True):
    +                return 'UTF-8'
    +        else:
    +            def getpreferredencoding(do_setlocale=True):
    +                if sys.flags.utf8_mode:
    +                    return 'UTF-8'
    +                # This path for legacy systems needs the more complex
    +                # getdefaultlocale() function, import the full locale module.
    +                import locale
    +                return locale.getpreferredencoding(do_setlocale)
         else:
             def getpreferredencoding(do_setlocale=True):
                 assert not do_setlocale
    +            if sys.flags.utf8_mode:
    +                return 'UTF-8'
                 result = _locale.nl_langinfo(_locale.CODESET)
                 if not result and sys.platform == 'darwin':
                     # nl_langinfo can return an empty string
    diff --git a/lib-python/3/_collections_abc.py b/lib-python/3/_collections_abc.py
    --- a/lib-python/3/_collections_abc.py
    +++ b/lib-python/3/_collections_abc.py
    @@ -589,7 +589,7 @@
             try:
                 value = next(it)
             except StopIteration:
    -            raise KeyError
    +            raise KeyError from None
             self.discard(value)
             return value
     
    @@ -746,7 +746,7 @@
     ItemsView.register(dict_items)
     
     
    -class ValuesView(MappingView):
    +class ValuesView(MappingView, Collection):
     
         __slots__ = ()
     
    @@ -808,7 +808,7 @@
             try:
                 key = next(iter(self))
             except StopIteration:
    -            raise KeyError
    +            raise KeyError from None
             value = self[key]
             del self[key]
             return key, value
    diff --git a/lib-python/3/_dummy_thread.py b/lib-python/3/_dummy_thread.py
    --- a/lib-python/3/_dummy_thread.py
    +++ b/lib-python/3/_dummy_thread.py
    @@ -14,7 +14,7 @@
     # Exports only things specified by thread documentation;
     # skipping obsolete synonyms allocate(), start_new(), exit_thread().
     __all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
    -           'interrupt_main', 'LockType']
    +           'interrupt_main', 'LockType', 'RLock']
     
     # A dummy value
     TIMEOUT_MAX = 2**31
    @@ -69,7 +69,7 @@
         available, it is safe to assume that the current process is the
         only thread.  Thus a constant can be safely returned.
         """
    -    return -1
    +    return 1
     
     def allocate_lock():
         """Dummy implementation of _thread.allocate_lock()."""
    @@ -148,6 +148,36 @@
                 hex(id(self))
             )
     
    +
    +class RLock(LockType):
    +    """Dummy implementation of threading._RLock.
    +
     +    Re-entrant lock can be acquired multiple times and needs to be released
     +    just as many times. This dummy implementation does not check whether the
    +    current thread actually owns the lock, but does accounting on the call
    +    counts.
    +    """
    +    def __init__(self):
    +        super().__init__()
    +        self._levels = 0
    +
    +    def acquire(self, waitflag=None, timeout=-1):
    +        """Aquire the lock, can be called multiple times in succession.
    +        """
    +        locked = super().acquire(waitflag, timeout)
    +        if locked:
    +            self._levels += 1
    +        return locked
    +
    +    def release(self):
    +        """Release needs to be called once for every call to acquire().
    +        """
    +        if self._levels == 0:
    +            raise error
    +        if self._levels == 1:
    +            super().release()
    +        self._levels -= 1
    +
     # Used to signal that interrupt_main was called in a "thread"
     _interrupt = False
     # True when not executing in a "thread"
    diff --git a/lib-python/3/_py_abc.py b/lib-python/3/_py_abc.py
    new file mode 100644
    --- /dev/null
    +++ b/lib-python/3/_py_abc.py
    @@ -0,0 +1,147 @@
    +from _weakrefset import WeakSet
    +
    +
    +def get_cache_token():
    +    """Returns the current ABC cache token.
    +
    +    The token is an opaque object (supporting equality testing) identifying the
    +    current version of the ABC cache for virtual subclasses. The token changes
    +    with every call to ``register()`` on any ABC.
    +    """
    +    return ABCMeta._abc_invalidation_counter
    +
    +
    +class ABCMeta(type):
    +    """Metaclass for defining Abstract Base Classes (ABCs).
    +
    +    Use this metaclass to create an ABC.  An ABC can be subclassed
    +    directly, and then acts as a mix-in class.  You can also register
    +    unrelated concrete classes (even built-in classes) and unrelated
    +    ABCs as 'virtual subclasses' -- these and their descendants will
    +    be considered subclasses of the registering ABC by the built-in
    +    issubclass() function, but the registering ABC won't show up in
    +    their MRO (Method Resolution Order) nor will method
    +    implementations defined by the registering ABC be callable (not
    +    even via super()).
    +    """
    +
    +    # A global counter that is incremented each time a class is
    +    # registered as a virtual subclass of anything.  It forces the
    +    # negative cache to be cleared before its next use.
    +    # Note: this counter is private. Use `abc.get_cache_token()` for
    +    #       external code.
    +    _abc_invalidation_counter = 0
    +
    +    def __new__(mcls, name, bases, namespace, **kwargs):
    +        cls = super().__new__(mcls, name, bases, namespace, **kwargs)
    +        # Compute set of abstract method names
    +        abstracts = {name
    +                     for name, value in namespace.items()
    +                     if getattr(value, "__isabstractmethod__", False)}
    +        for base in bases:
    +            for name in getattr(base, "__abstractmethods__", set()):
    +                value = getattr(cls, name, None)
    +                if getattr(value, "__isabstractmethod__", False):
    +                    abstracts.add(name)
    +        cls.__abstractmethods__ = frozenset(abstracts)
    +        # Set up inheritance registry
    +        cls._abc_registry = WeakSet()
    +        cls._abc_cache = WeakSet()
    +        cls._abc_negative_cache = WeakSet()
    +        cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
    +        return cls
    +
    +    def register(cls, subclass):
    +        """Register a virtual subclass of an ABC.
    +
    +        Returns the subclass, to allow usage as a class decorator.
    +        """
    +        if not isinstance(subclass, type):
    +            raise TypeError("Can only register classes")
    +        if issubclass(subclass, cls):
    +            return subclass  # Already a subclass
    +        # Subtle: test for cycles *after* testing for "already a subclass";
    +        # this means we allow X.register(X) and interpret it as a no-op.
    +        if issubclass(cls, subclass):
    +            # This would create a cycle, which is bad for the algorithm below
    +            raise RuntimeError("Refusing to create an inheritance cycle")
    +        cls._abc_registry.add(subclass)
    +        ABCMeta._abc_invalidation_counter += 1  # Invalidate negative cache
    +        return subclass
    +
    +    def _dump_registry(cls, file=None):
    +        """Debug helper to print the ABC registry."""
    +        print(f"Class: {cls.__module__}.{cls.__qualname__}", file=file)
    +        print(f"Inv. counter: {get_cache_token()}", file=file)
    +        for name in cls.__dict__:
    +            if name.startswith("_abc_"):
    +                value = getattr(cls, name)
    +                if isinstance(value, WeakSet):
    +                    value = set(value)
    +                print(f"{name}: {value!r}", file=file)
    +
    +    def _abc_registry_clear(cls):
    +        """Clear the registry (for debugging or testing)."""
    +        cls._abc_registry.clear()
    +
    +    def _abc_caches_clear(cls):
    +        """Clear the caches (for debugging or testing)."""
    +        cls._abc_cache.clear()
    +        cls._abc_negative_cache.clear()
    +
    +    def __instancecheck__(cls, instance):
    +        """Override for isinstance(instance, cls)."""
    +        # Inline the cache checking
    +        subclass = instance.__class__
    +        if subclass in cls._abc_cache:
    +            return True
    +        subtype = type(instance)
    +        if subtype is subclass:
    +            if (cls._abc_negative_cache_version ==
    +                ABCMeta._abc_invalidation_counter and
    +                subclass in cls._abc_negative_cache):
    +                return False
    +            # Fall back to the subclass check.
    +            return cls.__subclasscheck__(subclass)
    +        return any(cls.__subclasscheck__(c) for c in (subclass, subtype))
    +
    +    def __subclasscheck__(cls, subclass):
    +        """Override for issubclass(subclass, cls)."""
    +        if not isinstance(subclass, type):
    +            raise TypeError('issubclass() arg 1 must be a class')
    +        # Check cache
    +        if subclass in cls._abc_cache:
    +            return True
    +        # Check negative cache; may have to invalidate
    +        if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter:
    +            # Invalidate the negative cache
    +            cls._abc_negative_cache = WeakSet()
    +            cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
    +        elif subclass in cls._abc_negative_cache:
    +            return False
    +        # Check the subclass hook
    +        ok = cls.__subclasshook__(subclass)
    +        if ok is not NotImplemented:
    +            assert isinstance(ok, bool)
    +            if ok:
    +                cls._abc_cache.add(subclass)
    +            else:
    +                cls._abc_negative_cache.add(subclass)
    +            return ok
    +        # Check if it's a direct subclass
    +        if cls in getattr(subclass, '__mro__', ()):
    +            cls._abc_cache.add(subclass)
    +            return True
    +        # Check if it's a subclass of a registered class (recursive)
    +        for rcls in cls._abc_registry:
    +            if issubclass(subclass, rcls):
    +                cls._abc_cache.add(subclass)
    +                return True
    +        # Check if it's a subclass of a subclass (recursive)
    +        for scls in cls.__subclasses__():
    +            if issubclass(subclass, scls):
    +                cls._abc_cache.add(subclass)
    +                return True
    +        # No dice; update negative cache
    +        cls._abc_negative_cache.add(subclass)
    +        return False
    diff --git a/lib-python/3/_pydecimal.py b/lib-python/3/_pydecimal.py
    --- a/lib-python/3/_pydecimal.py
    +++ b/lib-python/3/_pydecimal.py
    @@ -431,80 +431,34 @@
     ##### Context Functions ##################################################
     
     # The getcontext() and setcontext() function manage access to a thread-local
    -# current context.  Py2.4 offers direct support for thread locals.  If that
    -# is not available, use threading.current_thread() which is slower but will
    -# work for older Pythons.  If threads are not part of the build, create a
    -# mock threading object with threading.local() returning the module namespace.
    -
    -try:
    -    import threading
    -except ImportError:
    -    # Python was compiled without threads; create a mock object instead
    -    class MockThreading(object):
    -        def local(self, sys=sys):
    -            return sys.modules[__xname__]
    -    threading = MockThreading()
    -    del MockThreading
    -
    -try:
    -    threading.local
    -
    -except AttributeError:
    -
    -    # To fix reloading, force it to create a new context
    -    # Old contexts have different exceptions in their dicts, making problems.
    -    if hasattr(threading.current_thread(), '__decimal_context__'):
    -        del threading.current_thread().__decimal_context__
    -
    -    def setcontext(context):
    -        """Set this thread's context to context."""
    -        if context in (DefaultContext, BasicContext, ExtendedContext):
    -            context = context.copy()
    -            context.clear_flags()
    -        threading.current_thread().__decimal_context__ = context
    -
    -    def getcontext():
    -        """Returns this thread's context.
    -
    -        If this thread does not yet have a context, returns
    -        a new context and sets this thread's context.
    -        New contexts are copies of DefaultContext.
    -        """
    -        try:
    -            return threading.current_thread().__decimal_context__
    -        except AttributeError:
    -            context = Context()
    -            threading.current_thread().__decimal_context__ = context
    -            return context
    -
    -else:
    -
    -    local = threading.local()
    -    if hasattr(local, '__decimal_context__'):
    -        del local.__decimal_context__
    -
    -    def getcontext(_local=local):
    -        """Returns this thread's context.
    -
    -        If this thread does not yet have a context, returns
    -        a new context and sets this thread's context.
    -        New contexts are copies of DefaultContext.
    -        """
    -        try:
    -            return _local.__decimal_context__
    -        except AttributeError:
    -            context = Context()
    -            _local.__decimal_context__ = context
    -            return context
    -
    -    def setcontext(context, _local=local):
    -        """Set this thread's context to context."""
    -        if context in (DefaultContext, BasicContext, ExtendedContext):
    -            context = context.copy()
    -            context.clear_flags()
    -        _local.__decimal_context__ = context
    -
    -    del threading, local        # Don't contaminate the namespace
    +# current context.
    +
    +import contextvars
    +
    +_current_context_var = contextvars.ContextVar('decimal_context')
    +
    +def getcontext():
    +    """Returns this thread's context.
    +
    +    If this thread does not yet have a context, returns
    +    a new context and sets this thread's context.
    +    New contexts are copies of DefaultContext.
    +    """
    +    try:
    +        return _current_context_var.get()
    +    except LookupError:
    +        context = Context()
    +        _current_context_var.set(context)
    +        return context
    +
    +def setcontext(context):
    +    """Set this thread's context to context."""
    +    if context in (DefaultContext, BasicContext, ExtendedContext):
    +        context = context.copy()
    +        context.clear_flags()
    +    _current_context_var.set(context)
    +
    +del contextvars        # Don't contaminate the namespace
     
     def localcontext(ctx=None):
         """Return a context manager for a copy of the supplied context
    @@ -734,18 +688,23 @@
     
             """
             if isinstance(f, int):                # handle integer inputs
    -            return cls(f)
    -        if not isinstance(f, float):
    +            sign = 0 if f >= 0 else 1
    +            k = 0
    +            coeff = str(abs(f))
    +        elif isinstance(f, float):
    +            if _math.isinf(f) or _math.isnan(f):
    +                return cls(repr(f))
    +            if _math.copysign(1.0, f) == 1.0:
    +                sign = 0
    +            else:
    +                sign = 1
    +            n, d = abs(f).as_integer_ratio()
    +            k = d.bit_length() - 1
    +            coeff = str(n*5**k)
    +        else:
                 raise TypeError("argument must be int or float.")
    -        if _math.isinf(f) or _math.isnan(f):
    -            return cls(repr(f))
    -        if _math.copysign(1.0, f) == 1.0:
    -            sign = 0
    -        else:
    -            sign = 1
    -        n, d = abs(f).as_integer_ratio()
    -        k = d.bit_length() - 1
    -        result = _dec_from_triple(sign, str(n*5**k), -k)
    +
    +        result = _dec_from_triple(sign, coeff, -k)
             if cls is Decimal:
                 return result
             else:
    @@ -1669,13 +1628,13 @@
     
         __trunc__ = __int__
     
    +    @property
         def real(self):
             return self
    -    real = property(real)
    -
    +
    +    @property
         def imag(self):
             return Decimal(0)
    -    imag = property(imag)
     
         def conjugate(self):
             return self
    diff --git a/lib-python/3/_pyio.py b/lib-python/3/_pyio.py
    --- a/lib-python/3/_pyio.py
    +++ b/lib-python/3/_pyio.py
    @@ -9,10 +9,7 @@
     import stat
     import sys
     # Import _thread instead of threading to reduce startup cost
    -try:
    -    from _thread import allocate_lock as Lock
    -except ImportError:
    -    from _dummy_thread import allocate_lock as Lock
    +from _thread import allocate_lock as Lock
     if sys.platform in {'win32', 'cygwin'}:
         from msvcrt import setmode as _setmode
     else:
    @@ -290,16 +287,15 @@
         derived classes can override selectively; the default implementations
         represent a file that cannot be read, written or seeked.
     
    -    Even though IOBase does not declare read, readinto, or write because
    +    Even though IOBase does not declare read or write because
         their signatures will vary, implementations and clients should
         consider those methods part of the interface. Also, implementations
         may raise UnsupportedOperation when operations they do not support are
         called.
     
         The basic type used for binary data read from or written to a file is
    -    bytes. Other bytes-like objects are accepted as method arguments too. In
    -    some cases (such as readinto), a writable object is required. Text I/O
    -    classes work with str data.
    +    bytes. Other bytes-like objects are accepted as method arguments too.
    +    Text I/O classes work with str data.
     
         Note that calling any method (even inquiries) on a closed stream is
         undefined. Implementations may raise OSError in this case.
    @@ -504,8 +500,13 @@
                     return 1
             if size is None:
                 size = -1
    -        elif not isinstance(size, int):
    -            raise TypeError("size must be an integer")
    +        else:
    +            try:
    +                size_index = size.__index__
    +            except AttributeError:
    +                raise TypeError(f"{size!r} is not an integer")
    +            else:
    +                size = size_index()
             res = bytearray()
             while size < 0 or len(res) < size:
                 b = self.read(nreadahead())
    @@ -545,6 +546,11 @@
             return lines
     
         def writelines(self, lines):
    +        """Write a list of lines to the stream.
    +
    +        Line separators are not added, so it is usual for each of the lines
    +        provided to have a line separator at the end.
    +        """
             self._checkClosed()
             for line in lines:
                 self.write(line)
    @@ -635,7 +641,7 @@
         implementation, but wrap one.
         """
     
    -    def read(self, size=None):
    +    def read(self, size=-1):
             """Read and return up to size bytes, where size is an int.
     
             If the argument is omitted, None, or negative, reads and
    @@ -655,7 +661,7 @@
             """
             self._unsupported("read")
     
    -    def read1(self, size=None):
    +    def read1(self, size=-1):
             """Read up to size bytes with at most one read() system call,
             where size is an int.
             """
    @@ -766,7 +772,7 @@
     
         def flush(self):
             if self.closed:
    -            raise ValueError("flush of closed file")
    +            raise ValueError("flush on closed file")
             self.raw.flush()
     
         def close(self):
    @@ -833,6 +839,10 @@
     
         """Buffered I/O implementation using an in-memory bytes buffer."""
     
    +    # Initialize _buffer as soon as possible since it's used by __del__()
    +    # which calls close()
    +    _buffer = None
    +
         def __init__(self, initial_bytes=None):
             buf = bytearray()
             if initial_bytes is not None:
    @@ -860,14 +870,22 @@
             return memoryview(self._buffer)
     
         def close(self):
    -        self._buffer.clear()
    +        if self._buffer is not None:
    +            self._buffer.clear()
             super().close()
     
    -    def read(self, size=None):
    +    def read(self, size=-1):
             if self.closed:
                 raise ValueError("read from closed file")
             if size is None:
                 size = -1
    +        else:
    +            try:
    +                size_index = size.__index__
    +            except AttributeError:
    +                raise TypeError(f"{size!r} is not an integer")
    +            else:
    +                size = size_index()
             if size < 0:
                 size = len(self._buffer)
             if len(self._buffer) <= self._pos:
    @@ -877,7 +895,7 @@
             self._pos = newpos
             return bytes(b)
     
    -    def read1(self, size):
    +    def read1(self, size=-1):
             """This is the same as read.
             """
             return self.read(size)
    @@ -905,9 +923,11 @@
             if self.closed:
                 raise ValueError("seek on closed file")
             try:
    -            pos.__index__
    -        except AttributeError as err:
    -            raise TypeError("an integer is required") from err
    +            pos_index = pos.__index__
    +        except AttributeError:
    +            raise TypeError(f"{pos!r} is not an integer")
    +        else:
    +            pos = pos_index()
             if whence == 0:
                 if pos < 0:
                     raise ValueError("negative seek position %r" % (pos,))
    @@ -932,9 +952,11 @@
                 pos = self._pos
             else:
                 try:
    -                pos.__index__
    -            except AttributeError as err:
    -                raise TypeError("an integer is required") from err
    +                pos_index = pos.__index__
    +            except AttributeError:
    +                raise TypeError(f"{pos!r} is not an integer")
    +            else:
    +                pos = pos_index()
                 if pos < 0:
                     raise ValueError("negative truncate position %r" % (pos,))
             del self._buffer[pos:]
    @@ -1073,12 +1095,12 @@
                     self._read_pos = 0
             return self._read_buf[self._read_pos:]
     
    -    def read1(self, size):
    +    def read1(self, size=-1):
             """Reads up to size bytes, with at most one read() system call."""
             # Returns up to size bytes.  If at least one byte is buffered, we
             # only return buffered bytes.  Otherwise, we do one raw read.
             if size < 0:
    -            raise ValueError("number of bytes to read must be positive")
    +            size = self.buffer_size
             if size == 0:
                 return b""
             with self._read_lock:
    @@ -1174,11 +1196,11 @@
             return self.raw.writable()
     
         def write(self, b):
    -        if self.closed:
    -            raise ValueError("write to closed file")
             if isinstance(b, str):
                 raise TypeError("can't write str to binary stream")
             with self._write_lock:
    +            if self.closed:
    +                raise ValueError("write to closed file")
                 # XXX we can implement some more tricks to try and avoid
                 # partial writes
                 if len(self._write_buf) > self.buffer_size:
    @@ -1214,7 +1236,7 @@
     
         def _flush_unlocked(self):
             if self.closed:
    -            raise ValueError("flush of closed file")
    +            raise ValueError("flush on closed file")
             while self._write_buf:
                 try:
                     n = self.raw.write(self._write_buf)
    @@ -1239,6 +1261,21 @@
                 self._flush_unlocked()
                 return _BufferedIOMixin.seek(self, pos, whence)
     
    +    def close(self):
    +        with self._write_lock:
    +            if self.raw is None or self.closed:
    +                return
    +        # We have to release the lock and call self.flush() (which will
    +        # probably just re-take the lock) in case flush has been overridden in
    +        # a subclass or the user set self.flush to something. This is the same
    +        # behavior as the C implementation.
    +        try:
    +            # may raise BlockingIOError or BrokenPipeError etc
    +            self.flush()
    +        finally:
    +            with self._write_lock:
    +                self.raw.close()
    +
     
     class BufferedRWPair(BufferedIOBase):
     
    @@ -1270,7 +1307,7 @@
             self.reader = BufferedReader(reader, buffer_size)
             self.writer = BufferedWriter(writer, buffer_size)
     
    -    def read(self, size=None):
    +    def read(self, size=-1):
             if size is None:
                 size = -1
             return self.reader.read(size)
    @@ -1284,7 +1321,7 @@
         def peek(self, size=0):
             return self.reader.peek(size)
     
    -    def read1(self, size):
    +    def read1(self, size=-1):
             return self.reader.read1(size)
     
         def readinto1(self, b):
    @@ -1370,7 +1407,7 @@
             self.flush()
             return BufferedReader.peek(self, size)
     
    -    def read1(self, size):
    +    def read1(self, size=-1):
             self.flush()
             return BufferedReader.read1(self, size)
     
    @@ -1731,8 +1768,7 @@
         """Base class for text I/O.
     
         This class provides a character and line based interface to stream
    -    I/O. There is no readinto method because Python's character strings
    -    are immutable. There is no public constructor.
    +    I/O. There is no public constructor.
         """
     
         def read(self, size=-1):
    @@ -1905,15 +1941,16 @@
     
         _CHUNK_SIZE = 2048
     
    +    # Initialize _buffer as soon as possible since it's used by __del__()
    +    # which calls close()
    +    _buffer = None
    +
         # The write_through argument has no effect here since this
         # implementation always writes through.  The argument is present only
         # so that the signature can match the signature of the C version.
         def __init__(self, buffer, encoding=None, errors=None, newline=None,
                      line_buffering=False, write_through=False):
    -        if newline is not None and not isinstance(newline, str):
    -            raise TypeError("illegal newline type: %r" % (type(newline),))
    -        if newline not in (None, "", "\n", "\r", "\r\n"):
    -            raise ValueError("illegal newline value: %r" % (newline,))
    +        self._check_newline(newline)
             if encoding is None:
                 try:
                     encoding = os.device_encoding(buffer.fileno())
    @@ -1943,23 +1980,38 @@
                     raise ValueError("invalid errors: %r" % errors)
     
             self._buffer = buffer
    -        self._line_buffering = line_buffering
    +        self._decoded_chars = ''  # buffer for text returned from decoder
    +        self._decoded_chars_used = 0  # offset into _decoded_chars for read()
    +        self._snapshot = None  # info for reconstructing decoder state
    +        self._seekable = self._telling = self.buffer.seekable()
    +        self._has_read1 = hasattr(self.buffer, 'read1')
    +        self._configure(encoding, errors, newline,
    +                        line_buffering, write_through)
    +
    +    def _check_newline(self, newline):
    +        if newline is not None and not isinstance(newline, str):
    +            raise TypeError("illegal newline type: %r" % (type(newline),))
    +        if newline not in (None, "", "\n", "\r", "\r\n"):
    +            raise ValueError("illegal newline value: %r" % (newline,))
    +
    +    def _configure(self, encoding=None, errors=None, newline=None,
    +                   line_buffering=False, write_through=False):
             self._encoding = encoding
             self._errors = errors
    +        self._encoder = None
    +        self._decoder = None
    +        self._b2cratio = 0.0
    +
             self._readuniversal = not newline
             self._readtranslate = newline is None
             self._readnl = newline
             self._writetranslate = newline != ''
             self._writenl = newline or os.linesep
    -        self._encoder = None
    -        self._decoder = None
    -        self._decoded_chars = ''  # buffer for text returned from decoder
    -        self._decoded_chars_used = 0  # offset into _decoded_chars for read()
    -        self._snapshot = None  # info for reconstructing decoder state
    -        self._seekable = self._telling = self.buffer.seekable()
    -        self._has_read1 = hasattr(self.buffer, 'read1')
    -        self._b2cratio = 0.0
    -
    +
    +        self._line_buffering = line_buffering
    +        self._write_through = write_through
    +
    +        # don't write a BOM in the middle of a file
             if self._seekable and self.writable():
                 position = self.buffer.tell()
                 if position != 0:
    @@ -2008,9 +2060,54 @@
             return self._line_buffering
     
         @property
    +    def write_through(self):
    +        return self._write_through
    +
    +    @property
         def buffer(self):
             return self._buffer
     
    +    def reconfigure(self, *,
    +                    encoding=None, errors=None, newline=Ellipsis,
    +                    line_buffering=None, write_through=None):
    +        """Reconfigure the text stream with new parameters.
    +
    +        This also flushes the stream.
    +        """
    +        if (self._decoder is not None
    +                and (encoding is not None or errors is not None
    +                     or newline is not Ellipsis)):
    +            raise UnsupportedOperation(
    +                "It is not possible to set the encoding or newline of stream "
    +                "after the first read")
    +
    +        if errors is None:
    +            if encoding is None:
    +                errors = self._errors
    +            else:
    +                errors = 'strict'
    +        elif not isinstance(errors, str):
    +            raise TypeError("invalid errors: %r" % errors)
    +
    +        if encoding is None:
    +            encoding = self._encoding
    +        else:
    +            if not isinstance(encoding, str):
    +                raise TypeError("invalid encoding: %r" % encoding)
    +
    +        if newline is Ellipsis:
    +            newline = self._readnl
    +        self._check_newline(newline)
    +
    +        if line_buffering is None:
    +            line_buffering = self.line_buffering
    +        if write_through is None:
    +            write_through = self.write_through
    +
    +        self.flush()
    +        self._configure(encoding, errors, newline,
    +                        line_buffering, write_through)
    +
         def seekable(self):
             if self.closed:
                 raise ValueError("I/O operation on closed file.")
    @@ -2358,11 +2455,14 @@
             self._checkReadable()
             if size is None:
                 size = -1
    +        else:
    +            try:
    +                size_index = size.__index__
    +            except AttributeError:
    +                raise TypeError(f"{size!r} is not an integer")
    +            else:
    +                size = size_index()
             decoder = self._decoder or self._get_decoder()
    -        try:
    -            size.__index__
    -        except AttributeError as err:
    -            raise TypeError("an integer is required") from err
             if size < 0:
                 # Read everything.
                 result = (self._get_decoded_chars() +
    @@ -2393,8 +2493,13 @@
                 raise ValueError("read from closed file")
             if size is None:
                 size = -1
    -        elif not isinstance(size, int):
    -            raise TypeError("size must be an integer")
    +        else:
    +            try:
    +                size_index = size.__index__
    +            except AttributeError:
    +                raise TypeError(f"{size!r} is not an integer")
    +            else:
    +                size = size_index()
     
             # Grab all the decoded text (we will rewind any extra bits later).
             line = self._get_decoded_chars()
    diff --git a/lib-python/3/_strptime.py b/lib-python/3/_strptime.py
    --- a/lib-python/3/_strptime.py
    +++ b/lib-python/3/_strptime.py
    @@ -19,10 +19,7 @@
     from datetime import (date as datetime_date,
                           timedelta as datetime_timedelta,
                           timezone as datetime_timezone)
    -try:
    -    from _thread import allocate_lock as _thread_allocate_lock
    -except ImportError:
    -    from _dummy_thread import allocate_lock as _thread_allocate_lock
    +from _thread import allocate_lock as _thread_allocate_lock
     
     __all__ = []
     
    @@ -213,7 +210,7 @@
                 #XXX: Does 'Y' need to worry about having less or more than
                 #     4 digits?
                 'Y': r"(?P\d\d\d\d)",
    -            'z': r"(?P[+-]\d\d[0-5]\d)",
    +            'z': r"(?P[+-]\d\d:?[0-5]\d(:?[0-5]\d(\.\d{1,6})?)?|Z)",
                 'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
                 'a': self.__seqToRE(self.locale_time.a_weekday, 'a'),
                 'B': self.__seqToRE(self.locale_time.f_month[1:], 'B'),
    @@ -368,7 +365,8 @@
         month = day = 1
         hour = minute = second = fraction = 0
         tz = -1
    -    tzoffset = None
    +    gmtoff = None
    +    gmtoff_fraction = 0
         # Default to -1 to signify that values not known; not critical to have,
         # though
         iso_week = week_of_year = None
    @@ -458,9 +456,27 @@
                 iso_week = int(found_dict['V'])
             elif group_key == 'z':
                 z = found_dict['z']
    -            tzoffset = int(z[1:3]) * 60 + int(z[3:5])
    -            if z.startswith("-"):
    -                tzoffset = -tzoffset
    +            if z == 'Z':
    +                gmtoff = 0
    +            else:
    +                if z[3] == ':':
    +                    z = z[:3] + z[4:]
    +                    if len(z) > 5:
    +                        if z[5] != ':':
    +                            msg = f"Inconsistent use of : in {found_dict['z']}"
    +                            raise ValueError(msg)
    +                        z = z[:5] + z[6:]
    +                hours = int(z[1:3])
    +                minutes = int(z[3:5])
    +                seconds = int(z[5:7] or 0)
    +                gmtoff = (hours * 60 * 60) + (minutes * 60) + seconds
    +                gmtoff_remainder = z[8:]
    +                # Pad to always return microseconds.
    +                gmtoff_remainder_padding = "0" * (6 - len(gmtoff_remainder))
    +                gmtoff_fraction = int(gmtoff_remainder + gmtoff_remainder_padding)
    +                if z.startswith("-"):
    +                    gmtoff = -gmtoff
    +                    gmtoff_fraction = -gmtoff_fraction
             elif group_key == 'Z':
                 # Since -1 is default value only need to worry about setting tz if
                 # it can be something other than -1.
    @@ -538,10 +554,6 @@
             weekday = datetime_date(year, month, day).weekday()
         # Add timezone info
         tzname = found_dict.get("Z")
    -    if tzoffset is not None:
    -        gmtoff = tzoffset * 60
    -    else:
    -        gmtoff = None
     
         if leap_year_fix:
             # the caller didn't supply a year but asked for Feb 29th. We couldn't
    @@ -551,7 +563,7 @@
     
         return (year, month, day,
                 hour, minute, second,
    -            weekday, julian, tz, tzname, gmtoff), fraction
    +            weekday, julian, tz, tzname, gmtoff), fraction, gmtoff_fraction
     
     def _strptime_time(data_string, format="%a %b %d %H:%M:%S %Y"):
         """Return a time struct based on the input string and the
    @@ -562,11 +574,11 @@
     def _strptime_datetime(cls, data_string, format="%a %b %d %H:%M:%S %Y"):
         """Return a class cls instance based on the input string and the
         format string."""
    -    tt, fraction = _strptime(data_string, format)
    +    tt, fraction, gmtoff_fraction = _strptime(data_string, format)
         tzname, gmtoff = tt[-2:]
         args = tt[:6] + (fraction,)
         if gmtoff is not None:
    -        tzdelta = datetime_timedelta(seconds=gmtoff)
    +        tzdelta = datetime_timedelta(seconds=gmtoff, microseconds=gmtoff_fraction)
             if tzname:
                 tz = datetime_timezone(tzdelta, tzname)
             else:
    diff --git a/lib-python/3/_weakrefset.py b/lib-python/3/_weakrefset.py
    --- a/lib-python/3/_weakrefset.py
    +++ b/lib-python/3/_weakrefset.py
    @@ -105,7 +105,7 @@
                 try:
                     itemref = self.data.pop()
                 except KeyError:
    -                raise KeyError('pop from empty WeakSet')
    +                raise KeyError('pop from empty WeakSet') from None
                 item = itemref()
                 if item is not None:
                     return item
    @@ -164,19 +164,19 @@
         __le__ = issubset
     
         def __lt__(self, other):
    -        return self.data < set(ref(item) for item in other)
    +        return self.data < set(map(ref, other))
     
         def issuperset(self, other):
             return self.data.issuperset(ref(item) for item in other)
         __ge__ = issuperset
     
         def __gt__(self, other):
    -        return self.data > set(ref(item) for item in other)
    +        return self.data > set(map(ref, other))
     
         def __eq__(self, other):
             if not isinstance(other, self.__class__):
                 return NotImplemented
    -        return self.data == set(ref(item) for item in other)
    +        return self.data == set(map(ref, other))
     
         def symmetric_difference(self, other):
             newset = self.copy()
    diff --git a/lib-python/3/abc.py b/lib-python/3/abc.py
    --- a/lib-python/3/abc.py
    +++ b/lib-python/3/abc.py
    @@ -3,8 +3,6 @@
     
     """Abstract Base Classes (ABCs) according to PEP 3119."""
     
    -from _weakrefset import WeakSet
    -
     
     def abstractmethod(funcobj):
         """A decorator indicating abstract methods.
    @@ -27,8 +25,7 @@
     
     
     class abstractclassmethod(classmethod):
    -    """
    -    A decorator indicating abstract classmethods.
    +    """A decorator indicating abstract classmethods.
     
         Similar to abstractmethod.
     
    @@ -51,8 +48,7 @@
     
     
     class abstractstaticmethod(staticmethod):
    -    """
    -    A decorator indicating abstract staticmethods.
    +    """A decorator indicating abstract staticmethods.
     
         Similar to abstractmethod.
     
    @@ -75,8 +71,7 @@
     
     
     class abstractproperty(property):
    -    """
    -    A decorator indicating abstract properties.
    +    """A decorator indicating abstract properties.
     
         Requires that the metaclass is ABCMeta or derived from it.  A
         class that has a metaclass derived from ABCMeta cannot be
    @@ -106,145 +101,70 @@
         __isabstractmethod__ = True
     
     
    -class ABCMeta(type):
    +try:
    +    from _abc import (get_cache_token, _abc_init, _abc_register,
    +                      _abc_instancecheck, _abc_subclasscheck, _get_dump,
    +                      _reset_registry, _reset_caches)
    +except ImportError:
    +    from _py_abc import ABCMeta, get_cache_token
    +    ABCMeta.__module__ = 'abc'
    +else:
    +    class ABCMeta(type):
    +        """Metaclass for defining Abstract Base Classes (ABCs).
     
    -    """Metaclass for defining Abstract Base Classes (ABCs).
    +        Use this metaclass to create an ABC.  An ABC can be subclassed
    +        directly, and then acts as a mix-in class.  You can also register
    +        unrelated concrete classes (even built-in classes) and unrelated
    +        ABCs as 'virtual subclasses' -- these and their descendants will
    +        be considered subclasses of the registering ABC by the built-in
    +        issubclass() function, but the registering ABC won't show up in
    +        their MRO (Method Resolution Order) nor will method
    +        implementations defined by the registering ABC be callable (not
    +        even via super()).
    +        """
    +        def __new__(mcls, name, bases, namespace, **kwargs):
    +            cls = super().__new__(mcls, name, bases, namespace, **kwargs)
    +            _abc_init(cls)
    +            return cls
     
    -    Use this metaclass to create an ABC.  An ABC can be subclassed
    -    directly, and then acts as a mix-in class.  You can also register
    -    unrelated concrete classes (even built-in classes) and unrelated
    -    ABCs as 'virtual subclasses' -- these and their descendants will
    -    be considered subclasses of the registering ABC by the built-in
    -    issubclass() function, but the registering ABC won't show up in
    -    their MRO (Method Resolution Order) nor will method
    -    implementations defined by the registering ABC be callable (not
    -    even via super()).
    +        def register(cls, subclass):
    +            """Register a virtual subclass of an ABC.
     
    -    """
    +            Returns the subclass, to allow usage as a class decorator.
    +            """
    +            return _abc_register(cls, subclass)
     
    -    # A global counter that is incremented each time a class is
    -    # registered as a virtual subclass of anything.  It forces the
    -    # negative cache to be cleared before its next use.
    -    # Note: this counter is private. Use `abc.get_cache_token()` for
    -    #       external code.
    -    _abc_invalidation_counter = 0
    +        def __instancecheck__(cls, instance):
    +            """Override for isinstance(instance, cls)."""
    +            return _abc_instancecheck(cls, instance)
     
    -    def __new__(mcls, name, bases, namespace, **kwargs):
    -        cls = super().__new__(mcls, name, bases, namespace, **kwargs)
    -        # Compute set of abstract method names
    -        abstracts = {name
    -                     for name, value in namespace.items()
    -                     if getattr(value, "__isabstractmethod__", False)}
    -        for base in bases:
    -            for name in getattr(base, "__abstractmethods__", set()):
    -                value = getattr(cls, name, None)
    -                if getattr(value, "__isabstractmethod__", False):
    -                    abstracts.add(name)
    -        cls.__abstractmethods__ = frozenset(abstracts)
    -        # Set up inheritance registry
    -        cls._abc_registry = WeakSet()
    -        cls._abc_cache = WeakSet()
    -        cls._abc_negative_cache = WeakSet()
    -        cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
    -        return cls
    +        def __subclasscheck__(cls, subclass):
    +            """Override for issubclass(subclass, cls)."""
    +            return _abc_subclasscheck(cls, subclass)
     
    -    def register(cls, subclass):
    -        """Register a virtual subclass of an ABC.
    +        def _dump_registry(cls, file=None):
    +            """Debug helper to print the ABC registry."""
    +            print(f"Class: {cls.__module__}.{cls.__qualname__}", file=file)
    +            print(f"Inv. counter: {get_cache_token()}", file=file)
    +            (_abc_registry, _abc_cache, _abc_negative_cache,
    +             _abc_negative_cache_version) = _get_dump(cls)
    +            print(f"_abc_registry: {_abc_registry!r}", file=file)
    +            print(f"_abc_cache: {_abc_cache!r}", file=file)
    +            print(f"_abc_negative_cache: {_abc_negative_cache!r}", file=file)
    +            print(f"_abc_negative_cache_version: {_abc_negative_cache_version!r}",
    +                  file=file)
     
    -        Returns the subclass, to allow usage as a class decorator.
    -        """
    -        if not isinstance(subclass, type):
    -            raise TypeError("Can only register classes")
    -        if issubclass(subclass, cls):
    -            return subclass  # Already a subclass
    -        # Subtle: test for cycles *after* testing for "already a subclass";
    -        # this means we allow X.register(X) and interpret it as a no-op.
    -        if issubclass(cls, subclass):
    -            # This would create a cycle, which is bad for the algorithm below
    -            raise RuntimeError("Refusing to create an inheritance cycle")
    -        cls._abc_registry.add(subclass)
    -        ABCMeta._abc_invalidation_counter += 1  # Invalidate negative cache
    -        return subclass
    +        def _abc_registry_clear(cls):
    +            """Clear the registry (for debugging or testing)."""
    +            _reset_registry(cls)
     
    -    def _dump_registry(cls, file=None):
    -        """Debug helper to print the ABC registry."""
    -        print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file)
    -        print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file)
    -        for name in sorted(cls.__dict__):
    -            if name.startswith("_abc_"):
    -                value = getattr(cls, name)
    -                if isinstance(value, WeakSet):
    -                    value = set(value)
    -                print("%s: %r" % (name, value), file=file)
    -
    -    def __instancecheck__(cls, instance):
    -        """Override for isinstance(instance, cls)."""
    -        # Inline the cache checking
    -        subclass = instance.__class__
    -        if subclass in cls._abc_cache:
    -            return True
    -        subtype = type(instance)
    -        if subtype is subclass:
    -            if (cls._abc_negative_cache_version ==
    -                ABCMeta._abc_invalidation_counter and
    -                subclass in cls._abc_negative_cache):
    -                return False
    -            # Fall back to the subclass check.
    -            return cls.__subclasscheck__(subclass)
    -        return any(cls.__subclasscheck__(c) for c in {subclass, subtype})
    -
    -    def __subclasscheck__(cls, subclass):
    -        """Override for issubclass(subclass, cls)."""
    -        # Check cache
    -        if subclass in cls._abc_cache:
    -            return True
    -        # Check negative cache; may have to invalidate
    -        if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter:
    -            # Invalidate the negative cache
    -            cls._abc_negative_cache = WeakSet()
    -            cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
    -        elif subclass in cls._abc_negative_cache:
    -            return False
    -        # Check the subclass hook
    -        ok = cls.__subclasshook__(subclass)
    -        if ok is not NotImplemented:
    -            assert isinstance(ok, bool)
    -            if ok:
    -                cls._abc_cache.add(subclass)
    -            else:
    -                cls._abc_negative_cache.add(subclass)
    -            return ok
    -        # Check if it's a direct subclass
    -        if cls in getattr(subclass, '__mro__', ()):
    -            cls._abc_cache.add(subclass)
    -            return True
    -        # Check if it's a subclass of a registered class (recursive)
    -        for rcls in cls._abc_registry:
    -            if issubclass(subclass, rcls):
    -                cls._abc_cache.add(subclass)
    -                return True
    -        # Check if it's a subclass of a subclass (recursive)
    -        for scls in cls.__subclasses__():
    -            if issubclass(subclass, scls):
    -                cls._abc_cache.add(subclass)
    -                return True
    -        # No dice; update negative cache
    -        cls._abc_negative_cache.add(subclass)
    -        return False
    +        def _abc_caches_clear(cls):
    +            """Clear the caches (for debugging or testing)."""
    +            _reset_caches(cls)
     
     
     class ABC(metaclass=ABCMeta):
         """Helper class that provides a standard way to create an ABC using
         inheritance.
         """
    -    pass
    -
    -
    -def get_cache_token():
    -    """Returns the current ABC cache token.
    -
    -    The token is an opaque object (supporting equality testing) identifying the
    -    current version of the ABC cache for virtual subclasses. The token changes
    -    with every call to ``register()`` on any ABC.
    -    """
    -    return ABCMeta._abc_invalidation_counter
    +    __slots__ = ()
    diff --git a/lib-python/3/aifc.py b/lib-python/3/aifc.py
    --- a/lib-python/3/aifc.py
    +++ b/lib-python/3/aifc.py
    @@ -149,25 +149,25 @@
         try:
             return struct.unpack('>l', file.read(4))[0]
         except struct.error:
    -        raise EOFError
    +        raise EOFError from None
     
     def _read_ulong(file):
         try:
             return struct.unpack('>L', file.read(4))[0]
         except struct.error:
    -        raise EOFError
    +        raise EOFError from None
     
     def _read_short(file):
         try:
             return struct.unpack('>h', file.read(2))[0]
         except struct.error:
    -        raise EOFError
    +        raise EOFError from None
     
     def _read_ushort(file):
         try:
             return struct.unpack('>H', file.read(2))[0]
         except struct.error:
    -        raise EOFError
    +        raise EOFError from None
     
     def _read_string(file):
         length = ord(file.read(1))
    @@ -467,6 +467,10 @@
             self._nframes = _read_long(chunk)
             self._sampwidth = (_read_short(chunk) + 7) // 8
             self._framerate = int(_read_float(chunk))
    +        if self._sampwidth <= 0:
    +            raise Error('bad sample width')
    +        if self._nchannels <= 0:
    +            raise Error('bad # of channels')
             self._framesize = self._nchannels * self._sampwidth
             if self._aifc:
                 #DEBUG: SGI's soundeditor produces a bad size :-(
    @@ -916,7 +920,10 @@
         else:
             raise Error("mode must be 'r', 'rb', 'w', or 'wb'")
     
    -openfp = open # B/W compatibility
    +def openfp(f, mode=None):
    +    warnings.warn("aifc.openfp is deprecated since Python 3.7. "
    +                  "Use aifc.open instead.", DeprecationWarning, stacklevel=2)
    +    return open(f, mode=mode)
     
     if __name__ == '__main__':
         import sys
    diff --git a/lib-python/3/argparse.py b/lib-python/3/argparse.py
    --- a/lib-python/3/argparse.py
    +++ b/lib-python/3/argparse.py
    @@ -83,16 +83,12 @@
     ]
     
     
    -import collections as _collections
    -import copy as _copy
     import os as _os
     import re as _re
     import sys as _sys
    -import textwrap as _textwrap
     
     from gettext import gettext as _, ngettext
     
    -
     SUPPRESS = '==SUPPRESS=='
     
     OPTIONAL = '?'
    @@ -137,10 +133,16 @@
             return []
     
     
    -def _ensure_value(namespace, name, value):
    -    if getattr(namespace, name, None) is None:
    -        setattr(namespace, name, value)
    -    return getattr(namespace, name)
    +def _copy_items(items):
    +    if items is None:
    +        return []
    +    # The copy module is used only in the 'append' and 'append_const'
    +    # actions, and it is needed only when the default value isn't a list.
    +    # Delay its import for speeding up the common case.
    +    if type(items) is list:
    +        return items[:]
    +    import copy
    +    return copy.copy(items)
     
     
     # ===============
    @@ -591,6 +593,8 @@
                 result = '...'
             elif action.nargs == PARSER:
                 result = '%s ...' % get_metavar(1)
    +        elif action.nargs == SUPPRESS:
    +            result = ''
             else:
                 formats = ['%s' for _ in range(action.nargs)]
                 result = ' '.join(formats) % get_metavar(action.nargs)
    @@ -621,12 +625,17 @@
     
         def _split_lines(self, text, width):
             text = self._whitespace_matcher.sub(' ', text).strip()
    -        return _textwrap.wrap(text, width)
    +        # The textwrap module is used only for formatting help.
    +        # Delay its import for speeding up the common usage of argparse.
    +        import textwrap
    +        return textwrap.wrap(text, width)
     
         def _fill_text(self, text, width, indent):
             text = self._whitespace_matcher.sub(' ', text).strip()
    -        return _textwrap.fill(text, width, initial_indent=indent,
    -                                           subsequent_indent=indent)
    +        import textwrap
    +        return textwrap.fill(text, width,
    +                             initial_indent=indent,
    +                             subsequent_indent=indent)
     
         def _get_help_string(self, action):
             return action.help
    @@ -954,7 +963,8 @@
                 metavar=metavar)
     
         def __call__(self, parser, namespace, values, option_string=None):
    -        items = _copy.copy(_ensure_value(namespace, self.dest, []))
    +        items = getattr(namespace, self.dest, None)
    +        items = _copy_items(items)
             items.append(values)
             setattr(namespace, self.dest, items)
     
    @@ -980,7 +990,8 @@
                 metavar=metavar)
     
         def __call__(self, parser, namespace, values, option_string=None):
    -        items = _copy.copy(_ensure_value(namespace, self.dest, []))
    +        items = getattr(namespace, self.dest, None)
    +        items = _copy_items(items)
             items.append(self.const)
             setattr(namespace, self.dest, items)
     
    @@ -1002,8 +1013,10 @@
                 help=help)
     
         def __call__(self, parser, namespace, values, option_string=None):
    -        new_count = _ensure_value(namespace, self.dest, 0) + 1
    -        setattr(namespace, self.dest, new_count)
    +        count = getattr(namespace, self.dest, None)
    +        if count is None:
    +            count = 0
    +        setattr(namespace, self.dest, count + 1)
     
     
     class _HelpAction(Action):
    @@ -1068,12 +1081,13 @@
                      prog,
                      parser_class,
                      dest=SUPPRESS,
    +                 required=False,
                      help=None,
                      metavar=None):
     
             self._prog_prefix = prog
             self._parser_class = parser_class
    -        self._name_parser_map = _collections.OrderedDict()
    +        self._name_parser_map = {}
             self._choices_actions = []
     
             super(_SubParsersAction, self).__init__(
    @@ -1081,6 +1095,7 @@
                 dest=dest,
                 nargs=PARSER,
                 choices=self._name_parser_map,
    +            required=required,
                 help=help,
                 metavar=metavar)
     
    @@ -2228,6 +2243,10 @@
             elif nargs == PARSER:
                 nargs_pattern = '(-*A[-AO]*)'
     
    +        # suppress action, like nargs=0
    +        elif nargs == SUPPRESS:
    +            nargs_pattern = '(-*-*)'
    +
             # all others should be integers
             else:
                 nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
    @@ -2241,6 +2260,91 @@
             return nargs_pattern
     
         # ========================
    +    # Alt command line argument parsing, allowing free intermix
    +    # ========================
    +
    +    def parse_intermixed_args(self, args=None, namespace=None):
    +        args, argv = self.parse_known_intermixed_args(args, namespace)
    +        if argv:
    +            msg = _('unrecognized arguments: %s')
    +            self.error(msg % ' '.join(argv))
    +        return args
    +
    +    def parse_known_intermixed_args(self, args=None, namespace=None):
    +        # returns a namespace and list of extras
    +        #
    +        # positional can be freely intermixed with optionals.  optionals are
    +        # first parsed with all positional arguments deactivated.  The 'extras'
    +        # are then parsed.  If the parser definition is incompatible with the
    +        # intermixed assumptions (e.g. use of REMAINDER, subparsers) a
    +        # TypeError is raised.
    +        #
    +        # positionals are 'deactivated' by setting nargs and default to
    +        # SUPPRESS.  This blocks the addition of that positional to the
    +        # namespace
    +
    +        positionals = self._get_positional_actions()
    +        a = [action for action in positionals
    +             if action.nargs in [PARSER, REMAINDER]]
    +        if a:
    +            raise TypeError('parse_intermixed_args: positional arg'
    +                            ' with nargs=%s'%a[0].nargs)
    +
    +        if [action.dest for group in self._mutually_exclusive_groups
    +            for action in group._group_actions if action in positionals]:
    +            raise TypeError('parse_intermixed_args: positional in'
    +                            ' mutuallyExclusiveGroup')
    +
    +        try:
    +            save_usage = self.usage
    +            try:
    +                if self.usage is None:
    +                    # capture the full usage for use in error messages
    +                    self.usage = self.format_usage()[7:]
    +                for action in positionals:
    +                    # deactivate positionals
    +                    action.save_nargs = action.nargs
    +                    # action.nargs = 0
    +                    action.nargs = SUPPRESS
    +                    action.save_default = action.default
    +                    action.default = SUPPRESS
    +                namespace, remaining_args = self.parse_known_args(args,
    +                                                                  namespace)
    +                for action in positionals:
    +                    # remove the empty positional values from namespace
    +                    if (hasattr(namespace, action.dest)
    +                            and getattr(namespace, action.dest)==[]):
    +                        from warnings import warn
    +                        warn('Do not expect %s in %s' % (action.dest, namespace))
    +                        delattr(namespace, action.dest)
    +            finally:
    +                # restore nargs and usage before exiting
    +                for action in positionals:
    +                    action.nargs = action.save_nargs
    +                    action.default = action.save_default
    +            optionals = self._get_optional_actions()
    +            try:
    +                # parse positionals.  optionals aren't normally required, but
    +                # they could be, so make sure they aren't.
    +                for action in optionals:
    +                    action.save_required = action.required
    +                    action.required = False
    +                for group in self._mutually_exclusive_groups:
    +                    group.save_required = group.required
    +                    group.required = False
    +                namespace, extras = self.parse_known_args(remaining_args,
    +                                                          namespace)
    +            finally:
    +                # restore parser values before exiting
    +                for action in optionals:
    +                    action.required = action.save_required
    +                for group in self._mutually_exclusive_groups:
    +                    group.required = group.save_required
    +        finally:
    +            self.usage = save_usage
    +        return namespace, extras
    +
    +    # ========================
         # Value conversion methods
         # ========================
         def _get_values(self, action, arg_strings):
    @@ -2286,6 +2390,10 @@
                 value = [self._get_value(action, v) for v in arg_strings]
                 self._check_value(action, value[0])
     
    +        # SUPPRESS argument does not put anything in the namespace
    +        elif action.nargs == SUPPRESS:
    +            value = SUPPRESS
    +
             # all other types of nargs produce a list
             else:
                 value = [self._get_value(action, v) for v in arg_strings]
    diff --git a/lib-python/3/ast.py b/lib-python/3/ast.py
    --- a/lib-python/3/ast.py
    +++ b/lib-python/3/ast.py
    @@ -35,8 +35,6 @@
         return compile(source, filename, mode, PyCF_ONLY_AST)
     
     
    -_NUM_TYPES = (int, float, complex)
    -
     def literal_eval(node_or_string):
         """
         Safely evaluate an expression node or a string containing a Python
    @@ -48,6 +46,21 @@
             node_or_string = parse(node_or_string, mode='eval')
         if isinstance(node_or_string, Expression):
             node_or_string = node_or_string.body
    +    def _convert_num(node):
    +        if isinstance(node, Constant):
    +            if isinstance(node.value, (int, float, complex)):
    +                return node.value
    +        elif isinstance(node, Num):
    +            return node.n
    +        raise ValueError('malformed node or string: ' + repr(node))
    +    def _convert_signed_num(node):
    +        if isinstance(node, UnaryOp) and isinstance(node.op, (UAdd, USub)):
    +            operand = _convert_num(node.operand)
    +            if isinstance(node.op, UAdd):
    +                return + operand
    +            else:
    +                return - operand
    +        return _convert_num(node)
         def _convert(node):
             if isinstance(node, Constant):
                 return node.value
    @@ -62,26 +75,19 @@
             elif isinstance(node, Set):
                 return set(map(_convert, node.elts))
             elif isinstance(node, Dict):
    -            return dict((_convert(k), _convert(v)) for k, v
    -                        in zip(node.keys, node.values))
    +            return dict(zip(map(_convert, node.keys),
    +                            map(_convert, node.values)))
             elif isinstance(node, NameConstant):
                 return node.value
    -        elif isinstance(node, UnaryOp) and isinstance(node.op, (UAdd, USub)):
    -            operand = _convert(node.operand)
    -            if isinstance(operand, _NUM_TYPES):
    -                if isinstance(node.op, UAdd):
    -                    return + operand
    -                else:
    -                    return - operand
             elif isinstance(node, BinOp) and isinstance(node.op, (Add, Sub)):
    -            left = _convert(node.left)
    -            right = _convert(node.right)
    -            if isinstance(left, _NUM_TYPES) and isinstance(right, _NUM_TYPES):
    +            left = _convert_signed_num(node.left)
    +            right = _convert_num(node.right)
    +            if isinstance(left, (int, float)) and isinstance(right, complex):
                     if isinstance(node.op, Add):
                         return left + right
                     else:
                         return left - right
    -        raise ValueError('malformed node or string: ' + repr(node))
    +        return _convert_signed_num(node)
         return _convert(node_or_string)
     
     
    @@ -194,18 +200,21 @@
         Return the docstring for the given node or None if no docstring can
         be found.  If the node provided does not have docstrings a TypeError
         will be raised.
    +
    +    If *clean* is `True`, all tabs are expanded to spaces and any whitespace
    +    that can be uniformly removed from the second line onwards is removed.
         """
         if not isinstance(node, (AsyncFunctionDef, FunctionDef, ClassDef, Module)):
             raise TypeError("%r can't have docstrings" % node.__class__.__name__)
         if not(node.body and isinstance(node.body[0], Expr)):
    -        return
    +        return None
         node = node.body[0].value
         if isinstance(node, Str):
             text = node.s
         elif isinstance(node, Constant) and isinstance(node.value, str):
             text = node.value
         else:
    -        return
    +        return None
         if clean:
             import inspect
             text = inspect.cleandoc(text)
    diff --git a/lib-python/3/asyncio/__init__.py b/lib-python/3/asyncio/__init__.py
    --- a/lib-python/3/asyncio/__init__.py
    +++ b/lib-python/3/asyncio/__init__.py
    @@ -1,22 +1,9 @@
     """The asyncio package, tracking PEP 3156."""
     
    +# flake8: noqa
    +
     import sys
     
    -# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
    -# Do this first, so the other submodules can use "from . import selectors".
    -# Prefer asyncio/selectors.py over the stdlib one, as ours may be newer.
    -try:
    -    from . import selectors
    -except ImportError:
    -    import selectors  # Will also be exported.
    -
    -if sys.platform == 'win32':
    -    # Similar thing for _overlapped.
    -    try:
    -        from . import _overlapped
    -    except ImportError:
    -        import _overlapped  # Will also be exported.
    -
     # This relies on each of the submodules having an __all__ variable.
     from .base_events import *
     from .coroutines import *
    @@ -24,18 +11,24 @@
     from .futures import *
     from .locks import *
     from .protocols import *
    +from .runners import *
     from .queues import *
     from .streams import *
     from .subprocess import *
     from .tasks import *
     from .transports import *
     
    +# Exposed for _asynciomodule.c to implement now deprecated
    +# Task.all_tasks() method.  This function will be removed in 3.9.
    +from .tasks import _all_tasks_compat  # NoQA
    +
     __all__ = (base_events.__all__ +
                coroutines.__all__ +
                events.__all__ +
                futures.__all__ +
                locks.__all__ +
                protocols.__all__ +
    +           runners.__all__ +
                queues.__all__ +
                streams.__all__ +
                subprocess.__all__ +
    diff --git a/lib-python/3/asyncio/base_events.py b/lib-python/3/asyncio/base_events.py
    --- a/lib-python/3/asyncio/base_events.py
    +++ b/lib-python/3/asyncio/base_events.py
    @@ -14,9 +14,9 @@
     """
     
     import collections
    +import collections.abc
     import concurrent.futures
     import heapq
    -import inspect
     import itertools
     import logging
     import os
    @@ -29,16 +29,23 @@
     import warnings
     import weakref
     
    -from . import compat
    +try:
    +    import ssl
    +except ImportError:  # pragma: no cover
    +    ssl = None
    +
    +from . import constants
     from . import coroutines
     from . import events
     from . import futures
    +from . import protocols
    +from . import sslproto
     from . import tasks
    -from .coroutines import coroutine
    +from . import transports
     from .log import logger
     
     
    -__all__ = ['BaseEventLoop']
    +__all__ = 'BaseEventLoop',
     
     
     # Minimum number of _scheduled timer handles before cleanup of
    @@ -49,11 +56,6 @@
     # before cleanup of cancelled handles is performed.
     _MIN_CANCELLED_TIMER_HANDLES_FRACTION = 0.5
     
    -# Exceptions which must not call the exception handler in fatal error
    -# methods (_fatal_error())
    -_FATAL_ERROR_IGNORE = (BrokenPipeError,
    -                       ConnectionResetError, ConnectionAbortedError)
    -
     _HAS_IPv6 = hasattr(socket, 'AF_INET6')
     
     # Maximum timeout passed to select to avoid OS limitations
    @@ -89,27 +91,7 @@
                                  'SO_REUSEPORT defined but not implemented.')
     
     
    -def _is_stream_socket(sock_type):
    -    if hasattr(socket, 'SOCK_NONBLOCK'):
    -        # Linux's socket.type is a bitmask that can include extra info
    -        # about socket (like SOCK_NONBLOCK bit), therefore we can't do simple
    -        # `sock_type == socket.SOCK_STREAM`, see
    -        # https://github.com/torvalds/linux/blob/v4.13/include/linux/net.h#L77
    -        # for more details.
    -        return (sock_type & 0xF) == socket.SOCK_STREAM
    -    else:
    -        return sock_type == socket.SOCK_STREAM
    -
    -
    -def _is_dgram_socket(sock_type):
    -    if hasattr(socket, 'SOCK_NONBLOCK'):
    -        # See the comment in `_is_stream_socket`.
    -        return (sock_type & 0xF) == socket.SOCK_DGRAM
    -    else:
    -        return sock_type == socket.SOCK_DGRAM
    -
    -
    -def _ipaddr_info(host, port, family, type, proto):
    +def _ipaddr_info(host, port, family, type, proto, flowinfo=0, scopeid=0):
         # Try to skip getaddrinfo if "host" is already an IP. Users might have
         # handled name resolution in their own code and pass in resolved IPs.
         if not hasattr(socket, 'inet_pton'):
    @@ -119,9 +101,9 @@
                 host is None:
             return None
     
    -    if _is_stream_socket(type):
    +    if type == socket.SOCK_STREAM:
             proto = socket.IPPROTO_TCP
    -    elif _is_dgram_socket(type):
    +    elif type == socket.SOCK_DGRAM:
             proto = socket.IPPROTO_UDP
         else:
             return None
    @@ -158,7 +140,7 @@
                 socket.inet_pton(af, host)
                 # The host has already been resolved.
                 if _HAS_IPv6 and af == socket.AF_INET6:
    -                return af, type, proto, '', (host, port, 0, 0)
    +                return af, type, proto, '', (host, port, flowinfo, scopeid)
                 else:
                     return af, type, proto, '', (host, port)
             except OSError:
    @@ -168,24 +150,20 @@
         return None
     
     
    -def _ensure_resolved(address, *, family=0, type=socket.SOCK_STREAM, proto=0,
    -                     flags=0, loop):
    -    host, port = address[:2]
    -    info = _ipaddr_info(host, port, family, type, proto)
    -    if info is not None:
    -        # "host" is already a resolved IP.
    -        fut = loop.create_future()
    -        fut.set_result([info])
    -        return fut
    -    else:
    -        return loop.getaddrinfo(host, port, family=family, type=type,
    -                                proto=proto, flags=flags)
    +def _run_until_complete_cb(fut):
    +    if not fut.cancelled():
    +        exc = fut.exception()
    +        if isinstance(exc, BaseException) and not isinstance(exc, Exception):
    +            # Issue #22429: run_forever() already finished, no need to
    +            # stop it.
    +            return
    +    futures._get_loop(fut).stop()
     
     
     if hasattr(socket, 'TCP_NODELAY'):
         def _set_nodelay(sock):
             if (sock.family in {socket.AF_INET, socket.AF_INET6} and
    -                _is_stream_socket(sock.type) and
    +                sock.type == socket.SOCK_STREAM and
                     sock.proto == socket.IPPROTO_TCP):
                 sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
     else:
    @@ -193,45 +171,100 @@
             pass
     
     
    -def _run_until_complete_cb(fut):
    -    exc = fut._exception
    -    if (isinstance(exc, BaseException)
    -    and not isinstance(exc, Exception)):
    -        # Issue #22429: run_forever() already finished, no need to
    -        # stop it.
    -        return
    -    fut._loop.stop()
    +class _SendfileFallbackProtocol(protocols.Protocol):
    +    def __init__(self, transp):
    +        if not isinstance(transp, transports._FlowControlMixin):
    +            raise TypeError("transport should be _FlowControlMixin instance")
    +        self._transport = transp
    +        self._proto = transp.get_protocol()
    +        self._should_resume_reading = transp.is_reading()
    +        self._should_resume_writing = transp._protocol_paused
    +        transp.pause_reading()
    +        transp.set_protocol(self)
    +        if self._should_resume_writing:
    +            self._write_ready_fut = self._transport._loop.create_future()
    +        else:
    +            self._write_ready_fut = None
    +
    +    async def drain(self):
    +        if self._transport.is_closing():
    +            raise ConnectionError("Connection closed by peer")
    +        fut = self._write_ready_fut
    +        if fut is None:
    +            return
    +        await fut
    +
    +    def connection_made(self, transport):
    +        raise RuntimeError("Invalid state: "
    +                           "connection should have been established already.")
    +
    +    def connection_lost(self, exc):
    +        if self._write_ready_fut is not None:
    +            # Never happens if peer disconnects after sending the whole content
    +            # Thus disconnection is always an exception from user perspective
    +            if exc is None:
    +                self._write_ready_fut.set_exception(
    +                    ConnectionError("Connection is closed by peer"))
    +            else:
    +                self._write_ready_fut.set_exception(exc)
    +        self._proto.connection_lost(exc)
    +
    +    def pause_writing(self):
    +        if self._write_ready_fut is not None:
    +            return
    +        self._write_ready_fut = self._transport._loop.create_future()
    +
    +    def resume_writing(self):
    +        if self._write_ready_fut is None:
    +            return
    +        self._write_ready_fut.set_result(False)
    +        self._write_ready_fut = None
    +
    +    def data_received(self, data):
    +        raise RuntimeError("Invalid state: reading should be paused")
    +
    +    def eof_received(self):
    +        raise RuntimeError("Invalid state: reading should be paused")
    +
    +    async def restore(self):
    +        self._transport.set_protocol(self._proto)
    +        if self._should_resume_reading:
    +            self._transport.resume_reading()
    +        if self._write_ready_fut is not None:
    +            # Cancel the future.
    +            # Basically it has no effect because protocol is switched back,
    +            # no code should wait for it anymore.
    +            self._write_ready_fut.cancel()
    +        if self._should_resume_writing:
    +            self._proto.resume_writing()
     
     
     class Server(events.AbstractServer):
     
    -    def __init__(self, loop, sockets):
    +    def __init__(self, loop, sockets, protocol_factory, ssl_context, backlog,
    +                 ssl_handshake_timeout):
             self._loop = loop
    -        self.sockets = sockets
    +        self._sockets = sockets
             self._active_count = 0
             self._waiters = []
    +        self._protocol_factory = protocol_factory
    +        self._backlog = backlog
    +        self._ssl_context = ssl_context
    +        self._ssl_handshake_timeout = ssl_handshake_timeout
    +        self._serving = False
    +        self._serving_forever_fut = None
     
         def __repr__(self):
    -        return '<%s sockets=%r>' % (self.__class__.__name__, self.sockets)
    +        return f'<{self.__class__.__name__} sockets={self.sockets!r}>'
     
         def _attach(self):
    -        assert self.sockets is not None
    +        assert self._sockets is not None
             self._active_count += 1
     
         def _detach(self):
             assert self._active_count > 0
             self._active_count -= 1
    -        if self._active_count == 0 and self.sockets is None:
    -            self._wakeup()
    -
    -    def close(self):
    -        sockets = self.sockets
    -        if sockets is None:
    -            return
    -        self.sockets = None
    -        for sock in sockets:
    -            self._loop._stop_serving(sock)
    -        if self._active_count == 0:
    +        if self._active_count == 0 and self._sockets is None:
                 self._wakeup()
     
         def _wakeup(self):
    @@ -241,13 +274,80 @@
                 if not waiter.done():
                     waiter.set_result(waiter)
     
    -    @coroutine
    -    def wait_closed(self):
    -        if self.sockets is None or self._waiters is None:
    +    def _start_serving(self):
    +        if self._serving:
    +            return
    
    From pypy.commits at gmail.com  Thu Aug 29 12:53:12 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Thu, 29 Aug 2019 09:53:12 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: Make pypy/tool/dis3.py valid Python2
    Message-ID: <5d6802f8.1c69fb81.9d108.972d@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97332:2d29adb1a8b7
    Date: 2019-08-29 17:24 +0100
    http://bitbucket.org/pypy/pypy/changeset/2d29adb1a8b7/
    
    Log:	Make pypy/tool/dis3.py valid Python2
    
    diff --git a/pypy/tool/dis3.py b/pypy/tool/dis3.py
    --- a/pypy/tool/dis3.py
    +++ b/pypy/tool/dis3.py
    @@ -84,7 +84,7 @@
             try:
                 tb = sys.last_traceback
             except AttributeError:
    -            raise RuntimeError("no last traceback to disassemble") from None
    +            raise RuntimeError("no last traceback to disassemble")
             while tb.tb_next: tb = tb.tb_next
         disassemble(tb.tb_frame.f_code, tb.tb_lasti, file=file)
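
For reference, a minimal standalone illustration (hypothetical snippet, not part of dis3.py) of why the clause above had to go: "raise ... from ..." is Python 3-only syntax and a SyntaxError on the Python 2 interpreters that PyPy's tooling still runs on.

    src = "raise RuntimeError('no last traceback to disassemble') from None"
    try:
        compile(src, "<example>", "exec")
    except SyntaxError:
        print("rejected")   # this branch is taken on Python 2
    else:
        print("accepted")   # and this one on Python 3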
     
    
    From pypy.commits at gmail.com  Thu Aug 29 12:53:13 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Thu, 29 Aug 2019 09:53:13 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: Use pypy3.6 opcodes for now
    Message-ID: <5d6802f9.1c69fb81.f4c2d.3686@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97333:a0c09c770864
    Date: 2019-08-29 17:51 +0100
    http://bitbucket.org/pypy/pypy/changeset/a0c09c770864/
    
    Log:	Use pypy3.6 opcodes for now
    
    diff --git a/lib-python/3/opcode.py b/lib-python/3/opcode.py
    --- a/lib-python/3/opcode.py
    +++ b/lib-python/3/opcode.py
    @@ -171,6 +171,7 @@
     haslocal.append(125)
     def_op('DELETE_FAST', 126)      # Local variable number
     haslocal.append(126)
    +name_op('STORE_ANNOTATION', 127) # Index in name list XXX: removed in CPython 3.7
     
     def_op('RAISE_VARARGS', 130)    # Number of raise arguments (1, 2, or 3)
     def_op('CALL_FUNCTION', 131)    # #args + (#kwargs << 8)
    @@ -218,6 +219,9 @@
     def_op('BUILD_CONST_KEY_MAP', 156)
     def_op('BUILD_STRING', 157)   # in CPython 3.6, but available in PyPy from 3.5
     
    +#name_op('LOAD_METHOD', 160)
    +#def_op('CALL_METHOD', 161)
    +
     # pypy modification, experimental bytecode
     def_op('LOOKUP_METHOD', 201)          # Index in name list
     hasname.append(201)
    @@ -225,7 +229,4 @@
     def_op('BUILD_LIST_FROM_ARG', 203)
     def_op('LOAD_REVDB_VAR', 205)         # reverse debugger (syntax example: $5)
     
    -name_op('LOAD_METHOD', 160)
    -def_op('CALL_METHOD', 161)
    -
     del def_op, name_op, jrel_op, jabs_op
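
For context, a quick illustration (CPython 3.7 session, not PyPy-specific; exact offsets vary by version) of the LOAD_METHOD/CALL_METHOD pair that the hunk above disables on the PyPy side: it is what CPython 3.7 emits for an ordinary method call.

    import dis
    dis.dis(compile("obj.method()", "<example>", "exec"))
    # expected output includes something like:
    #   0 LOAD_NAME      0 (obj)
    #   2 LOAD_METHOD    1 (method)
    #   4 CALL_METHOD    0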
    
    From pypy.commits at gmail.com  Thu Aug 29 12:53:15 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Thu, 29 Aug 2019 09:53:15 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: _thread is required by importlib now
    Message-ID: <5d6802fb.1c69fb81.80af6.0d8a@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97334:db91b0c1a492
    Date: 2019-08-29 17:52 +0100
    http://bitbucket.org/pypy/pypy/changeset/db91b0c1a492/
    
    Log:	_thread is required by importlib now
    
    diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
    --- a/pypy/config/pypyoption.py
    +++ b/pypy/config/pypyoption.py
    @@ -17,6 +17,7 @@
     essential_modules = set([
         "exceptions", "_io", "sys", "builtins", "posix", "_warnings",
         "itertools", "_frozen_importlib", "operator", "_locale", "struct",
    +    "thread",
     ])
     if sys.platform == "win32":
         essential_modules.add("_winreg")
    @@ -36,7 +37,7 @@
         "_socket", "unicodedata", "mmap", "fcntl", "pwd",
         "select", "zipimport", "_lsprof", "signal", "_rawffi", "termios",
         "zlib", "bz2", "_md5", "_minimal_curses",
    -    "thread", "itertools", "pyexpat", "cpyext", "array",
    +    "itertools", "pyexpat", "cpyext", "array",
         "binascii", "_multiprocessing", '_warnings', "_collections",
         "_multibytecodec", "_continuation", "_cffi_backend",
         "_csv", "_pypyjson", "_posixsubprocess", "_cppyy", # "micronumpy",
    
    From pypy.commits at gmail.com  Thu Aug 29 13:56:47 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Thu, 29 Aug 2019 10:56:47 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: importlib._bootstrap._install() was split
     into 2 funcs due to PEP432/587 (bpo-22257)
    Message-ID: <5d6811df.1c69fb81.51c08.005e@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97335:a10d1ef767e8
    Date: 2019-08-29 18:56 +0100
    http://bitbucket.org/pypy/pypy/changeset/a10d1ef767e8/
    
    Log:	importlib._bootstrap._install() was split into 2 funcs due to
    	PEP432/587 (bpo-22257)
    
    diff --git a/pypy/module/_frozen_importlib/moduledef.py b/pypy/module/_frozen_importlib/moduledef.py
    --- a/pypy/module/_frozen_importlib/moduledef.py
    +++ b/pypy/module/_frozen_importlib/moduledef.py
    @@ -94,3 +94,6 @@
             space.call_function(w_install,
                                 space.getbuiltinmodule('sys'),
                                 space.getbuiltinmodule('_imp'))
    +        w_install_external = self.getdictvalue(
    +            space, '_install_external_importers')
    +        space.call_function(w_install_external)
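
For reference, a small check (plain Python 3 session, not part of the changeset) that the frozen bootstrap exposes the two halves this code now calls: _install() wires up the builtin and frozen importers, and _install_external_importers() adds the path-based finders afterwards.

    import importlib._bootstrap as _bootstrap
    print(hasattr(_bootstrap, '_install'))                     # True
    print(hasattr(_bootstrap, '_install_external_importers'))  # True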
    
    From pypy.commits at gmail.com  Thu Aug 29 13:57:53 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Thu, 29 Aug 2019 10:57:53 -0700 (PDT)
    Subject: [pypy-commit] pypy default: Don't use range() in a function with
     @no_collect
    Message-ID: <5d681221.1c69fb81.51c08.00ea@mx.google.com>
    
    Author: Armin Rigo 
    Branch: 
    Changeset: r97336:178d36cf8694
    Date: 2019-08-29 19:55 +0200
    http://bitbucket.org/pypy/pypy/changeset/178d36cf8694/
    
    Log:	Don't use range() in a function with @no_collect
    
    diff --git a/rpython/rlib/rsiphash.py b/rpython/rlib/rsiphash.py
    --- a/rpython/rlib/rsiphash.py
    +++ b/rpython/rlib/rsiphash.py
    @@ -170,10 +170,12 @@
             # unicode strings where CPython uses 2 bytes per character.
             addr = rstr._get_raw_buf_unicode(rstr.UNICODE, ll_s, 0)
             SZ = rffi.sizeof(rstr.UNICODE.chars.OF)
    -        for i in range(length):
    +        i = 0
    +        while i < length:
                 if ord(ll_s.chars[i]) > 0xFF:
                     length *= SZ
                     break
    +            i += 1
             else:
                 x = _siphash24(addr, length, SZ)
                 keepalive_until_here(ll_s)
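
A plain-Python sketch of the rewrite above (hypothetical function and arguments; presumably range() would allocate here, which @rgc.no_collect forbids): the for/else over range() becomes an explicit index loop with the same break/else semantics.

    def first_wide_char(chars, length):
        i = 0
        while i < length:
            if ord(chars[i]) > 0xFF:
                break               # found a character above 0xFF
            i += 1
        else:
            return -1               # loop finished without break
        return i

    print(first_wide_char(u"abc\u1234", 4))   # 3
    print(first_wide_char(u"abc", 3))         # -1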
    
    From pypy.commits at gmail.com  Thu Aug 29 14:00:36 2019
    From: pypy.commits at gmail.com (mattip)
    Date: Thu, 29 Aug 2019 11:00:36 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: merge default into py3.6
    Message-ID: <5d6812c4.1c69fb81.ee88.3fd0@mx.google.com>
    
    Author: Matti Picus 
    Branch: py3.6
    Changeset: r97337:cfb972269866
    Date: 2019-08-29 20:59 +0300
    http://bitbucket.org/pypy/pypy/changeset/cfb972269866/
    
    Log:	merge default into py3.6
    
    diff --git a/rpython/rlib/rsiphash.py b/rpython/rlib/rsiphash.py
    --- a/rpython/rlib/rsiphash.py
    +++ b/rpython/rlib/rsiphash.py
    @@ -170,10 +170,12 @@
             # unicode strings where CPython uses 2 bytes per character.
             addr = rstr._get_raw_buf_unicode(rstr.UNICODE, ll_s, 0)
             SZ = rffi.sizeof(rstr.UNICODE.chars.OF)
    -        for i in range(length):
    +        i = 0
    +        while i < length:
                 if ord(ll_s.chars[i]) > 0xFF:
                     length *= SZ
                     break
    +            i += 1
             else:
                 x = _siphash24(addr, length, SZ)
                 keepalive_until_here(ll_s)
    
    From pypy.commits at gmail.com  Thu Aug 29 19:19:56 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Thu, 29 Aug 2019 16:19:56 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: Add new sys.flags attributes
    Message-ID: <5d685d9c.1c69fb81.db406.c071@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97338:bcb6578cf796
    Date: 2019-08-30 00:19 +0100
    http://bitbucket.org/pypy/pypy/changeset/bcb6578cf796/
    
    Log:	Add new sys.flags attributes
    
    diff --git a/pypy/module/sys/app.py b/pypy/module/sys/app.py
    --- a/pypy/module/sys/app.py
    +++ b/pypy/module/sys/app.py
    @@ -56,7 +56,7 @@
     If it is another kind of object, it will be printed and the system
     exit status will be one (i.e., failure)."""
         # note that we cannot simply use SystemExit(exitcode) here.
    -    # in the default branch, we use "raise SystemExit, exitcode", 
    +    # in the default branch, we use "raise SystemExit, exitcode",
         # which leads to an extra de-tupelizing
         # in normalize_exception, which is exactly like CPython's.
         if isinstance(exitcode, tuple):
    @@ -106,8 +106,10 @@
         quiet = structseqfield(10)
         hash_randomization = structseqfield(11)
         isolated = structseqfield(12)
    +    dev_mode = structseqfield(13)
    +    utf8_mode = structseqfield(14)
     
    -null_sysflags = sysflags((0,)*13)
    +null_sysflags = sysflags((0,)*15)
     null__xoptions = {}
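
Quick illustration (any Python 3.7 interpreter) of the two new fields; they mirror the -X dev and -X utf8 / PYTHONUTF8 options.

    import sys
    print(sys.flags.dev_mode)    # False unless started with -X dev
    print(sys.flags.utf8_mode)   # 0 or 1 depending on -X utf8 / PYTHONUTF8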
     
     
    
    From pypy.commits at gmail.com  Thu Aug 29 20:56:43 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Thu, 29 Aug 2019 17:56:43 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: Partially revert stdlib sre to 3.6 version
    Message-ID: <5d68744b.1c69fb81.ea017.97f9@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97339:c1aa5a9acd68
    Date: 2019-08-30 01:55 +0100
    http://bitbucket.org/pypy/pypy/changeset/c1aa5a9acd68/
    
    Log:	Partially revert stdlib sre to 3.6 version
    
    diff --git a/lib-python/3/sre_compile.py b/lib-python/3/sre_compile.py
    --- a/lib-python/3/sre_compile.py
    +++ b/lib-python/3/sre_compile.py
    @@ -20,7 +20,6 @@
     _REPEATING_CODES = {REPEAT, MIN_REPEAT, MAX_REPEAT}
     _SUCCESS_CODES = {SUCCESS, FAILURE}
     _ASSERT_CODES = {ASSERT, ASSERT_NOT}
    -_UNIT_CODES = _LITERAL_CODES | {ANY, IN}
     
     # Sets of lowercase characters which have the same uppercase.
     _equivalences = (
    @@ -62,12 +61,6 @@
     _ignorecase_fixes = {i: tuple(j for j in t if i != j)
                          for t in _equivalences for i in t}
     
    -def _combine_flags(flags, add_flags, del_flags,
    -                   TYPE_FLAGS=sre_parse.TYPE_FLAGS):
    -    if add_flags & TYPE_FLAGS:
    -        flags &= ~TYPE_FLAGS
    -    return (flags | add_flags) & ~del_flags
    -
     def _compile(code, pattern, flags):
         # internal: compile a (sub)pattern
         emit = code.append
    @@ -76,38 +69,19 @@
         REPEATING_CODES = _REPEATING_CODES
         SUCCESS_CODES = _SUCCESS_CODES
         ASSERT_CODES = _ASSERT_CODES
    -    iscased = None
    -    tolower = None
    -    fixes = None
    -    if flags & SRE_FLAG_IGNORECASE and not flags & SRE_FLAG_LOCALE:
    -        if flags & SRE_FLAG_UNICODE and not flags & SRE_FLAG_ASCII:
    -            iscased = _sre.unicode_iscased
    -            tolower = _sre.unicode_tolower
    -            fixes = _ignorecase_fixes
    -        else:
    -            iscased = _sre.ascii_iscased
    -            tolower = _sre.ascii_tolower
    +    if (flags & SRE_FLAG_IGNORECASE and
    +            not (flags & SRE_FLAG_LOCALE) and
    +            flags & SRE_FLAG_UNICODE and
    +            not (flags & SRE_FLAG_ASCII)):
    +        fixes = _ignorecase_fixes
    +    else:
    +        fixes = None
         for op, av in pattern:
             if op in LITERAL_CODES:
    -            if not flags & SRE_FLAG_IGNORECASE:
    -                emit(op)
    -                emit(av)
    -            elif flags & SRE_FLAG_LOCALE:
    -                emit(OP_LOCALE_IGNORE[op])
    -                emit(av)
    -            elif not iscased(av):
    -                emit(op)
    -                emit(av)
    -            else:
    -                lo = tolower(av)
    -                if not fixes:  # ascii
    -                    emit(OP_IGNORE[op])
    -                    emit(lo)
    -                elif lo not in fixes:
    -                    emit(OP_UNICODE_IGNORE[op])
    -                    emit(lo)
    -                else:
    -                    emit(IN_UNI_IGNORE)
    +            if flags & SRE_FLAG_IGNORECASE:
    +                lo = _sre.getlower(av, flags)
    +                if fixes and lo in fixes:
    +                    emit(IN_IGNORE)
                         skip = _len(code); emit(0)
                         if op is NOT_LITERAL:
                             emit(NEGATE)
    @@ -116,18 +90,22 @@
                             emit(k)
                         emit(FAILURE)
                         code[skip] = _len(code) - skip
    +                else:
    +                    emit(OP_IGNORE[op])
    +                    emit(lo)
    +            else:
    +                emit(op)
    +                emit(av)
             elif op is IN:
    -            charset, hascased = _optimize_charset(av, iscased, tolower, fixes)
    -            if flags & SRE_FLAG_IGNORECASE and flags & SRE_FLAG_LOCALE:
    -                emit(IN_LOC_IGNORE)
    -            elif not hascased:
    -                emit(IN)
    -            elif not fixes:  # ascii
    -                emit(IN_IGNORE)
    +            if flags & SRE_FLAG_IGNORECASE:
    +                emit(OP_IGNORE[op])
    +                def fixup(literal, flags=flags):
    +                    return _sre.getlower(literal, flags)
                 else:
    -                emit(IN_UNI_IGNORE)
    +                emit(op)
    +                fixup = None
                 skip = _len(code); emit(0)
    -            _compile_charset(charset, flags, code)
    +            _compile_charset(av, flags, code, fixup, fixes)
                 code[skip] = _len(code) - skip
             elif op is ANY:
                 if flags & SRE_FLAG_DOTALL:
    @@ -137,7 +115,7 @@
             elif op in REPEATING_CODES:
                 if flags & SRE_FLAG_TEMPLATE:
                     raise error("internal: unsupported template operator %r" % (op,))
    -            if _simple(av[2]):
    +            elif _simple(av) and op is not REPEAT:
                     if op is MAX_REPEAT:
                         emit(REPEAT_ONE)
                     else:
    @@ -164,8 +142,8 @@
                 if group:
                     emit(MARK)
                     emit((group-1)*2)
    -            # _compile_info(code, p, _combine_flags(flags, add_flags, del_flags))
    -            _compile(code, p, _combine_flags(flags, add_flags, del_flags))
    +            # _compile_info(code, p, (flags | add_flags) & ~del_flags)
    +            _compile(code, p, (flags | add_flags) & ~del_flags)
                 if group:
                     emit(MARK)
                     emit((group-1)*2+1)
    @@ -221,14 +199,10 @@
                     av = CH_UNICODE[av]
                 emit(av)
             elif op is GROUPREF:
    -            if not flags & SRE_FLAG_IGNORECASE:
    +            if flags & SRE_FLAG_IGNORECASE:
    +                emit(OP_IGNORE[op])
    +            else:
                     emit(op)
    -            elif flags & SRE_FLAG_LOCALE:
    -                emit(GROUPREF_LOC_IGNORE)
    -            elif not fixes:  # ascii
    -                emit(GROUPREF_IGNORE)
    -            else:
    -                emit(GROUPREF_UNI_IGNORE)
                 emit(av-1)
             elif op is GROUPREF_EXISTS:
                 emit(op)
    @@ -246,16 +220,16 @@
             else:
                 raise error("internal: unsupported operand type %r" % (op,))
     
    -def _compile_charset(charset, flags, code):
    +def _compile_charset(charset, flags, code, fixup=None, fixes=None):
         # compile charset subprogram
         emit = code.append
    -    for op, av in charset:
    +    for op, av in _optimize_charset(charset, fixup, fixes):
             emit(op)
             if op is NEGATE:
                 pass
             elif op is LITERAL:
                 emit(av)
    -        elif op is RANGE or op is RANGE_UNI_IGNORE:
    +        elif op is RANGE or op is RANGE_IGNORE:
                 emit(av[0])
                 emit(av[1])
             elif op is CHARSET:
    @@ -273,12 +247,11 @@
                 raise error("internal: unsupported set operator %r" % (op,))
         emit(FAILURE)
     
    -def _optimize_charset(charset, iscased=None, fixup=None, fixes=None):
    +def _optimize_charset(charset, fixup, fixes):
         # internal: optimize character set
         out = []
         tail = []
         charmap = bytearray(256)
    -    hascased = False
         for op, av in charset:
             while True:
                 try:
    @@ -289,24 +262,18 @@
                             if fixes and lo in fixes:
                                 for k in fixes[lo]:
                                     charmap[k] = 1
    -                        if not hascased and iscased(av):
    -                            hascased = True
                         else:
                             charmap[av] = 1
                     elif op is RANGE:
                         r = range(av[0], av[1]+1)
                         if fixup:
    -                        if fixes:
    -                            for i in map(fixup, r):
    -                                charmap[i] = 1
    -                                if i in fixes:
    -                                    for k in fixes[i]:
    -                                        charmap[k] = 1
    -                        else:
    -                            for i in map(fixup, r):
    -                                charmap[i] = 1
    -                        if not hascased:
    -                            hascased = any(map(iscased, r))
    +                        r = map(fixup, r)
    +                    if fixup and fixes:
    +                        for i in r:
    +                            charmap[i] = 1
    +                            if i in fixes:
    +                                for k in fixes[i]:
    +                                    charmap[k] = 1
                         else:
                             for i in r:
                                 charmap[i] = 1
    @@ -320,13 +287,11 @@
                         charmap += b'\0' * 0xff00
                         continue
                     # Character set contains non-BMP character codes.
    -                if fixup:
    -                    hascased = True
    -                    # There are only two ranges of cased non-BMP characters:
    -                    # 10400-1044F (Deseret) and 118A0-118DF (Warang Citi),
    -                    # and for both ranges RANGE_UNI_IGNORE works.
    -                    if op is RANGE:
    -                        op = RANGE_UNI_IGNORE
    +                # There are only two ranges of cased non-BMP characters:
    +                # 10400-1044F (Deseret) and 118A0-118DF (Warang Citi),
    +                # and for both ranges RANGE_IGNORE works.
    +                if fixup and op is RANGE:
    +                    op = RANGE_IGNORE
                     tail.append((op, av))
                 break
     
    @@ -354,17 +319,17 @@
                     out.append((RANGE, (p, q - 1)))
             out += tail
             # if the case was changed or new representation is more compact
    -        if hascased or len(out) < len(charset):
    -            return out, hascased
    +        if fixup or len(out) < len(charset):
    +            return out
             # else original character set is good enough
    -        return charset, hascased
    +        return charset
     
         # use bitmap
         if len(charmap) == 256:
             data = _mk_bitmap(charmap)
             out.append((CHARSET, data))
             out += tail
    -        return out, hascased
    +        return out
     
         # To represent a big charset, first a bitmap of all characters in the
         # set is constructed. Then, this bitmap is sliced into chunks of 256
    @@ -403,7 +368,7 @@
         data[0:0] = [block] + _bytes_to_codes(mapping)
         out.append((BIGCHARSET, data))
         out += tail
    -    return out, hascased
    +    return out
     
     _CODEBITS = _sre.CODESIZE * 8
     MAXCODE = (1 << _CODEBITS) - 1
    @@ -424,14 +389,10 @@
         assert len(a) * a.itemsize == len(b)
         return a.tolist()
     
    -def _simple(p):
    -    # check if this subpattern is a "simple" operator
    -    if len(p) != 1:
    -        return False
    -    op, av = p[0]
    -    if op is SUBPATTERN:
    -        return av[0] is None and _simple(av[-1])
    -    return op in _UNIT_CODES
    +def _simple(av):
    +    # check if av is a "simple" operator
    +    lo, hi = av[2].getwidth()
    +    return lo == hi == 1 and av[2][0][0] != SUBPATTERN
     
     def _generate_overlap_table(prefix):
         """
    @@ -454,31 +415,19 @@
                 table[i] = idx + 1
         return table
     
    -def _get_iscased(flags):
    -    if not flags & SRE_FLAG_IGNORECASE:
    -        return None
    -    elif flags & SRE_FLAG_UNICODE and not flags & SRE_FLAG_ASCII:
    -        return _sre.unicode_iscased
    -    else:
    -        return _sre.ascii_iscased
    -
    -def _get_literal_prefix(pattern, flags):
    +def _get_literal_prefix(pattern):
         # look for literal prefix
         prefix = []
         prefixappend = prefix.append
         prefix_skip = None
    -    iscased = _get_iscased(flags)
         for op, av in pattern.data:
             if op is LITERAL:
    -            if iscased and iscased(av):
    -                break
                 prefixappend(av)
             elif op is SUBPATTERN:
                 group, add_flags, del_flags, p = av
    -            flags1 = _combine_flags(flags, add_flags, del_flags)
    -            if flags1 & SRE_FLAG_IGNORECASE and flags1 & SRE_FLAG_LOCALE:
    +            if add_flags & SRE_FLAG_IGNORECASE:
                     break
    -            prefix1, prefix_skip1, got_all = _get_literal_prefix(p, flags1)
    +            prefix1, prefix_skip1, got_all = _get_literal_prefix(p)
                 if prefix_skip is None:
                     if group is not None:
                         prefix_skip = len(prefix)
    @@ -493,49 +442,46 @@
             return prefix, prefix_skip, True
         return prefix, prefix_skip, False
     
    -def _get_charset_prefix(pattern, flags):
    -    while True:
    -        if not pattern.data:
    -            return None
    +def _get_charset_prefix(pattern):
    +    charset = [] # not used
    +    charsetappend = charset.append
    +    if pattern.data:
             op, av = pattern.data[0]
    -        if op is not SUBPATTERN:
    -            break
    -        group, add_flags, del_flags, pattern = av
    -        flags = _combine_flags(flags, add_flags, del_flags)
    -        if flags & SRE_FLAG_IGNORECASE and flags & SRE_FLAG_LOCALE:
    -            return None
    -
    -    iscased = _get_iscased(flags)
    -    if op is LITERAL:
    -        if iscased and iscased(av):
    -            return None
    -        return [(op, av)]
    -    elif op is BRANCH:
    -        charset = []
    -        charsetappend = charset.append
    -        for p in av[1]:
    -            if not p:
    -                return None
    -            op, av = p[0]
    -            if op is LITERAL and not (iscased and iscased(av)):
    -                charsetappend((op, av))
    +        if op is SUBPATTERN:
    +            group, add_flags, del_flags, p = av
    +            if p and not (add_flags & SRE_FLAG_IGNORECASE):
    +                op, av = p[0]
    +                if op is LITERAL:
    +                    charsetappend((op, av))
    +                elif op is BRANCH:
    +                    c = []
    +                    cappend = c.append
    +                    for p in av[1]:
    +                        if not p:
    +                            break
    +                        op, av = p[0]
    +                        if op is LITERAL:
    +                            cappend((op, av))
    +                        else:
    +                            break
    +                    else:
    +                        charset = c
    +        elif op is BRANCH:
    +            c = []
    +            cappend = c.append
    +            for p in av[1]:
    +                if not p:
    +                    break
    +                op, av = p[0]
    +                if op is LITERAL:
    +                    cappend((op, av))
    +                else:
    +                    break
                 else:
    -                return None
    -        return charset
    -    elif op is IN:
    -        charset = av
    -        if iscased:
    -            for op, av in charset:
    -                if op is LITERAL:
    -                    if iscased(av):
    -                        return None
    -                elif op is RANGE:
    -                    if av[1] > 0xffff:
    -                        return None
    -                    if any(map(iscased, range(av[0], av[1]+1))):
    -                        return None
    -        return charset
    -    return None
    +                charset = c
    +        elif op is IN:
    +            charset = av
    +    return charset
     
     def _compile_info(code, pattern, flags):
         # internal: compile an info block.  in the current version,
    @@ -551,12 +497,12 @@
         prefix = []
         prefix_skip = 0
         charset = [] # not used
    -    if not (flags & SRE_FLAG_IGNORECASE and flags & SRE_FLAG_LOCALE):
    +    if not (flags & SRE_FLAG_IGNORECASE):
             # look for literal prefix
    -        prefix, prefix_skip, got_all = _get_literal_prefix(pattern, flags)
    +        prefix, prefix_skip, got_all = _get_literal_prefix(pattern)
             # if no prefix, look for charset prefix
             if not prefix:
    -            charset = _get_charset_prefix(pattern, flags)
    +            charset = _get_charset_prefix(pattern)
     ##     if prefix:
     ##         print("*** PREFIX", prefix, prefix_skip)
     ##     if charset:
    @@ -591,8 +537,6 @@
             # generate overlap table
             code.extend(_generate_overlap_table(prefix))
         elif charset:
    -        charset, hascased = _optimize_charset(charset)
    -        assert not hascased
             _compile_charset(charset, flags, code)
         code[skip] = len(code) - skip
     
    @@ -614,152 +558,6 @@
     
         return code
     
    -def _hex_code(code):
    -    return '[%s]' % ', '.join('%#0*x' % (_sre.CODESIZE*2+2, x) for x in code)
    -
    -def dis(code):
    -    import sys
    -
    -    labels = set()
    -    level = 0
    -    offset_width = len(str(len(code) - 1))
    -
    -    def dis_(start, end):
    -        def print_(*args, to=None):
    -            if to is not None:
    -                labels.add(to)
    -                args += ('(to %d)' % (to,),)
    -            print('%*d%s ' % (offset_width, start, ':' if start in labels else '.'),
    -                  end='  '*(level-1))
    -            print(*args)
    -
    -        def print_2(*args):
    -            print(end=' '*(offset_width + 2*level))
    -            print(*args)
    -
    -        nonlocal level
    -        level += 1
    -        i = start
    -        while i < end:
    -            start = i
    -            op = code[i]
    -            i += 1
    -            op = OPCODES[op]
    -            if op in (SUCCESS, FAILURE, ANY, ANY_ALL,
    -                      MAX_UNTIL, MIN_UNTIL, NEGATE):
    -                print_(op)
    -            elif op in (LITERAL, NOT_LITERAL,
    -                        LITERAL_IGNORE, NOT_LITERAL_IGNORE,
    -                        LITERAL_UNI_IGNORE, NOT_LITERAL_UNI_IGNORE,
    -                        LITERAL_LOC_IGNORE, NOT_LITERAL_LOC_IGNORE):
    -                arg = code[i]
    -                i += 1
    -                print_(op, '%#02x (%r)' % (arg, chr(arg)))
    -            elif op is AT:
    -                arg = code[i]
    -                i += 1
    -                arg = str(ATCODES[arg])
    -                assert arg[:3] == 'AT_'
    -                print_(op, arg[3:])
    -            elif op is CATEGORY:
    -                arg = code[i]
    -                i += 1
    -                arg = str(CHCODES[arg])
    -                assert arg[:9] == 'CATEGORY_'
    -                print_(op, arg[9:])
    -            elif op in (IN, IN_IGNORE, IN_UNI_IGNORE, IN_LOC_IGNORE):
    -                skip = code[i]
    -                print_(op, skip, to=i+skip)
    -                dis_(i+1, i+skip)
    -                i += skip
    -            elif op in (RANGE, RANGE_UNI_IGNORE):
    -                lo, hi = code[i: i+2]
    -                i += 2
    -                print_(op, '%#02x %#02x (%r-%r)' % (lo, hi, chr(lo), chr(hi)))
    -            elif op is CHARSET:
    -                print_(op, _hex_code(code[i: i + 256//_CODEBITS]))
    -                i += 256//_CODEBITS
    -            elif op is BIGCHARSET:
    -                arg = code[i]
    -                i += 1
    -                mapping = list(b''.join(x.to_bytes(_sre.CODESIZE, sys.byteorder)
    -                                        for x in code[i: i + 256//_sre.CODESIZE]))
    -                print_(op, arg, mapping)
    -                i += 256//_sre.CODESIZE
    -                level += 1
    -                for j in range(arg):
    -                    print_2(_hex_code(code[i: i + 256//_CODEBITS]))
    -                    i += 256//_CODEBITS
    -                level -= 1
    -            elif op in (MARK, GROUPREF, GROUPREF_IGNORE, GROUPREF_UNI_IGNORE,
    -                        GROUPREF_LOC_IGNORE):
    -                arg = code[i]
    -                i += 1
    -                print_(op, arg)
    -            elif op is JUMP:
    -                skip = code[i]
    -                print_(op, skip, to=i+skip)
    -                i += 1
    -            elif op is BRANCH:
    -                skip = code[i]
    -                print_(op, skip, to=i+skip)
    -                while skip:
    -                    dis_(i+1, i+skip)
    -                    i += skip
    -                    start = i
    -                    skip = code[i]
    -                    if skip:
    -                        print_('branch', skip, to=i+skip)
    -                    else:
    -                        print_(FAILURE)
    -                i += 1
    -            elif op in (REPEAT, REPEAT_ONE, MIN_REPEAT_ONE):
    -                skip, min, max = code[i: i+3]
    -                if max == MAXREPEAT:
    -                    max = 'MAXREPEAT'
    -                print_(op, skip, min, max, to=i+skip)
    -                dis_(i+3, i+skip)
    -                i += skip
    -            elif op is GROUPREF_EXISTS:
    -                arg, skip = code[i: i+2]
    -                print_(op, arg, skip, to=i+skip)
    -                i += 2
    -            elif op in (ASSERT, ASSERT_NOT):
    -                skip, arg = code[i: i+2]
    -                print_(op, skip, arg, to=i+skip)
    -                dis_(i+2, i+skip)
    -                i += skip
    -            elif op is INFO:
    -                skip, flags, min, max = code[i: i+4]
    -                if max == MAXREPEAT:
    -                    max = 'MAXREPEAT'
    -                print_(op, skip, bin(flags), min, max, to=i+skip)
    -                start = i+4
    -                if flags & SRE_INFO_PREFIX:
    -                    prefix_len, prefix_skip = code[i+4: i+6]
    -                    print_2('  prefix_skip', prefix_skip)
    -                    start = i + 6
    -                    prefix = code[start: start+prefix_len]
    -                    print_2('  prefix',
    -                            '[%s]' % ', '.join('%#02x' % x for x in prefix),
    -                            '(%r)' % ''.join(map(chr, prefix)))
    -                    start += prefix_len
    -                    print_2('  overlap', code[start: start+prefix_len])
    -                    start += prefix_len
    -                if flags & SRE_INFO_CHARSET:
    -                    level += 1
    -                    print_2('in')
    -                    dis_(start, i+skip)
    -                    level -= 1
    -                i += skip
    -            else:
    -                raise ValueError(op)
    -
    -        level -= 1
    -
    -    dis_(0, len(code))
    -
    -
     def compile(p, flags=0):
         # internal: convert pattern list to internal format
     
    @@ -771,9 +569,7 @@
     
         code = _code(p, flags)
     
    -    if flags & SRE_FLAG_DEBUG:
    -        print()
    -        dis(code)
    +    # print(code)
     
         # map in either direction
         groupindex = p.pattern.groupdict
    @@ -784,5 +580,5 @@
         return _sre.compile(
             pattern, flags | p.pattern.flags, code,
             p.pattern.groups-1,
    -        groupindex, tuple(indexgroup)
    +        groupindex, indexgroup
             )
    diff --git a/lib-python/3/sre_constants.py b/lib-python/3/sre_constants.py
    --- a/lib-python/3/sre_constants.py
    +++ b/lib-python/3/sre_constants.py
    @@ -13,7 +13,7 @@
     
     # update when constants are added or removed
     
    -MAGIC = 20171005
    +MAGIC = 20140917
     
     from _sre import MAXREPEAT, MAXGROUPS
     
    @@ -32,8 +32,6 @@
             colno: The column corresponding to pos (may be None)
         """
     
    -    __module__ = 're'
    -
         def __init__(self, msg, pattern=None, pos=None):
             self.msg = msg
             self.pattern = pattern
    @@ -84,37 +82,22 @@
         CALL
         CATEGORY
         CHARSET BIGCHARSET
    -    GROUPREF GROUPREF_EXISTS
    -    IN
    +    GROUPREF GROUPREF_EXISTS GROUPREF_IGNORE
    +    IN IN_IGNORE
         INFO
         JUMP
    -    LITERAL
    +    LITERAL LITERAL_IGNORE
         MARK
         MAX_UNTIL
         MIN_UNTIL
    -    NOT_LITERAL
    +    NOT_LITERAL NOT_LITERAL_IGNORE
         NEGATE
         RANGE
         REPEAT
         REPEAT_ONE
         SUBPATTERN
         MIN_REPEAT_ONE
    -
    -    GROUPREF_IGNORE
    -    IN_IGNORE
    -    LITERAL_IGNORE
    -    NOT_LITERAL_IGNORE
    -
    -    GROUPREF_LOC_IGNORE
    -    IN_LOC_IGNORE
    -    LITERAL_LOC_IGNORE
    -    NOT_LITERAL_LOC_IGNORE
    -
    -    GROUPREF_UNI_IGNORE
    -    IN_UNI_IGNORE
    -    LITERAL_UNI_IGNORE
    -    NOT_LITERAL_UNI_IGNORE
    -    RANGE_UNI_IGNORE
    +    RANGE_IGNORE
     
         MIN_REPEAT MAX_REPEAT
     """)
    @@ -125,9 +108,7 @@
         AT_BEGINNING AT_BEGINNING_LINE AT_BEGINNING_STRING
         AT_BOUNDARY AT_NON_BOUNDARY
         AT_END AT_END_LINE AT_END_STRING
    -
         AT_LOC_BOUNDARY AT_LOC_NON_BOUNDARY
    -
         AT_UNI_BOUNDARY AT_UNI_NON_BOUNDARY
     """)
     
    @@ -137,9 +118,7 @@
         CATEGORY_SPACE CATEGORY_NOT_SPACE
         CATEGORY_WORD CATEGORY_NOT_WORD
         CATEGORY_LINEBREAK CATEGORY_NOT_LINEBREAK
    -
         CATEGORY_LOC_WORD CATEGORY_LOC_NOT_WORD
    -
         CATEGORY_UNI_DIGIT CATEGORY_UNI_NOT_DIGIT
         CATEGORY_UNI_SPACE CATEGORY_UNI_NOT_SPACE
         CATEGORY_UNI_WORD CATEGORY_UNI_NOT_WORD
    @@ -149,18 +128,11 @@
     
     # replacement operations for "ignore case" mode
     OP_IGNORE = {
    +    GROUPREF: GROUPREF_IGNORE,
    +    IN: IN_IGNORE,
         LITERAL: LITERAL_IGNORE,
         NOT_LITERAL: NOT_LITERAL_IGNORE,
    -}
    -
    -OP_LOCALE_IGNORE = {
    -    LITERAL: LITERAL_LOC_IGNORE,
    -    NOT_LITERAL: NOT_LITERAL_LOC_IGNORE,
    -}
    -
    -OP_UNICODE_IGNORE = {
    -    LITERAL: LITERAL_UNI_IGNORE,
    -    NOT_LITERAL: NOT_LITERAL_UNI_IGNORE,
    +    RANGE: RANGE_IGNORE,
     }
     
     AT_MULTILINE = {
    
    From pypy.commits at gmail.com  Fri Aug 30 07:53:36 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Fri, 30 Aug 2019 04:53:36 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: Don't make a subclass of
     NotImplementedError to avoid using
    Message-ID: <5d690e40.1c69fb81.6b836.5bba@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6
    Changeset: r97340:618365170725
    Date: 2019-08-30 13:52 +0200
    http://bitbucket.org/pypy/pypy/changeset/618365170725/
    
    Log:	Don't make a subclass of NotImplementedError to avoid using
    	NotImplementedError---it's unlikely to work correctly in RPython
    
    diff --git a/pypy/module/posix/interp_nt.py b/pypy/module/posix/interp_nt.py
    --- a/pypy/module/posix/interp_nt.py
    +++ b/pypy/module/posix/interp_nt.py
    @@ -35,7 +35,7 @@
     
     
     # plain NotImplementedError is invalid RPython
    -class LLNotImplemented(NotImplementedError):
    +class LLNotImplemented(Exception):
     
         def __init__(self, msg):
             self.msg = msg
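
A plain-Python sketch of the resulting pattern (hypothetical message text): the exception keeps carrying its message but derives from Exception, since subclassing and catching NotImplementedError is not valid RPython (see the following changeset).

    class LLNotImplemented(Exception):
        def __init__(self, msg):
            self.msg = msg

    try:
        raise LLNotImplemented("emulated NT call not available")
    except LLNotImplemented as e:
        print(e.msg)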
    
    From pypy.commits at gmail.com  Fri Aug 30 07:58:25 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Fri, 30 Aug 2019 04:58:25 -0700 (PDT)
    Subject: [pypy-commit] pypy default: Don't make an RPython subclass of
     AssertionError or NotImplementedError for the
    Message-ID: <5d690f61.1c69fb81.741b.b4c9@mx.google.com>
    
    Author: Armin Rigo 
    Branch: 
    Changeset: r97341:2dceead28db5
    Date: 2019-08-30 13:57 +0200
    http://bitbucket.org/pypy/pypy/changeset/2dceead28db5/
    
    Log:	Don't make an RPython subclass of AssertionError or
    	NotImplementedError for the purpose of "catching" it: it would work
    	around the detection logic but not actually work at all
    
    diff --git a/rpython/flowspace/flowcontext.py b/rpython/flowspace/flowcontext.py
    --- a/rpython/flowspace/flowcontext.py
    +++ b/rpython/flowspace/flowcontext.py
    @@ -566,11 +566,12 @@
             if not isinstance(w_check_class, Constant):
                 raise FlowingError("Non-constant except guard.")
             check_class = w_check_class.value
    -        if check_class in (NotImplementedError, AssertionError):
    -            raise FlowingError(
    -                "Catching %s is not valid in RPython" % check_class.__name__)
             if not isinstance(check_class, tuple):
                 # the simple case
    +            if issubclass(check_class, (NotImplementedError, AssertionError)):
    +                raise FlowingError(
    +                    "Catching %s is not valid in RPython" %
    +                    check_class.__name__)
                 return self.guessbool(op.issubtype(w_exc_type, w_check_class).eval(self))
             # special case for StackOverflow (see rlib/rstackovf.py)
             if check_class == rstackovf.StackOverflow:
    diff --git a/rpython/flowspace/test/test_objspace.py b/rpython/flowspace/test/test_objspace.py
    --- a/rpython/flowspace/test/test_objspace.py
    +++ b/rpython/flowspace/test/test_objspace.py
    @@ -1135,6 +1135,23 @@
                     pass
             py.test.raises(FlowingError, "self.codetest(f)")
     
    +    def test_cannot_catch_special_exceptions_2(self):
    +        class MyNIE(NotImplementedError):
    +            pass
    +        def f():
    +            try:
    +                f()
    +            except MyNIE:
    +                pass
    +        py.test.raises(FlowingError, "self.codetest(f)")
    +        #
    +        def f():
    +            try:
    +                f()
    +            except (ValueError, MyNIE):
    +                pass
    +        py.test.raises(FlowingError, "self.codetest(f)")
    +
         def test_locals_dict(self):
             def f():
                 x = 5
    
    From pypy.commits at gmail.com  Fri Aug 30 10:13:04 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Fri, 30 Aug 2019 07:13:04 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: Keep app_main sysflags in sync with
     sys.flags
    Message-ID: <5d692ef0.1c69fb81.51825.b656@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97342:66823f138171
    Date: 2019-08-30 15:12 +0100
    http://bitbucket.org/pypy/pypy/changeset/66823f138171/
    
    Log:	Keep app_main sysflags in sync with sys.flags
    
    diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py
    --- a/pypy/interpreter/app_main.py
    +++ b/pypy/interpreter/app_main.py
    @@ -384,6 +384,8 @@
         "quiet",
         "hash_randomization",
         "isolated",
    +    "dev_mode",
    +    "utf8_mode",
     )
     # ^^^ Order is significant!  Keep in sync with module.sys.app.sysflags
     
    
    From pypy.commits at gmail.com  Fri Aug 30 11:35:48 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Fri, 30 Aug 2019 08:35:48 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: Check return type of __prepare__()
     (bpo-31588)
    Message-ID: <5d694254.1c69fb81.48008.3d36@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97343:5c433fa743eb
    Date: 2019-08-30 16:33 +0100
    http://bitbucket.org/pypy/pypy/changeset/5c433fa743eb/
    
    Log:	Check return type of __prepare__() (bpo-31588)
    
    diff --git a/pypy/module/__builtin__/compiling.py b/pypy/module/__builtin__/compiling.py
    --- a/pypy/module/__builtin__/compiling.py
    +++ b/pypy/module/__builtin__/compiling.py
    @@ -130,6 +130,16 @@
                              keywords=keywords,
                              keywords_w=kwds_w.values())
             w_namespace = space.call_args(w_prep, args)
    +    if not space.ismapping_w(w_namespace):
    +        if isclass:
    +            raise oefmt(space.w_TypeError,
    +                "%N.__prepare__ must return a mapping, not %T",
    +                w_meta, w_namespace)
    +        else:
    +            raise oefmt(space.w_TypeError,
    +                ".__prepare__ must return a mapping, not %T",
    +                w_namespace)
    +
         code = w_func.getcode()
         frame = space.createframe(code, w_func.w_func_globals, w_func)
         frame.setdictscope(w_namespace)
    diff --git a/pypy/objspace/std/test/test_typeobject.py b/pypy/objspace/std/test/test_typeobject.py
    --- a/pypy/objspace/std/test/test_typeobject.py
    +++ b/pypy/objspace/std/test/test_typeobject.py
    @@ -1277,6 +1277,23 @@
             assert C.foo == 42
             """
     
    +    def test_prepare_error(self):
    +        """
    +        class BadMeta:
    +            @classmethod
    +            def __prepare__(cls, *args, **kwargs):
    +                return 42
    +        def make_class(meta):
    +            class Foo(metaclass=meta):
    +                pass
    +        excinfo = raises(TypeError, make_class, BadMeta)
    +        print(excinfo.value.args[0])
    +        assert excinfo.value.args[0].startswith('BadMeta.__prepare__')
    +        # Non-type as metaclass
    +        excinfo = raises(TypeError, make_class, BadMeta())
    +        assert excinfo.value.args[0].startswith('.__prepare__')
    +        """
    +
         def test_crash_mro_without_object_1(self):
             """
             class X(type):
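
For reference, a standalone illustration of the behaviour being added (runs on recent CPython 3.6+, which already performs this check; the message wording differs slightly between implementations):

    class BadMeta(type):
        @classmethod
        def __prepare__(cls, name, bases, **kwds):
            return 42                     # not a mapping

    try:
        class Foo(metaclass=BadMeta):
            pass
    except TypeError as e:
        print(e)   # e.g. "BadMeta.__prepare__() must return a mapping, not int"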
    
    From pypy.commits at gmail.com  Fri Aug 30 11:35:50 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Fri, 30 Aug 2019 08:35:50 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: Import untested debugging tool
     pypy.tool.dis3 only when needed
    Message-ID: <5d694256.1c69fb81.cd4e8.90d6@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97344:0587af2ecd65
    Date: 2019-08-29 17:21 +0100
    http://bitbucket.org/pypy/pypy/changeset/0587af2ecd65/
    
    Log:	Import untested debugging tool pypy.tool.dis3 only when needed
    
    diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py
    --- a/pypy/interpreter/pycode.py
    +++ b/pypy/interpreter/pycode.py
    @@ -14,7 +14,6 @@
         CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS, CO_NESTED,
         CO_GENERATOR, CO_COROUTINE, CO_KILL_DOCSTRING, CO_YIELD_INSIDE_TRY,
         CO_ITERABLE_COROUTINE, CO_ASYNC_GENERATOR)
    -from pypy.tool import dis3
     from pypy.tool.stdlib_opcode import opcodedesc, HAVE_ARGUMENT
     from rpython.rlib.rarithmetic import intmask
     from rpython.rlib.objectmodel import compute_hash, we_are_translated
    @@ -307,6 +306,7 @@
     
         def dump(self):
             """NOT_RPYTHON: A dis.dis() dump of the code object."""
    +        from pypy.tool import dis3
             if not hasattr(self, 'co_consts'):
                 self.co_consts = [w if isinstance(w, PyCode) else self.space.unwrap(w)
                                   for w in self.co_consts_w]
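
A minimal generic sketch of the deferred-import pattern applied here (standard-library dis used as a stand-in, since pypy.tool.dis3 is only importable inside a PyPy checkout): the module is imported when the method runs, not when pycode.py is loaded.

    def dump(code_obj):
        import dis                 # paid for only when dump() is actually called
        dis.dis(code_obj)

    dump((lambda x: x + 1).__code__)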
    
    From pypy.commits at gmail.com  Fri Aug 30 16:50:25 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Fri, 30 Aug 2019 13:50:25 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: Clean up apptest_pyframe and add a
     failing test
    Message-ID: <5d698c11.1c69fb81.47c92.3de6@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97345:f779cb537e5e
    Date: 2019-08-30 21:48 +0100
    http://bitbucket.org/pypy/pypy/changeset/f779cb537e5e/
    
    Log:	Clean up apptest_pyframe and add a failing test
    
    diff --git a/pypy/interpreter/pyframe.py b/pypy/interpreter/pyframe.py
    --- a/pypy/interpreter/pyframe.py
    +++ b/pypy/interpreter/pyframe.py
    @@ -69,7 +69,7 @@
         f_generator_nowref       = None               # (only one of the two attrs)
         last_instr               = -1
         f_backref                = jit.vref_None
    -    
    +
         escaped                  = False  # see mark_as_escaped()
         debugdata                = None
     
    @@ -79,7 +79,7 @@
         lastblock = None
     
         # other fields:
    -    
    +
         # builtin - builtin cache, only if honor__builtins__ is True
         # defaults to False
     
    diff --git a/pypy/interpreter/test/apptest_pyframe.py b/pypy/interpreter/test/apptest_pyframe.py
    --- a/pypy/interpreter/test/apptest_pyframe.py
    +++ b/pypy/interpreter/test/apptest_pyframe.py
    @@ -61,6 +61,34 @@
         print(repr(g.__code__.co_lnotab))
         assert g() == [origin+3, origin+5+127, origin+7+127+1000]
     
    +class JumpTracer:
    +    """Defines a trace function that jumps from one place to another."""
    +
    +    def __init__(self, function, jumpFrom, jumpTo, event='line',
    +                 decorated=False):
    +        self.code = function.__code__
    +        self.jumpFrom = jumpFrom
    +        self.jumpTo = jumpTo
    +        self.event = event
    +        self.firstLine = None if decorated else self.code.co_firstlineno
    +        self.done = False
    +
    +    def trace(self, frame, event, arg):
    +        if self.done:
    +            return
    +        # frame.f_code.co_firstlineno is the first line of the decorator when
    +        # 'function' is decorated and the decorator may be written using
    +        # multiple physical lines when it is too long. Use the first line
    +        # trace event in 'function' to find the first line of 'function'.
    +        if (self.firstLine is None and frame.f_code == self.code and
    +                event == 'line'):
    +            self.firstLine = frame.f_lineno - 1
    +        if (event == self.event and self.firstLine and
    +                frame.f_lineno == self.firstLine + self.jumpFrom):
    +            frame.f_lineno = self.firstLine + self.jumpTo
    +            self.done = True
    +        return self.trace
    +
     def test_f_lineno_set(tempfile):
         def tracer(f, *args):
             def y(f, *args):
    @@ -87,7 +115,7 @@
         sys.settrace(None)
         # assert did not crash
     
    -def test_f_lineno_set_2(tempfile):
    +def test_f_lineno_set_2():
         counter = [0]
         errors = []
     
    @@ -101,10 +129,6 @@
                         errors.append(e)
             return tracer
     
    -    # obscure: call open beforehand, py3k's open invokes some app
    -    # level code that confuses our tracing (likely due to the
    -    # testing env, otherwise it's not a problem)
    -    f = open(tempfile, 'w')
         def function():
             try:
                 raise ValueError
    @@ -133,18 +157,39 @@
                     output.append(8)
                 output.append(9)
         output = []
    -
    -    def tracer(f, event, *args):
    -        if event == 'line' and len(output) == 1:
    -            f.f_lineno += 5
    -        return tracer
    +    tracer = JumpTracer(jump_in_nested_finally, 4, 9)
     
         import sys
    -    sys.settrace(tracer)
    +    sys.settrace(tracer.trace)
         jump_in_nested_finally(output)
         sys.settrace(None)
         assert output == [2, 9]
     
    +def test_f_lineno_set_4():
    +    pytest.skip("test is failing on pypy")
    +    def jump_in_nested_finally(output):
    +        try:
    +            output.append(2)
    +            1/0
    +            return
    +        finally:
    +            output.append(6)
    +            output.append(7)
    +        output.append(8)
    +    output = []
    +    tracer = JumpTracer(jump_in_nested_finally, 6, 7)
    +
    +    import sys
    +    sys.settrace(tracer.trace)
    +    try:
    +        jump_in_nested_finally(output)
    +    except ZeroDivisionError:
    +        sys.settrace(None)
    +    else:
    +        sys.settrace(None)
    +        assert False, 'did not raise'
    +    assert output == [2, 7]
    +
     def test_f_lineno_set_firstline():
         seen = []
         def tracer(f, event, *args):
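
A small self-contained illustration (hypothetical function, plain CPython or PyPy session) of the mechanism JumpTracer relies on: inside a 'line' trace event the tracer may assign frame.f_lineno to jump over code in the traced function.

    import sys

    def target(out):
        out.append(1)
        out.append(2)   # skipped by the tracer below
        out.append(3)

    first = target.__code__.co_firstlineno

    def tracer(frame, event, arg):
        if event == 'line' and frame.f_code is target.__code__:
            if frame.f_lineno == first + 2:   # about to run out.append(2)
                frame.f_lineno = first + 3    # jump straight to out.append(3)
        return tracer

    out = []
    sys.settrace(tracer)
    target(out)
    sys.settrace(None)
    print(out)   # expected: [1, 3]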
    
    From pypy.commits at gmail.com  Fri Aug 30 20:55:36 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Fri, 30 Aug 2019 17:55:36 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: Gently hack stdlib to avoid unimplemented
     functions
    Message-ID: <5d69c588.1c69fb81.d109.9e23@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97346:933f6504ecb5
    Date: 2019-08-31 01:53 +0100
    http://bitbucket.org/pypy/pypy/changeset/933f6504ecb5/
    
    Log:	Gently hack stdlib to avoid unimplemented functions
    
    diff --git a/lib-python/3/os.py b/lib-python/3/os.py
    --- a/lib-python/3/os.py
    +++ b/lib-python/3/os.py
    @@ -129,7 +129,7 @@
         _add("HAVE_FCHMOD",     "chmod")
         _add("HAVE_FCHOWN",     "chown")
         _add("HAVE_FDOPENDIR",  "listdir")
    -    _add("HAVE_FDOPENDIR",  "scandir")
    +    # _add("HAVE_FDOPENDIR",  "scandir") # not implemented on PyPy yet
         _add("HAVE_FEXECVE",    "execve")
         _set.add(stat) # fstat always works
         _add("HAVE_FTRUNCATE",  "truncate")
    diff --git a/lib-python/3/random.py b/lib-python/3/random.py
    --- a/lib-python/3/random.py
    +++ b/lib-python/3/random.py
    @@ -776,7 +776,8 @@
     setstate = _inst.setstate
     getrandbits = _inst.getrandbits
     
    -if hasattr(_os, "fork"):
    +# PyPy change: we have _os.fork, but not _os.register_at_fork yet
    +if hasattr(_os, "register_at_fork"):
         _os.register_at_fork(after_in_child=_inst.seed)
     
     
    diff --git a/pypy/module/posix/interp_scandir.py b/pypy/module/posix/interp_scandir.py
    --- a/pypy/module/posix/interp_scandir.py
    +++ b/pypy/module/posix/interp_scandir.py
    @@ -11,6 +11,7 @@
     from pypy.module.posix.interp_posix import unwrap_fd, build_stat_result, _WIN32
     
     
    +# XXX: update os.supports_fd when fd support is implemented
     def scandir(space, w_path=None):
         "scandir(path='.') -> iterator of DirEntry objects for given path"
         if space.is_none(w_path):
    
    From pypy.commits at gmail.com  Fri Aug 30 20:56:36 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Fri, 30 Aug 2019 17:56:36 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: hg merge py3.6
    Message-ID: <5d69c5c4.1c69fb81.085c.070d@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97347:a676a5366a60
    Date: 2019-08-31 01:55 +0100
    http://bitbucket.org/pypy/pypy/changeset/a676a5366a60/
    
    Log:	hg merge py3.6
    
    diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py
    --- a/pypy/interpreter/pycode.py
    +++ b/pypy/interpreter/pycode.py
    @@ -14,7 +14,6 @@
         CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS, CO_NESTED,
         CO_GENERATOR, CO_COROUTINE, CO_KILL_DOCSTRING, CO_YIELD_INSIDE_TRY,
         CO_ITERABLE_COROUTINE, CO_ASYNC_GENERATOR)
    -from pypy.tool import dis3
     from pypy.tool.stdlib_opcode import opcodedesc, HAVE_ARGUMENT
     from rpython.rlib.rarithmetic import intmask
     from rpython.rlib.objectmodel import compute_hash, we_are_translated
    @@ -307,6 +306,7 @@
     
         def dump(self):
             """NOT_RPYTHON: A dis.dis() dump of the code object."""
    +        from pypy.tool import dis3
             if not hasattr(self, 'co_consts'):
                 self.co_consts = [w if isinstance(w, PyCode) else self.space.unwrap(w)
                                   for w in self.co_consts_w]
    diff --git a/pypy/interpreter/pyframe.py b/pypy/interpreter/pyframe.py
    --- a/pypy/interpreter/pyframe.py
    +++ b/pypy/interpreter/pyframe.py
    @@ -69,7 +69,7 @@
         f_generator_nowref       = None               # (only one of the two attrs)
         last_instr               = -1
         f_backref                = jit.vref_None
    -    
    +
         escaped                  = False  # see mark_as_escaped()
         debugdata                = None
     
    @@ -79,7 +79,7 @@
         lastblock = None
     
         # other fields:
    -    
    +
         # builtin - builtin cache, only if honor__builtins__ is True
         # defaults to False
     
    diff --git a/pypy/interpreter/test/apptest_pyframe.py b/pypy/interpreter/test/apptest_pyframe.py
    --- a/pypy/interpreter/test/apptest_pyframe.py
    +++ b/pypy/interpreter/test/apptest_pyframe.py
    @@ -61,6 +61,34 @@
         print(repr(g.__code__.co_lnotab))
         assert g() == [origin+3, origin+5+127, origin+7+127+1000]
     
    +class JumpTracer:
    +    """Defines a trace function that jumps from one place to another."""
    +
    +    def __init__(self, function, jumpFrom, jumpTo, event='line',
    +                 decorated=False):
    +        self.code = function.__code__
    +        self.jumpFrom = jumpFrom
    +        self.jumpTo = jumpTo
    +        self.event = event
    +        self.firstLine = None if decorated else self.code.co_firstlineno
    +        self.done = False
    +
    +    def trace(self, frame, event, arg):
    +        if self.done:
    +            return
    +        # frame.f_code.co_firstlineno is the first line of the decorator when
    +        # 'function' is decorated and the decorator may be written using
    +        # multiple physical lines when it is too long. Use the first line
    +        # trace event in 'function' to find the first line of 'function'.
    +        if (self.firstLine is None and frame.f_code == self.code and
    +                event == 'line'):
    +            self.firstLine = frame.f_lineno - 1
    +        if (event == self.event and self.firstLine and
    +                frame.f_lineno == self.firstLine + self.jumpFrom):
    +            frame.f_lineno = self.firstLine + self.jumpTo
    +            self.done = True
    +        return self.trace
    +
     def test_f_lineno_set(tempfile):
         def tracer(f, *args):
             def y(f, *args):
    @@ -87,7 +115,7 @@
         sys.settrace(None)
         # assert did not crash
     
    -def test_f_lineno_set_2(tempfile):
    +def test_f_lineno_set_2():
         counter = [0]
         errors = []
     
    @@ -101,10 +129,6 @@
                         errors.append(e)
             return tracer
     
    -    # obscure: call open beforehand, py3k's open invokes some app
    -    # level code that confuses our tracing (likely due to the
    -    # testing env, otherwise it's not a problem)
    -    f = open(tempfile, 'w')
         def function():
             try:
                 raise ValueError
    @@ -133,18 +157,39 @@
                     output.append(8)
                 output.append(9)
         output = []
    -
    -    def tracer(f, event, *args):
    -        if event == 'line' and len(output) == 1:
    -            f.f_lineno += 5
    -        return tracer
    +    tracer = JumpTracer(jump_in_nested_finally, 4, 9)
     
         import sys
    -    sys.settrace(tracer)
    +    sys.settrace(tracer.trace)
         jump_in_nested_finally(output)
         sys.settrace(None)
         assert output == [2, 9]
     
    +def test_f_lineno_set_4():
    +    pytest.skip("test is failing on pypy")
    +    def jump_in_nested_finally(output):
    +        try:
    +            output.append(2)
    +            1/0
    +            return
    +        finally:
    +            output.append(6)
    +            output.append(7)
    +        output.append(8)
    +    output = []
    +    tracer = JumpTracer(jump_in_nested_finally, 6, 7)
    +
    +    import sys
    +    sys.settrace(tracer.trace)
    +    try:
    +        jump_in_nested_finally(output)
    +    except ZeroDivisionError:
    +        sys.settrace(None)
    +    else:
    +        sys.settrace(None)
    +        assert False, 'did not raise'
    +    assert output == [2, 7]
    +
     def test_f_lineno_set_firstline():
         seen = []
         def tracer(f, event, *args):
    diff --git a/pypy/module/__builtin__/compiling.py b/pypy/module/__builtin__/compiling.py
    --- a/pypy/module/__builtin__/compiling.py
    +++ b/pypy/module/__builtin__/compiling.py
    @@ -130,6 +130,16 @@
                              keywords=keywords,
                              keywords_w=kwds_w.values())
             w_namespace = space.call_args(w_prep, args)
    +    if not space.ismapping_w(w_namespace):
    +        if isclass:
    +            raise oefmt(space.w_TypeError,
    +                "%N.__prepare__ must return a mapping, not %T",
    +                w_meta, w_namespace)
    +        else:
    +            raise oefmt(space.w_TypeError,
    +                ".__prepare__ must return a mapping, not %T",
    +                w_namespace)
    +
         code = w_func.getcode()
         frame = space.createframe(code, w_func.w_func_globals, w_func)
         frame.setdictscope(w_namespace)
    diff --git a/pypy/module/posix/interp_nt.py b/pypy/module/posix/interp_nt.py
    --- a/pypy/module/posix/interp_nt.py
    +++ b/pypy/module/posix/interp_nt.py
    @@ -35,7 +35,7 @@
     
     
     # plain NotImplementedError is invalid RPython
    -class LLNotImplemented(NotImplementedError):
    +class LLNotImplemented(Exception):
     
         def __init__(self, msg):
             self.msg = msg
    diff --git a/pypy/objspace/std/test/test_typeobject.py b/pypy/objspace/std/test/test_typeobject.py
    --- a/pypy/objspace/std/test/test_typeobject.py
    +++ b/pypy/objspace/std/test/test_typeobject.py
    @@ -1277,6 +1277,23 @@
             assert C.foo == 42
             """
     
    +    def test_prepare_error(self):
    +        """
    +        class BadMeta:
    +            @classmethod
    +            def __prepare__(cls, *args, **kwargs):
    +                return 42
    +        def make_class(meta):
    +            class Foo(metaclass=meta):
    +                pass
    +        excinfo = raises(TypeError, make_class, BadMeta)
    +        print(excinfo.value.args[0])
    +        assert excinfo.value.args[0].startswith('BadMeta.__prepare__')
    +        # Non-type as metaclass
    +        excinfo = raises(TypeError, make_class, BadMeta())
    +        assert excinfo.value.args[0].startswith('.__prepare__')
    +        """
    +
         def test_crash_mro_without_object_1(self):
             """
             class X(type):
    diff --git a/rpython/rlib/rsiphash.py b/rpython/rlib/rsiphash.py
    --- a/rpython/rlib/rsiphash.py
    +++ b/rpython/rlib/rsiphash.py
    @@ -170,10 +170,12 @@
             # unicode strings where CPython uses 2 bytes per character.
             addr = rstr._get_raw_buf_unicode(rstr.UNICODE, ll_s, 0)
             SZ = rffi.sizeof(rstr.UNICODE.chars.OF)
    -        for i in range(length):
    +        i = 0
    +        while i < length:
                 if ord(ll_s.chars[i]) > 0xFF:
                     length *= SZ
                     break
    +            i += 1
             else:
                 x = _siphash24(addr, length, SZ)
                 keepalive_until_here(ll_s)
    
    From pypy.commits at gmail.com  Fri Aug 30 21:05:57 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Fri, 30 Aug 2019 18:05:57 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: Make queue importable
    Message-ID: <5d69c7f5.1c69fb81.e0b9c.b246@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97348:7697e979053f
    Date: 2019-08-31 02:04 +0100
    http://bitbucket.org/pypy/pypy/changeset/7697e979053f/
    
    Log:	Make queue importable
    
    diff --git a/lib-python/3/queue.py b/lib-python/3/queue.py
    --- a/lib-python/3/queue.py
    +++ b/lib-python/3/queue.py
    @@ -14,7 +14,7 @@
     
     try:
         from _queue import Empty
    -except AttributeError:
    +except (AttributeError, ImportError):
         class Empty(Exception):
             'Exception raised by Queue.get(block=0)/get_nowait().'
             pass
    
    From pypy.commits at gmail.com  Sat Aug 31 10:15:58 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Sat, 31 Aug 2019 07:15:58 -0700 (PDT)
    Subject: [pypy-commit] pypy default: Don't make a subclass of
     NotImplementedError in RPython---it was unlikely to
    Message-ID: <5d6a811e.1c69fb81.ea7a0.d3c1@mx.google.com>
    
    Author: Armin Rigo 
    Branch: 
    Changeset: r97349:29b473bb1c55
    Date: 2019-08-31 16:15 +0200
    http://bitbucket.org/pypy/pypy/changeset/29b473bb1c55/
    
    Log:	Don't make a subclass of NotImplementedError in RPython---it was
    	unlikely to work correctly and is now forbidden
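
As a rough illustration of the rule (hypothetical names, not part of the changeset): RPython
treats AssertionError and NotImplementedError specially, and catching NotImplementedError or a
subclass of it is now rejected by the flow space, so exception classes that are meant to be
raised and caught should derive from plain Exception, as the diff below does for AlignmentError.

    class UnalignedAccess(Exception):            # fine for RPython: derives from Exception
        "illustrative stand-in for an error class meant to be caught"

    class BadUnalignedAccess(NotImplementedError):   # the removed pattern
        pass

    def read_item(addr):
        try:
            if addr % 8:
                raise BadUnalignedAccess(addr)
            return addr
        except BadUnalignedAccess:    # catching it is what RPython's flow space now rejects
            return -1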
    
    diff --git a/rpython/rlib/rawstorage.py b/rpython/rlib/rawstorage.py
    --- a/rpython/rlib/rawstorage.py
    +++ b/rpython/rlib/rawstorage.py
    @@ -55,7 +55,7 @@
         misaligned_is_fine = False
     
     
    -class AlignmentError(NotImplementedError):
    +class AlignmentError(Exception):
         "Means that raw_storage_{get,set}item was used on unaligned memory"
     
     # Tweak?  It seems a reasonable value for any system out there: requiring
    
    From pypy.commits at gmail.com  Sat Aug 31 10:16:47 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Sat, 31 Aug 2019 07:16:47 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: hg merge default
    Message-ID: <5d6a814f.1c69fb81.9a42f.7aa9@mx.google.com>
    
    Author: Armin Rigo 
    Branch: py3.6
    Changeset: r97350:c0dcb6566060
    Date: 2019-08-31 16:16 +0200
    http://bitbucket.org/pypy/pypy/changeset/c0dcb6566060/
    
    Log:	hg merge default
    
    diff --git a/rpython/flowspace/flowcontext.py b/rpython/flowspace/flowcontext.py
    --- a/rpython/flowspace/flowcontext.py
    +++ b/rpython/flowspace/flowcontext.py
    @@ -566,11 +566,12 @@
             if not isinstance(w_check_class, Constant):
                 raise FlowingError("Non-constant except guard.")
             check_class = w_check_class.value
    -        if check_class in (NotImplementedError, AssertionError):
    -            raise FlowingError(
    -                "Catching %s is not valid in RPython" % check_class.__name__)
             if not isinstance(check_class, tuple):
                 # the simple case
    +            if issubclass(check_class, (NotImplementedError, AssertionError)):
    +                raise FlowingError(
    +                    "Catching %s is not valid in RPython" %
    +                    check_class.__name__)
                 return self.guessbool(op.issubtype(w_exc_type, w_check_class).eval(self))
             # special case for StackOverflow (see rlib/rstackovf.py)
             if check_class == rstackovf.StackOverflow:
    diff --git a/rpython/flowspace/test/test_objspace.py b/rpython/flowspace/test/test_objspace.py
    --- a/rpython/flowspace/test/test_objspace.py
    +++ b/rpython/flowspace/test/test_objspace.py
    @@ -1135,6 +1135,23 @@
                     pass
             py.test.raises(FlowingError, "self.codetest(f)")
     
    +    def test_cannot_catch_special_exceptions_2(self):
    +        class MyNIE(NotImplementedError):
    +            pass
    +        def f():
    +            try:
    +                f()
    +            except MyNIE:
    +                pass
    +        py.test.raises(FlowingError, "self.codetest(f)")
    +        #
    +        def f():
    +            try:
    +                f()
    +            except (ValueError, MyNIE):
    +                pass
    +        py.test.raises(FlowingError, "self.codetest(f)")
    +
         def test_locals_dict(self):
             def f():
                 x = 5
    diff --git a/rpython/rlib/rawstorage.py b/rpython/rlib/rawstorage.py
    --- a/rpython/rlib/rawstorage.py
    +++ b/rpython/rlib/rawstorage.py
    @@ -55,7 +55,7 @@
         misaligned_is_fine = False
     
     
    -class AlignmentError(NotImplementedError):
    +class AlignmentError(Exception):
         "Means that raw_storage_{get,set}item was used on unaligned memory"
     
     # Tweak?  It seems a reasonable value for any system out there: requiring
    
    From pypy.commits at gmail.com  Sat Aug 31 10:19:20 2019
    From: pypy.commits at gmail.com (arigo)
    Date: Sat, 31 Aug 2019 07:19:20 -0700 (PDT)
    Subject: [pypy-commit] pypy default: improve error message
    Message-ID: <5d6a81e8.1c69fb81.231ff.2e5b@mx.google.com>
    
    Author: Armin Rigo 
    Branch: 
    Changeset: r97351:ee34ae9a1ae5
    Date: 2019-08-31 16:18 +0200
    http://bitbucket.org/pypy/pypy/changeset/ee34ae9a1ae5/
    
    Log:	improve error message
    
    diff --git a/rpython/flowspace/flowcontext.py b/rpython/flowspace/flowcontext.py
    --- a/rpython/flowspace/flowcontext.py
    +++ b/rpython/flowspace/flowcontext.py
    @@ -570,8 +570,8 @@
                 # the simple case
                 if issubclass(check_class, (NotImplementedError, AssertionError)):
                     raise FlowingError(
    -                    "Catching %s is not valid in RPython" %
    -                    check_class.__name__)
    +                    "Catching NotImplementedError, AssertionError, or a "
    +                    "subclass is not valid in RPython (%r)" % (check_class,))
                 return self.guessbool(op.issubtype(w_exc_type, w_check_class).eval(self))
             # special case for StackOverflow (see rlib/rstackovf.py)
             if check_class == rstackovf.StackOverflow:
    
    From pypy.commits at gmail.com  Sat Aug 31 12:25:14 2019
    From: pypy.commits at gmail.com (stevie_92)
    Date: Sat, 31 Aug 2019 09:25:14 -0700 (PDT)
    Subject: [pypy-commit] pypy cpyext-gc-cycle: Added consistency check in case
     rrc graph changed between gc-iterations
    Message-ID: <5d6a9f6a.1c69fb81.3e9cc.b57f@mx.google.com>
    
    Author: Stefan Beyer 
    Branch: cpyext-gc-cycle
    Changeset: r97352:af16e13f7cbf
    Date: 2019-08-31 18:24 +0200
    http://bitbucket.org/pypy/pypy/changeset/af16e13f7cbf/
    
Log:	Added consistency check in case rrc graph changed between
	gc-iterations
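
Roughly, the snapshot records each object's refcount and outgoing references; at the next pause,
those values are re-read for every object the snapshot considered dead, and any mismatch means
the graph changed in between, so everything is kept alive for this cycle. A simplified standalone
model of that idea (plain dicts and hypothetical names, not the actual GC structures):

    def snapshot_is_consistent(snapshot, current):
        # snapshot / current: {obj_id: (refcount, tuple of referenced obj_ids)}
        for obj, (refcnt, refs) in snapshot.items():
            cur = current.get(obj)
            if cur is None or cur[0] != refcnt or tuple(cur[1]) != tuple(refs):
                return False      # the supposedly dead subgraph changed between pauses
        return True

    snap = {1: (2, (2,)), 2: (1, ())}
    assert snapshot_is_consistent(snap, {1: (2, (2,)), 2: (1, ())})
    assert not snapshot_is_consistent(snap, {1: (3, (2,)), 2: (1, ())})
    # on a mismatch the collector resurrects the "dead" objects and tries again later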
    
    diff --git a/rpython/memory/gc/rrc/base.py b/rpython/memory/gc/rrc/base.py
    --- a/rpython/memory/gc/rrc/base.py
    +++ b/rpython/memory/gc/rrc/base.py
    @@ -41,6 +41,7 @@
         PYOBJ_SNAPSHOT_OBJ = lltype.Struct('PyObject_Snapshot',
                                            ('pyobj', llmemory.Address),
                                            ('status', lltype.Signed),
    +                                       ('refcnt', lltype.Signed),
                                            ('refcnt_external', lltype.Signed),
                                            ('refs_index', lltype.Signed),
                                            ('refs_len', lltype.Signed),
    @@ -629,6 +630,11 @@
             gchdr.c_gc_next = next
             next.c_gc_prev = gchdr
     
    +    def _gc_list_remove(self, gchdr):
    +        next = gchdr.c_gc_next
    +        next.c_gc_prev = gchdr.c_gc_prev
    +        gchdr.c_gc_prev.c_gc_next = next
    +
         def _gc_list_pop(self, pygclist):
             ret = pygclist.c_gc_next
             pygclist.c_gc_next = ret.c_gc_next
    diff --git a/rpython/memory/gc/rrc/incmark.py b/rpython/memory/gc/rrc/incmark.py
    --- a/rpython/memory/gc/rrc/incmark.py
    +++ b/rpython/memory/gc/rrc/incmark.py
    @@ -63,26 +63,59 @@
             ll_assert(self.state == self.STATE_GARBAGE_MARKING, "invalid state")
     
             # sync snapshot with pyob_list:
    -        #  * move all dead objs still in pyob_list to pyobj_old_list
    +        #  * check the consistency of "dead" objects and keep all of them
    +        #    alive, in case an inconsistency is found (the graph changed
    +        #    between two pauses, so some of those objects might be alive)
    +        #  * move all dead objects still in pyob_list to pyobj_old_list
             #  * for all other objects (in snapshot and new),
    -        #    set their cyclic refcount to > 0, to mark them as live
    +        #    set their cyclic refcount to > 0 to mark them as live
             pygchdr = self.pyobj_list.c_gc_next
    +        consistent = True
    +        self.snapshot_consistent = True
             while pygchdr <> self.pyobj_list:
                 next_old = pygchdr.c_gc_next
    -            if pygchdr.c_gc_refs > 0:
    +            if pygchdr.c_gc_refs > 0: # object is in snapshot
                     snapobj = self.snapshot_objs[pygchdr.c_gc_refs - 1]
                     pygchdr.c_gc_refs = snapobj.refcnt_external
    -                if snapobj.refcnt_external == 0:
    -                    # remove from old list
    -                    next = pygchdr.c_gc_next
    -                    next.c_gc_prev = pygchdr.c_gc_prev
    -                    pygchdr.c_gc_prev.c_gc_next = next
    -                    # add to new list (or not, if it is a tuple)
    +                if snapobj.refcnt_external == 0: # object considered dead
    +                    # check consistency (dead subgraphs can never change):
    +                    pyobj = self.gc_as_pyobj(pygchdr)
    +                    # refcount equal
    +                    consistent = snapobj.refcnt == pyobj.c_ob_refcnt
    +                    if not consistent:
    +                        break
    +                    # outgoing (internal) references equal
    +                    self.snapshot_curr = snapobj
    +                    self.snapshot_curr_index = 0
    +                    self._check_snapshot_traverse(pyobj)
    +                    consistent = self.snapshot_consistent
    +                    if not consistent:
    +                        break
    +                    # consistent -> prepare object for collection
    +                    self._gc_list_remove(pygchdr)
                         self._gc_list_add(self.pyobj_old_list, pygchdr)
                 else:
                     pygchdr.c_gc_refs = 1 # new object, keep alive
                 pygchdr = next_old
     
    +        self._debug_check_consistency(print_label="end-check-consistency")
    +
    +        if not consistent:  # keep all objects alive
    +            while pygchdr <> self.pyobj_list: # continue previous loop
    +                pygchdr.c_gc_refs = 1
    +                pygchdr = pygchdr.c_gc_next
    +            pygchdr = self.pyobj_old_list.c_gc_next
    +            while pygchdr <> self.pyobj_old_list: # resurrect "dead" objects
    +                pygchdr.c_gc_refs = 1
    +                pygchdr = pygchdr.c_gc_next
    +            if not self._gc_list_is_empty(self.pyobj_old_list):
    +                self._gc_list_merge(self.pyobj_old_list, self.pyobj_list)
    +
    +        self._debug_check_consistency(print_label="before-snap-discard")
    +
    +        # now the snapshot is not needed any more, discard it
    +        self._discard_snapshot()
    +
             # handle legacy finalizers (assumption: not a lot of legacy finalizers,
             # so no need to do it incrementally)
             if self._find_garbage(False):
    @@ -93,8 +126,7 @@
             # handle modern finalizers
             found_finalizer = self._find_finalizer()
             if found_finalizer:
    -            self._gc_list_move(self.pyobj_old_list,
    -                               self.pyobj_isolate_list)
    +            self._gc_list_move(self.pyobj_old_list, self.pyobj_isolate_list)
             use_cylicrc = not found_finalizer
     
             # now mark all pypy objects at the border, depending on the results
    @@ -102,7 +134,6 @@
             debug_print("use_cylicrc", use_cylicrc)
             self.p_list_old.foreach(self._major_trace, (use_cylicrc, False))
             self._debug_check_consistency(print_label="end-mark")
    -        self._discard_snapshot()
             return True
     
         def _collect_roots(self, pygclist):
    @@ -134,7 +165,7 @@
                     obj = self.snapshot_objs[i]
                     found_alive |= self._mark_rawrefcount_obj(obj)
                 simple_limit += 1
    -            if simple_limit > 3:
    +            if simple_limit > 3: # TODO: implement sane limit
                     reached_limit
             return not reached_limit # are there any objects left?
     
    @@ -210,6 +241,7 @@
                 obj = self.snapshot_objs[objs_index]
                 obj.pyobj = llmemory.cast_ptr_to_adr(pyobj)
                 obj.status = 1
    +            obj.refcnt = pyobj.c_ob_refcnt
                 obj.refcnt_external = refcnt
                 obj.refs_index = refs_index
                 obj.refs_len = 0
    @@ -255,6 +287,48 @@
             else:
                 self.tp_traverse(pyobj, self._take_snapshot_visit_action, None)
     
    +    def _check_snapshot_visit(pyobj, self_ptr):
    +        from rpython.rtyper.annlowlevel import cast_adr_to_nongc_instance
    +        #
    +        self_adr = rffi.cast(llmemory.Address, self_ptr)
    +        self = cast_adr_to_nongc_instance(RawRefCountIncMarkGC, self_adr)
    +        self._check_snapshot_visit_action(pyobj, None)
    +        return rffi.cast(rffi.INT_real, 0)
    +
    +    def _check_snapshot_visit_action(self, pyobj, ignore):
    +        pygchdr = self.pyobj_as_gc(pyobj)
    +        if pygchdr <> lltype.nullptr(self.PYOBJ_GC_HDR) and \
    +                pygchdr.c_gc_refs != self.RAWREFCOUNT_REFS_UNTRACKED:
    +            # check consistency with snapshot
    +            curr = self.snapshot_curr
    +            curr_index = self.snapshot_curr_index
    +            if curr_index < curr.refs_len:
    +                # ref changed? -> issue, if traversal order is not stable!!!
    +                index = curr.refs_index + curr_index
    +                ref_addr = self.snapshot_refs[index]
    +                ref = llmemory.cast_adr_to_ptr(ref_addr,
    +                                               self.PYOBJ_SNAPSHOT_OBJ_PTR)
    +                old_value = ref.pyobj
    +                new_value = llmemory.cast_ptr_to_adr(pyobj)
    +                if old_value != new_value:
    +                    self.snapshot_consistent = False # reference changed
    +            else:
    +                self.snapshot_consistent = False # references added
    +            self.snapshot_curr_index += 1
    +
    +    def _check_snapshot_traverse(self, pyobj):
    +        from rpython.rlib.objectmodel import we_are_translated
    +        from rpython.rtyper.annlowlevel import (cast_nongc_instance_to_adr,
    +                                                llhelper)
    +        #
    +        if we_are_translated():
    +            callback_ptr = llhelper(self.RAWREFCOUNT_VISIT,
    +                                    RawRefCountIncMarkGC._check_snapshot_visit)
    +            self_ptr = rffi.cast(rffi.VOIDP, cast_nongc_instance_to_adr(self))
    +            self.tp_traverse(pyobj, callback_ptr, self_ptr)
    +        else:
    +            self.tp_traverse(pyobj, self._check_snapshot_visit_action, None)
    +
         def _discard_snapshot(self):
             lltype.free(self.snapshot_objs, flavor='raw', track_allocation=False)
             lltype.free(self.snapshot_refs, flavor='raw', track_allocation=False)
    \ No newline at end of file
    
    From pypy.commits at gmail.com  Sat Aug 31 17:09:52 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Sat, 31 Aug 2019 14:09:52 -0700 (PDT)
    Subject: [pypy-commit] pypy default: Prevent segfault when slicing array
     with a large step size
    Message-ID: <5d6ae220.1c69fb81.11037.ae42@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: 
    Changeset: r97353:be2a55c81f26
    Date: 2019-08-31 22:05 +0100
    http://bitbucket.org/pypy/pypy/changeset/be2a55c81f26/
    
    Log:	Prevent segfault when slicing array with a large step size
    
    	Note: the segfault is caused by UB in the generated C code for
    	range(start, stop, step), and thus only appears after translation.
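
A standalone sketch of the pattern the fix adopts (hypothetical helper, not the interp_array
code): the copy loop is bounded by the number of selected elements, so termination no longer
depends on comparing the ever-growing index against stop, and the huge step is only added a
bounded number of times.

    import sys

    def getslice(src, start, stop, step):
        size = len(range(start, stop, step))   # number of elements the slice selects
        out = [None] * size
        i = start
        for j in range(size):                  # terminate on the count, not on i < stop
            out[j] = src[i]
            i += step
        return out

    assert getslice([1, 2, 3], 1, 3, sys.maxsize) == [2]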
    
    diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
    --- a/pypy/module/array/interp_array.py
    +++ b/pypy/module/array/interp_array.py
    @@ -1122,12 +1122,12 @@
                 w_a = mytype.w_class(self.space)
                 w_a.setlen(size, overallocate=False)
                 assert step != 0
    -            j = 0
                 buf = w_a.get_buffer()
                 srcbuf = self.get_buffer()
    -            for i in range(start, stop, step):
    +            i = start
    +            for j in range(size):
                     buf[j] = srcbuf[i]
    -                j += 1
    +                i += step
                 keepalive_until_here(self)
                 keepalive_until_here(w_a)
                 return w_a
    @@ -1159,12 +1159,12 @@
                         self.setlen(0)
                         self.fromsequence(w_lst)
                 else:
    -                j = 0
                     buf = self.get_buffer()
                     srcbuf = w_item.get_buffer()
    -                for i in range(start, stop, step):
    +                i = start
    +                for j in range(size):
                         buf[i] = srcbuf[j]
    -                    j += 1
    +                    i += step
                     keepalive_until_here(w_item)
                     keepalive_until_here(self)
     
    diff --git a/pypy/module/array/test/test_array.py b/pypy/module/array/test/test_array.py
    --- a/pypy/module/array/test/test_array.py
    +++ b/pypy/module/array/test/test_array.py
    @@ -372,6 +372,17 @@
                             except ValueError:
                                 assert not ok
     
    +    def test_getslice_large_step(self):
    +        import sys
    +        a = self.array('b', [1, 2, 3])
    +        assert list(a[1::sys.maxsize]) == [2]
    +
    +    def test_setslice_large_step(self):
    +        import sys
    +        a = self.array('b', [1, 2, 3])
    +        a[1::sys.maxsize] = self.array('b', [42])
    +        assert a.tolist() == [1, 42, 3]
    +
         def test_toxxx(self):
             a = self.array('i', [1, 2, 3])
             l = a.tolist()
    
    From pypy.commits at gmail.com  Sat Aug 31 17:12:33 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Sat, 31 Aug 2019 14:12:33 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.6: hg merge default
    Message-ID: <5d6ae2c1.1c69fb81.57737.945b@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.6
    Changeset: r97354:013d6f142414
    Date: 2019-08-31 22:10 +0100
    http://bitbucket.org/pypy/pypy/changeset/013d6f142414/
    
    Log:	hg merge default
    
    diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
    --- a/pypy/module/array/interp_array.py
    +++ b/pypy/module/array/interp_array.py
    @@ -1222,12 +1222,12 @@
                 w_a = mytype.w_class(self.space)
                 w_a.setlen(size, overallocate=False)
                 assert step != 0
    -            j = 0
                 buf = w_a.get_buffer()
                 srcbuf = self.get_buffer()
    -            for i in range(start, stop, step):
    +            i = start
    +            for j in range(size):
                     buf[j] = srcbuf[i]
    -                j += 1
    +                i += step
                 keepalive_until_here(self)
                 keepalive_until_here(w_a)
                 return w_a
    @@ -1259,12 +1259,12 @@
                         self.setlen(0)
                         self.fromsequence(w_lst)
                 else:
    -                j = 0
                     buf = self.get_buffer()
                     srcbuf = w_item.get_buffer()
    -                for i in range(start, stop, step):
    +                i = start
    +                for j in range(size):
                         buf[i] = srcbuf[j]
    -                    j += 1
    +                    i += step
                     keepalive_until_here(w_item)
                     keepalive_until_here(self)
     
    diff --git a/pypy/module/array/test/test_array.py b/pypy/module/array/test/test_array.py
    --- a/pypy/module/array/test/test_array.py
    +++ b/pypy/module/array/test/test_array.py
    @@ -400,6 +400,17 @@
                             except ValueError:
                                 assert not ok
     
    +    def test_getslice_large_step(self):
    +        import sys
    +        a = self.array('b', [1, 2, 3])
    +        assert list(a[1::sys.maxsize]) == [2]
    +
    +    def test_setslice_large_step(self):
    +        import sys
    +        a = self.array('b', [1, 2, 3])
    +        a[1::sys.maxsize] = self.array('b', [42])
    +        assert a.tolist() == [1, 42, 3]
    +
         def test_toxxx(self):
             a = self.array('i', [1, 2, 3])
             l = a.tolist()
    diff --git a/rpython/flowspace/flowcontext.py b/rpython/flowspace/flowcontext.py
    --- a/rpython/flowspace/flowcontext.py
    +++ b/rpython/flowspace/flowcontext.py
    @@ -570,8 +570,8 @@
                 # the simple case
                 if issubclass(check_class, (NotImplementedError, AssertionError)):
                     raise FlowingError(
    -                    "Catching %s is not valid in RPython" %
    -                    check_class.__name__)
    +                    "Catching NotImplementedError, AssertionError, or a "
    +                    "subclass is not valid in RPython (%r)" % (check_class,))
                 return self.guessbool(op.issubtype(w_exc_type, w_check_class).eval(self))
             # special case for StackOverflow (see rlib/rstackovf.py)
             if check_class == rstackovf.StackOverflow:
    
    From pypy.commits at gmail.com  Sat Aug 31 17:12:35 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Sat, 31 Aug 2019 14:12:35 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: hg merge py3.6
    Message-ID: <5d6ae2c3.1c69fb81.11037.ae87@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97355:24c3e187ed91
    Date: 2019-08-31 22:11 +0100
    http://bitbucket.org/pypy/pypy/changeset/24c3e187ed91/
    
    Log:	hg merge py3.6
    
    diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
    --- a/pypy/module/array/interp_array.py
    +++ b/pypy/module/array/interp_array.py
    @@ -1222,12 +1222,12 @@
                 w_a = mytype.w_class(self.space)
                 w_a.setlen(size, overallocate=False)
                 assert step != 0
    -            j = 0
                 buf = w_a.get_buffer()
                 srcbuf = self.get_buffer()
    -            for i in range(start, stop, step):
    +            i = start
    +            for j in range(size):
                     buf[j] = srcbuf[i]
    -                j += 1
    +                i += step
                 keepalive_until_here(self)
                 keepalive_until_here(w_a)
                 return w_a
    @@ -1259,12 +1259,12 @@
                         self.setlen(0)
                         self.fromsequence(w_lst)
                 else:
    -                j = 0
                     buf = self.get_buffer()
                     srcbuf = w_item.get_buffer()
    -                for i in range(start, stop, step):
    +                i = start
    +                for j in range(size):
                         buf[i] = srcbuf[j]
    -                    j += 1
    +                    i += step
                     keepalive_until_here(w_item)
                     keepalive_until_here(self)
     
    diff --git a/pypy/module/array/test/test_array.py b/pypy/module/array/test/test_array.py
    --- a/pypy/module/array/test/test_array.py
    +++ b/pypy/module/array/test/test_array.py
    @@ -400,6 +400,17 @@
                             except ValueError:
                                 assert not ok
     
    +    def test_getslice_large_step(self):
    +        import sys
    +        a = self.array('b', [1, 2, 3])
    +        assert list(a[1::sys.maxsize]) == [2]
    +
    +    def test_setslice_large_step(self):
    +        import sys
    +        a = self.array('b', [1, 2, 3])
    +        a[1::sys.maxsize] = self.array('b', [42])
    +        assert a.tolist() == [1, 42, 3]
    +
         def test_toxxx(self):
             a = self.array('i', [1, 2, 3])
             l = a.tolist()
    diff --git a/rpython/flowspace/flowcontext.py b/rpython/flowspace/flowcontext.py
    --- a/rpython/flowspace/flowcontext.py
    +++ b/rpython/flowspace/flowcontext.py
    @@ -566,11 +566,12 @@
             if not isinstance(w_check_class, Constant):
                 raise FlowingError("Non-constant except guard.")
             check_class = w_check_class.value
    -        if check_class in (NotImplementedError, AssertionError):
    -            raise FlowingError(
    -                "Catching %s is not valid in RPython" % check_class.__name__)
             if not isinstance(check_class, tuple):
                 # the simple case
    +            if issubclass(check_class, (NotImplementedError, AssertionError)):
    +                raise FlowingError(
    +                    "Catching NotImplementedError, AssertionError, or a "
    +                    "subclass is not valid in RPython (%r)" % (check_class,))
                 return self.guessbool(op.issubtype(w_exc_type, w_check_class).eval(self))
             # special case for StackOverflow (see rlib/rstackovf.py)
             if check_class == rstackovf.StackOverflow:
    diff --git a/rpython/flowspace/test/test_objspace.py b/rpython/flowspace/test/test_objspace.py
    --- a/rpython/flowspace/test/test_objspace.py
    +++ b/rpython/flowspace/test/test_objspace.py
    @@ -1135,6 +1135,23 @@
                     pass
             py.test.raises(FlowingError, "self.codetest(f)")
     
    +    def test_cannot_catch_special_exceptions_2(self):
    +        class MyNIE(NotImplementedError):
    +            pass
    +        def f():
    +            try:
    +                f()
    +            except MyNIE:
    +                pass
    +        py.test.raises(FlowingError, "self.codetest(f)")
    +        #
    +        def f():
    +            try:
    +                f()
    +            except (ValueError, MyNIE):
    +                pass
    +        py.test.raises(FlowingError, "self.codetest(f)")
    +
         def test_locals_dict(self):
             def f():
                 x = 5
    diff --git a/rpython/rlib/rawstorage.py b/rpython/rlib/rawstorage.py
    --- a/rpython/rlib/rawstorage.py
    +++ b/rpython/rlib/rawstorage.py
    @@ -55,7 +55,7 @@
         misaligned_is_fine = False
     
     
    -class AlignmentError(NotImplementedError):
    +class AlignmentError(Exception):
         "Means that raw_storage_{get,set}item was used on unaligned memory"
     
     # Tweak?  It seems a reasonable value for any system out there: requiring
    
    From pypy.commits at gmail.com  Sat Aug 31 18:38:00 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Sat, 31 Aug 2019 15:38:00 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: update lib-python/conftest.py
    Message-ID: <5d6af6c8.1c69fb81.7eb6d.1341@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97356:84bf78fa6c47
    Date: 2019-08-31 23:36 +0100
    http://bitbucket.org/pypy/pypy/changeset/84bf78fa6c47/
    
    Log:	update lib-python/conftest.py
    
    diff --git a/lib-python/conftest.py b/lib-python/conftest.py
    --- a/lib-python/conftest.py
    +++ b/lib-python/conftest.py
    @@ -109,10 +109,10 @@
         RegrTest('test_array.py', core=True, usemodules='struct array binascii'),
         RegrTest('test_asdl_parser.py'),
         RegrTest('test_ast.py', core=True, usemodules='struct'),
    +    RegrTest('test_asyncgen.py'),
         RegrTest('test_asynchat.py', usemodules='select fcntl'),
         RegrTest('test_asyncio'),
         RegrTest('test_asyncore.py', usemodules='select fcntl'),
    -    RegrTest('test_asyncgen.py'),
         RegrTest('test_atexit.py', core=True),
         RegrTest('test_audioop.py'),
         RegrTest('test_augassign.py', core=True),
    @@ -131,6 +131,7 @@
         RegrTest('test_builtin.py', core=True, usemodules='binascii'),
         RegrTest('test_bytes.py', usemodules='struct binascii'),
         RegrTest('test_bz2.py', usemodules='bz2'),
    +    RegrTest('test_c_locale_coercion.py'),
         RegrTest('test_calendar.py'),
         RegrTest('test_call.py', core=True),
         RegrTest('test_capi.py', usemodules='cpyext'),
    @@ -138,6 +139,7 @@
         RegrTest('test_cgitb.py'),
         RegrTest('test_charmapcodec.py', core=True),
         RegrTest('test_class.py', core=True),
    +    RegrTest('test_clinic.py'),
         RegrTest('test_cmath.py', core=True),
         RegrTest('test_cmd.py'),
         RegrTest('test_cmd_line.py'),
    @@ -167,7 +169,9 @@
         RegrTest('test_concurrent_futures.py', skip="XXX: deadlocks" if sys.platform == 'win32' else False),
         RegrTest('test_configparser.py'),
         RegrTest('test_contains.py', core=True),
    +    RegrTest('test_context.py'),
         RegrTest('test_contextlib.py', usemodules="thread"),
    +    RegrTest('test_contextlib_async.py'),
         RegrTest('test_copy.py', core=True),
         RegrTest('test_copyreg.py', core=True),
         RegrTest('test_coroutines.py'),
    @@ -177,6 +181,7 @@
         RegrTest('test_csv.py', usemodules='_csv'),
         RegrTest('test_ctypes.py', usemodules="_rawffi thread cpyext"),
         RegrTest('test_curses.py'),
    +    RegrTest('test_dataclasses.py'),
         RegrTest('test_datetime.py', usemodules='binascii struct'),
         RegrTest('test_dbm.py'),
         RegrTest('test_dbm_dumb.py'),
    @@ -206,6 +211,7 @@
         RegrTest('test_dynamicclassattribute.py'),
         RegrTest('test_eintr.py'),
         RegrTest('test_email'),
    +    RegrTest('test_embed.py'),
         RegrTest('test_ensurepip.py'),
         RegrTest('test_enum.py'),
         RegrTest('test_enumerate.py', core=True),
    @@ -231,6 +237,7 @@
         RegrTest('test_format.py', core=True),
         RegrTest('test_fractions.py'),
         RegrTest('test_frame.py'),
    +    RegrTest('test_frozen.py'),
         RegrTest('test_fstring.py'),
         RegrTest('test_ftplib.py'),
         RegrTest('test_funcattrs.py', core=True),
    @@ -241,14 +248,15 @@
         RegrTest('test_future5.py', core=True),
         RegrTest('test_gc.py', usemodules='_weakref', skip="implementation detail"),
         RegrTest('test_gdb.py', skip="not applicable"),
    +    RegrTest('test_generator_stop.py'),
         RegrTest('test_generators.py', core=True, usemodules='thread _weakref'),
    +    RegrTest('test_genericclass.py'),
         RegrTest('test_genericpath.py'),
         RegrTest('test_genexps.py', core=True, usemodules='_weakref'),
         RegrTest('test_getargs2.py', usemodules='binascii', skip=True),
         RegrTest('test_getopt.py', core=True),
         RegrTest('test_getpass.py'),
         RegrTest('test_gettext.py'),
    -    RegrTest('test_generator_stop.py'),
         RegrTest('test_glob.py', core=True),
         RegrTest('test_global.py', core=True),
         RegrTest('test_grammar.py', core=True),
    @@ -267,9 +275,9 @@
         RegrTest('test_idle.py'),
         RegrTest('test_imaplib.py'),
         RegrTest('test_imghdr.py'),
    +    RegrTest('test_imp.py', core=True, usemodules='thread'),
         RegrTest('test_import'),
         RegrTest('test_importlib'),
    -    RegrTest('test_imp.py', core=True, usemodules='thread'),
         RegrTest('test_index.py'),
         RegrTest('test_inspect.py', usemodules="struct unicodedata"),
         RegrTest('test_int.py', core=True),
    @@ -296,7 +304,6 @@
         RegrTest('test_longexp.py', core=True),
         RegrTest('test_lzma.py'),
         RegrTest('test_macpath.py'),
    -    RegrTest('test_macurl2path.py'),
         RegrTest('test_mailbox.py'),
         RegrTest('test_mailcap.py'),
         RegrTest('test_marshal.py', core=True),
    @@ -445,9 +452,9 @@
         RegrTest('test_tk.py'),
         RegrTest('test_tokenize.py'),
         RegrTest('test_tools', skip="CPython internal details"),
    +    RegrTest('test_trace.py'),
         RegrTest('test_traceback.py', core=True),
         RegrTest('test_tracemalloc.py'),
    -    RegrTest('test_trace.py'),
         RegrTest('test_ttk_guionly.py'),
         RegrTest('test_ttk_textonly.py'),
         RegrTest('test_tuple.py', core=True),
    @@ -464,7 +471,6 @@
         RegrTest('test_unicodedata.py'),
         RegrTest('test_unittest.py', core=True),
         RegrTest('test_univnewlines.py'),
    -    RegrTest('test_utf8source.py'),
         RegrTest('test_unpack.py', core=True),
         RegrTest('test_unpack_ex.py', core=True),
         RegrTest('test_urllib.py'),
    @@ -477,6 +483,8 @@
         RegrTest('test_userdict.py', core=True),
         RegrTest('test_userlist.py', core=True),
         RegrTest('test_userstring.py', core=True),
    +    RegrTest('test_utf8_mode.py'),
    +    RegrTest('test_utf8source.py'),
         RegrTest('test_uu.py'),
         RegrTest('test_uuid.py'),
         RegrTest('test_venv.py', usemodules="struct"),
    @@ -498,6 +506,7 @@
         RegrTest('test_xml_etree_c.py'),
         RegrTest('test_xmlrpc.py'),
         RegrTest('test_xmlrpc_net.py'),
    +    RegrTest('test_xxtestfuzz.py', skip="CPython internal details"),
         RegrTest('test_yield_from.py'),
         RegrTest('test_zipapp.py'),
         RegrTest('test_zipfile.py'),
    
    From pypy.commits at gmail.com  Sat Aug 31 19:32:44 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Sat, 31 Aug 2019 16:32:44 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: hack to get the ABCs back into collections
    Message-ID: <5d6b039c.1c69fb81.87b4f.6716@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97357:e0e7a3c09e30
    Date: 2019-09-01 00:07 +0100
    http://bitbucket.org/pypy/pypy/changeset/e0e7a3c09e30/
    
    Log:	hack to get the ABCs back into collections
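
For context: CPython 3.7 exposes the collections ABCs lazily through a module-level
__getattr__ (PEP 562), which PyPy does not implement yet, hence the eager star-import added
in the diff below. A minimal sketch of the mechanism being worked around (toy module with
assumed names, not PyPy code):

    # lazy_abcs.py -- __getattr__ is consulted only when normal module lookup fails
    import warnings

    def __getattr__(name):
        if name == "Mapping":
            warnings.warn("Mapping should be imported from collections.abc",
                          DeprecationWarning, stacklevel=2)
            from collections.abc import Mapping
            return Mapping
        raise AttributeError("module %r has no attribute %r" % (__name__, name))

With the hook, "import lazy_abcs; lazy_abcs.Mapping" resolves lazily; without it (as on PyPy
here), the names have to be placed in the module dict up front.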
    
    diff --git a/lib-python/3/collections/__init__.py b/lib-python/3/collections/__init__.py
    --- a/lib-python/3/collections/__init__.py
    +++ b/lib-python/3/collections/__init__.py
    @@ -39,6 +39,9 @@
         pass
     
     
    +# XXX: temporary PyPy hack until we implement module __getattr__
    +from _collections_abc import *
    +
     def __getattr__(name):
         # For backwards compatibility, continue to make the collections ABCs
         # through Python 3.6 available through the collections module.
    
    From pypy.commits at gmail.com  Sat Aug 31 19:32:46 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Sat, 31 Aug 2019 16:32:46 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: Implement str.isascii
    Message-ID: <5d6b039e.1c69fb81.2abcd.7996@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97358:f765d10dbfa2
    Date: 2019-09-01 00:30 +0100
    http://bitbucket.org/pypy/pypy/changeset/f765d10dbfa2/
    
    Log:	Implement str.isascii
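
The PyPy-level implementation below reuses is_ascii(), i.e. it checks that the UTF-8 byte
length equals the character count; that works because ASCII code points are the only ones
UTF-8 encodes in a single byte. A quick plain-Python check of the invariant (not PyPy
internals):

    for s in ("", "abcdefg\t", "abc\u0374"):
        print(repr(s), len(s.encode("utf-8")) == len(s))
    # -> True, True, False: only the all-ASCII strings satisfy the equality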
    
    diff --git a/pypy/objspace/std/test/test_unicodeobject.py b/pypy/objspace/std/test/test_unicodeobject.py
    --- a/pypy/objspace/std/test/test_unicodeobject.py
    +++ b/pypy/objspace/std/test/test_unicodeobject.py
    @@ -355,6 +355,12 @@
             # single surrogate character
             assert not "\ud800".isprintable()
     
    +    def test_isascii(self):
    +        assert "".isascii()
    +        assert "abcdefg\t".isascii()
    +        assert not "abc\u0374".isascii()
    +        assert not "\ud800abc".isascii()
    +
         @py.test.mark.skipif("not config.option.runappdirect and sys.maxunicode == 0xffff")
         def test_isprintable_wide(self):
             assert '\U0001F46F'.isprintable()  # Since unicode 6.0
    diff --git a/pypy/objspace/std/unicodeobject.py b/pypy/objspace/std/unicodeobject.py
    --- a/pypy/objspace/std/unicodeobject.py
    +++ b/pypy/objspace/std/unicodeobject.py
    @@ -1035,6 +1035,9 @@
         def is_ascii(self):
             return self._length == len(self._utf8)
     
    +    def descr_isascii(self, space):
    +        return space.newbool(self.is_ascii())
    +
         def _index_to_byte(self, index):
             if self.is_ascii():
                 assert index >= 0
    @@ -1506,6 +1509,13 @@
             and there is at least one character in S, False otherwise.
             """
     
    +    def isascii():
    +        """Return True if all characters in the string are ASCII, False otherwise.
    +
    +        ASCII characters have code points in the range U+0000-U+007F.
    +        Empty string is ASCII too.
    +        """
    +
         def casefold():
             """S.casefold() -> str
     
    @@ -1824,6 +1834,8 @@
                              doc=UnicodeDocstrings.isalnum.__doc__),
         isalpha = interp2app(W_UnicodeObject.descr_isalpha,
                              doc=UnicodeDocstrings.isalpha.__doc__),
    +    isascii = interp2app(W_UnicodeObject.descr_isascii,
    +                         doc=UnicodeDocstrings.isascii.__doc__),
         isdecimal = interp2app(W_UnicodeObject.descr_isdecimal,
                                doc=UnicodeDocstrings.isdecimal.__doc__),
         isdigit = interp2app(W_UnicodeObject.descr_isdigit,
    
    From pypy.commits at gmail.com  Sat Aug 31 20:08:24 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Sat, 31 Aug 2019 17:08:24 -0700 (PDT)
    Subject: [pypy-commit] pypy py3.7: get _testcapi to compile again
    Message-ID: <5d6b0bf8.1c69fb81.1ff80.69bf@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: py3.7
    Changeset: r97359:72ff9d7c73e8
    Date: 2019-09-01 00:54 +0100
    http://bitbucket.org/pypy/pypy/changeset/72ff9d7c73e8/
    
    Log:	get _testcapi to compile again
    
    diff --git a/lib_pypy/_testcapimodule.c b/lib_pypy/_testcapimodule.c
    --- a/lib_pypy/_testcapimodule.c
    +++ b/lib_pypy/_testcapimodule.c
    @@ -2279,11 +2279,13 @@
     }
     
     
    +#ifndef PYPY_VERSION
     /* Makes three variations on timezone representing UTC-5:
        1. timezone with offset and name from PyDateTimeAPI
        2. timezone with offset and name from PyTimeZone_FromOffsetAndName
        3. timezone with offset (no name) from PyTimeZone_FromOffset
     */
    +
     static PyObject *
     make_timezones_capi(PyObject *self, PyObject *args) {
         PyObject *offset = PyDelta_FromDSU(0, -18000, 0);
    @@ -2342,6 +2344,7 @@
             return PyDateTimeAPI->TimeZone_UTC;
         }
     }
    +#endif /* PYPY_VERSION */
     
     
     /* test_thread_state spawns a thread of its own, and that thread releases
    @@ -2427,6 +2430,7 @@
         Py_RETURN_NONE;
     }
     
    +#ifndef PYPY_VERSION
     /* test Py_AddPendingCalls using threads */
     static int _pending_callback(void *arg)
     {
    @@ -2463,8 +2467,6 @@
         Py_RETURN_TRUE;
     }
     
    -#ifndef PYPY_VERSION
    -
     /* Some tests of PyUnicode_FromFormat().  This needs more tests. */
     static PyObject *
     test_string_from_format(PyObject *self, PyObject *Py_UNUSED(ignored))
    @@ -4574,6 +4576,7 @@
     }
     
     
    +#ifndef PYPY_VERSION
     static PyObject *
     test_pythread_tss_key_state(PyObject *self, PyObject *args)
     {
    @@ -4659,6 +4662,7 @@
         const _PyMainInterpreterConfig *config = &interp->config;
         return _PyMainInterpreterConfig_AsDict(config);
     }
    +#endif  /* PYPY_VERSION */
     
     
     static PyMethodDef TestMethods[] = {
    @@ -4673,9 +4677,11 @@
         {"datetime_check_datetime",     datetime_check_datetime,     METH_VARARGS},
         {"datetime_check_delta",     datetime_check_delta,           METH_VARARGS},
         {"datetime_check_tzinfo",     datetime_check_tzinfo,         METH_VARARGS},
    +#ifndef PYPY_VERSION
         {"make_timezones_capi",     make_timezones_capi,             METH_NOARGS},
         {"get_timezones_offset_zero",   get_timezones_offset_zero,   METH_NOARGS},
         {"get_timezone_utc_capi",    get_timezone_utc_capi,            METH_VARARGS},
    +#endif
         {"test_list_api",           (PyCFunction)test_list_api,      METH_NOARGS},
         {"test_dict_iteration",     (PyCFunction)test_dict_iteration,METH_NOARGS},
     #ifndef PYPY_VERSION
    @@ -4785,7 +4791,9 @@
         {"unicode_transformdecimaltoascii", unicode_transformdecimaltoascii, METH_VARARGS},
         {"unicode_legacy_string",   unicode_legacy_string,           METH_VARARGS},
         {"_test_thread_state",      test_thread_state,               METH_VARARGS},
    +#ifndef PYPY_VERSION
         {"_pending_threadfunc",     pending_threadfunc,              METH_VARARGS},
    +#endif
     #ifdef HAVE_GETTIMEOFDAY
         {"profile_int",             profile_int,                     METH_NOARGS},
     #endif
    @@ -4905,11 +4913,13 @@
         {"get_mapping_keys", get_mapping_keys, METH_O},
         {"get_mapping_values", get_mapping_values, METH_O},
         {"get_mapping_items", get_mapping_items, METH_O},
    +#ifndef PYPY_VERSION
         {"test_pythread_tss_key_state", test_pythread_tss_key_state, METH_VARARGS},
         {"hamt", new_hamt, METH_NOARGS},
         {"get_global_config", get_global_config, METH_NOARGS},
         {"get_core_config", get_core_config, METH_NOARGS},
         {"get_main_config", get_main_config, METH_NOARGS},
    +#endif /* PYPY_VERSION */
         {NULL, NULL} /* sentinel */
     };
     
    
    From pypy.commits at gmail.com  Sat Aug 31 20:56:15 2019
    From: pypy.commits at gmail.com (rlamy)
    Date: Sat, 31 Aug 2019 17:56:15 -0700 (PDT)
    Subject: [pypy-commit] buildbot default: aarch64 shouldn't use the same lock
     as bencher4!
    Message-ID: <5d6b172f.1c69fb81.39fb0.7d58@mx.google.com>
    
    Author: Ronan Lamy 
    Branch: 
    Changeset: r1095:50ceac4fc422
    Date: 2019-09-01 01:55 +0100
    http://bitbucket.org/pypy/buildbot/changeset/50ceac4fc422/
    
    Log:	aarch64 shouldn't use the same lock as bencher4!
    
    diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py
    --- a/bot2/pypybuildbot/master.py
    +++ b/bot2/pypybuildbot/master.py
    @@ -70,7 +70,7 @@
     WinSlaveLock = pypybuilds.WinSlaveLock
     #SpeedOldLock = pypybuilds.SpeedOldLock
     Bencher4Lock = pypybuilds.Bencher4Lock
    -AARCH64Lock = pypybuilds.Bencher4Lock
    +AARCH64Lock = pypybuilds.AARCH64Lock
     
     pypyOwnTestFactory = pypybuilds.Own()
     pypyOwnTestFactoryWin = pypybuilds.Own(platform="win32")
    @@ -311,7 +311,7 @@
                 JITLINUX64,                # on bencher4, uses 1 core
                 JITAARCH64,
                 JITLINUX_S390X,
    -            #APPLVLLINUX32,            
    +            #APPLVLLINUX32,
                 #APPLVLLINUX64,             # on bencher4, uses 1 core
                 # other platforms
                 #MACOSX32,                 # on minime
    @@ -329,7 +329,7 @@
             Nightly("nightly-0-01", [
                 LINUX32RPYTHON,            # on benchermarker_32, uses all cores
                 LINUX64RPYTHON,            # on bencher4, uses all cores
    -            AARCH64RPYTHON,            
    +            AARCH64RPYTHON,
                 WIN32RPYTHON,              # on SalsaSalsa
                 LINUX_S390XRPYTHON,
                 ], branch='default', hour=0, minute=0, onlyIfChanged=True,
    @@ -363,7 +363,7 @@
                 LINUX64OWN,                # on bencher4, uses all cores
                 AARCH64OWN,
                 JITLINUX64,                # on bencher4, uses 1 core
    -            JITAARCH64,                
    +            JITAARCH64,
                 JITMACOSX64,               # on xerxes
                 JITWIN32,                  # on SalsaSalsa
                 ], branch="py3.6", hour=3, minute=0,