[pypy-commit] pypy default: clean out lib-python/3*

mattip pypy.commits at gmail.com
Wed Aug 21 13:37:20 EDT 2019


Author: Matti Picus <matti.picus at gmail.com>
Branch: 
Changeset: r97235:b75bc7eac637
Date: 2019-08-21 20:11 +0300
http://bitbucket.org/pypy/pypy/changeset/b75bc7eac637/

Log:	clean out lib-python/3*

diff too long, truncating to 2000 out of 5659 lines

diff --git a/lib-python/3.2/test/test_tools.py b/lib-python/3.2/test/test_tools.py
deleted file mode 100644
--- a/lib-python/3.2/test/test_tools.py
+++ /dev/null
@@ -1,433 +0,0 @@
-"""Tests for scripts in the Tools directory.
-
-This file contains regression tests for some of the scripts found in the
-Tools directory of a Python checkout or tarball, such as reindent.py.
-"""
-
-import os
-import sys
-import imp
-import unittest
-import shutil
-import subprocess
-import sysconfig
-import tempfile
-import textwrap
-from test import support
-from test.script_helper import assert_python_ok, temp_dir
-
-if not sysconfig.is_python_build():
-    # XXX some installers do contain the tools, should we detect that
-    # and run the tests in that case too?
-    raise unittest.SkipTest('test irrelevant for an installed Python')
-
-basepath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
-                        'Tools')
-scriptsdir = os.path.join(basepath, 'scripts')
-
-
-class ReindentTests(unittest.TestCase):
-    script = os.path.join(scriptsdir, 'reindent.py')
-
-    def test_noargs(self):
-        assert_python_ok(self.script)
-
-    def test_help(self):
-        rc, out, err = assert_python_ok(self.script, '-h')
-        self.assertEqual(out, b'')
-        self.assertGreater(err, b'')
-
-
-class PindentTests(unittest.TestCase):
-    script = os.path.join(scriptsdir, 'pindent.py')
-
-    def assertFileEqual(self, fn1, fn2):
-        with open(fn1) as f1, open(fn2) as f2:
-            self.assertEqual(f1.readlines(), f2.readlines())
-
-    def pindent(self, source, *args):
-        with subprocess.Popen(
-                (sys.executable, self.script) + args,
-                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-                universal_newlines=True) as proc:
-            out, err = proc.communicate(source)
-        self.assertIsNone(err)
-        return out
-
-    def lstriplines(self, data):
-        return '\n'.join(line.lstrip() for line in data.splitlines()) + '\n'
-
-    def test_selftest(self):
-        self.maxDiff = None
-        with temp_dir() as directory:
-            data_path = os.path.join(directory, '_test.py')
-            with open(self.script) as f:
-                closed = f.read()
-            with open(data_path, 'w') as f:
-                f.write(closed)
-
-            rc, out, err = assert_python_ok(self.script, '-d', data_path)
-            self.assertEqual(out, b'')
-            self.assertEqual(err, b'')
-            backup = data_path + '~'
-            self.assertTrue(os.path.exists(backup))
-            with open(backup) as f:
-                self.assertEqual(f.read(), closed)
-            with open(data_path) as f:
-                clean = f.read()
-            compile(clean, '_test.py', 'exec')
-            self.assertEqual(self.pindent(clean, '-c'), closed)
-            self.assertEqual(self.pindent(closed, '-d'), clean)
-
-            rc, out, err = assert_python_ok(self.script, '-c', data_path)
-            self.assertEqual(out, b'')
-            self.assertEqual(err, b'')
-            with open(backup) as f:
-                self.assertEqual(f.read(), clean)
-            with open(data_path) as f:
-                self.assertEqual(f.read(), closed)
-
-            broken = self.lstriplines(closed)
-            with open(data_path, 'w') as f:
-                f.write(broken)
-            rc, out, err = assert_python_ok(self.script, '-r', data_path)
-            self.assertEqual(out, b'')
-            self.assertEqual(err, b'')
-            with open(backup) as f:
-                self.assertEqual(f.read(), broken)
-            with open(data_path) as f:
-                indented = f.read()
-            compile(indented, '_test.py', 'exec')
-            self.assertEqual(self.pindent(broken, '-r'), indented)
-
-    def pindent_test(self, clean, closed):
-        self.assertEqual(self.pindent(clean, '-c'), closed)
-        self.assertEqual(self.pindent(closed, '-d'), clean)
-        broken = self.lstriplines(closed)
-        self.assertEqual(self.pindent(broken, '-r', '-e', '-s', '4'), closed)
-
-    def test_statements(self):
-        clean = textwrap.dedent("""\
-            if a:
-                pass
-
-            if a:
-                pass
-            else:
-                pass
-
-            if a:
-                pass
-            elif:
-                pass
-            else:
-                pass
-
-            while a:
-                break
-
-            while a:
-                break
-            else:
-                pass
-
-            for i in a:
-                break
-
-            for i in a:
-                break
-            else:
-                pass
-
-            try:
-                pass
-            finally:
-                pass
-
-            try:
-                pass
-            except TypeError:
-                pass
-            except ValueError:
-                pass
-            else:
-                pass
-
-            try:
-                pass
-            except TypeError:
-                pass
-            except ValueError:
-                pass
-            finally:
-                pass
-
-            with a:
-                pass
-
-            class A:
-                pass
-
-            def f():
-                pass
-            """)
-
-        closed = textwrap.dedent("""\
-            if a:
-                pass
-            # end if
-
-            if a:
-                pass
-            else:
-                pass
-            # end if
-
-            if a:
-                pass
-            elif:
-                pass
-            else:
-                pass
-            # end if
-
-            while a:
-                break
-            # end while
-
-            while a:
-                break
-            else:
-                pass
-            # end while
-
-            for i in a:
-                break
-            # end for
-
-            for i in a:
-                break
-            else:
-                pass
-            # end for
-
-            try:
-                pass
-            finally:
-                pass
-            # end try
-
-            try:
-                pass
-            except TypeError:
-                pass
-            except ValueError:
-                pass
-            else:
-                pass
-            # end try
-
-            try:
-                pass
-            except TypeError:
-                pass
-            except ValueError:
-                pass
-            finally:
-                pass
-            # end try
-
-            with a:
-                pass
-            # end with
-
-            class A:
-                pass
-            # end class A
-
-            def f():
-                pass
-            # end def f
-            """)
-        self.pindent_test(clean, closed)
-
-    def test_multilevel(self):
-        clean = textwrap.dedent("""\
-            def foobar(a, b):
-                if a == b:
-                    a = a+1
-                elif a < b:
-                    b = b-1
-                    if b > a: a = a-1
-                else:
-                    print 'oops!'
-            """)
-        closed = textwrap.dedent("""\
-            def foobar(a, b):
-                if a == b:
-                    a = a+1
-                elif a < b:
-                    b = b-1
-                    if b > a: a = a-1
-                    # end if
-                else:
-                    print 'oops!'
-                # end if
-            # end def foobar
-            """)
-        self.pindent_test(clean, closed)
-
-    def test_preserve_indents(self):
-        clean = textwrap.dedent("""\
-            if a:
-                     if b:
-                              pass
-            """)
-        closed = textwrap.dedent("""\
-            if a:
-                     if b:
-                              pass
-                     # end if
-            # end if
-            """)
-        self.assertEqual(self.pindent(clean, '-c'), closed)
-        self.assertEqual(self.pindent(closed, '-d'), clean)
-        broken = self.lstriplines(closed)
-        self.assertEqual(self.pindent(broken, '-r', '-e', '-s', '9'), closed)
-        clean = textwrap.dedent("""\
-            if a:
-            \tif b:
-            \t\tpass
-            """)
-        closed = textwrap.dedent("""\
-            if a:
-            \tif b:
-            \t\tpass
-            \t# end if
-            # end if
-            """)
-        self.assertEqual(self.pindent(clean, '-c'), closed)
-        self.assertEqual(self.pindent(closed, '-d'), clean)
-        broken = self.lstriplines(closed)
-        self.assertEqual(self.pindent(broken, '-r'), closed)
-
-    def test_escaped_newline(self):
-        clean = textwrap.dedent("""\
-            class\\
-            \\
-             A:
-               def\
-            \\
-            f:
-                  pass
-            """)
-        closed = textwrap.dedent("""\
-            class\\
-            \\
-             A:
-               def\
-            \\
-            f:
-                  pass
-               # end def f
-            # end class A
-            """)
-        self.assertEqual(self.pindent(clean, '-c'), closed)
-        self.assertEqual(self.pindent(closed, '-d'), clean)
-
-    def test_empty_line(self):
-        clean = textwrap.dedent("""\
-            if a:
-
-                pass
-            """)
-        closed = textwrap.dedent("""\
-            if a:
-
-                pass
-            # end if
-            """)
-        self.pindent_test(clean, closed)
-
-    def test_oneline(self):
-        clean = textwrap.dedent("""\
-            if a: pass
-            """)
-        closed = textwrap.dedent("""\
-            if a: pass
-            # end if
-            """)
-        self.pindent_test(clean, closed)
-
-
-class TestSundryScripts(unittest.TestCase):
-    # At least make sure the rest don't have syntax errors.  When tests are
-    # added for a script it should be added to the whitelist below.
-
-    # scripts that have independent tests.
-    whitelist = ['reindent.py']
-    # scripts that can't be imported without running
-    blacklist = ['make_ctype.py']
-    # scripts that use windows-only modules
-    windows_only = ['win_add2path.py']
-    # blacklisted for other reasons
-    other = ['analyze_dxp.py']
-
-    skiplist = blacklist + whitelist + windows_only + other
-
-    def setUp(self):
-        cm = support.DirsOnSysPath(scriptsdir)
-        cm.__enter__()
-        self.addCleanup(cm.__exit__)
-
-    def test_sundry(self):
-        for fn in os.listdir(scriptsdir):
-            if fn.endswith('.py') and fn not in self.skiplist:
-                __import__(fn[:-3])
-
-    @unittest.skipIf(sys.platform != "win32", "Windows-only test")
-    def test_sundry_windows(self):
-        for fn in self.windows_only:
-            __import__(fn[:-3])
-
-    @unittest.skipIf(not support.threading, "test requires _thread module")
-    def test_analyze_dxp_import(self):
-        if hasattr(sys, 'getdxp'):
-            import analyze_dxp
-        else:
-            with self.assertRaises(RuntimeError):
-                import analyze_dxp
-
-
-class PdepsTests(unittest.TestCase):
-
-    @classmethod
-    def setUpClass(self):
-        path = os.path.join(scriptsdir, 'pdeps.py')
-        self.pdeps = imp.load_source('pdeps', path)
-
-    @classmethod
-    def tearDownClass(self):
-        if 'pdeps' in sys.modules:
-            del sys.modules['pdeps']
-
-    def test_process_errors(self):
-        # Issue #14492: m_import.match(line) can be None.
-        with tempfile.TemporaryDirectory() as tmpdir:
-            fn = os.path.join(tmpdir, 'foo')
-            with open(fn, 'w') as stream:
-                stream.write("#!/this/will/fail")
-            self.pdeps.process(fn, {})
-
-    def test_inverse_attribute_error(self):
-        # Issue #14492: this used to fail with an AttributeError.
-        self.pdeps.inverse({'a': []})
-
-
-def test_main():
-    support.run_unittest(*[obj for obj in globals().values()
-                               if isinstance(obj, type)])
-
-
-if __name__ == '__main__':
-    unittest.main()
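
For reference on the pindent tests being removed: a minimal sketch of driving
Tools/scripts/pindent.py the way PindentTests does above -- source on stdin,
'-c' to append "# end ..." closing comments, '-d' to drop them, '-r' to
re-indent from them.  The script path below is an assumption (it only resolves
from a CPython checkout root).

    import subprocess
    import sys

    PINDENT = 'Tools/scripts/pindent.py'   # assumed path, cf. scriptsdir above
    source = "if a: pass\n"
    # '-c' closes blocks by appending "# end ..." comments.
    closed = subprocess.run([sys.executable, PINDENT, '-c'],
                            input=source, capture_output=True,
                            universal_newlines=True).stdout
    print(closed)   # "if a: pass\n# end if\n", as in test_oneline above
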
diff --git a/lib-python/3/_osx_support.py b/lib-python/3/_osx_support.py
deleted file mode 100644
--- a/lib-python/3/_osx_support.py
+++ /dev/null
@@ -1,488 +0,0 @@
-"""Shared OS X support functions."""
-
-import os
-import re
-import sys
-
-__all__ = [
-    'compiler_fixup',
-    'customize_config_vars',
-    'customize_compiler',
-    'get_platform_osx',
-]
-
-# configuration variables that may contain universal build flags,
-# like "-arch" or "-isdkroot", that may need customization for
-# the user environment
-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
-                            'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
-                            'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
-                            'PY_CORE_CFLAGS')
-
-# configuration variables that may contain compiler calls
-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')
-
-# prefix added to original configuration variable names
-_INITPRE = '_OSX_SUPPORT_INITIAL_'
-
-
-def _find_executable(executable, path=None):
-    """Tries to find 'executable' in the directories listed in 'path'.
-
-    'path' is a string listing directories separated by 'os.pathsep'; it
-    defaults to os.environ['PATH'].  Returns the complete filename or None if not found.
-    """
-    if path is None:
-        path = os.environ['PATH']
-
-    paths = path.split(os.pathsep)
-    base, ext = os.path.splitext(executable)
-
-    if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
-        executable = executable + '.exe'
-
-    if not os.path.isfile(executable):
-        for p in paths:
-            f = os.path.join(p, executable)
-            if os.path.isfile(f):
-                # the file exists, we have a shot at spawn working
-                return f
-        return None
-    else:
-        return executable
-
-
-def _read_output(commandstring):
-    """Output from succesful command execution or None"""
-    # Similar to os.popen(commandstring, "r").read(),
-    # but without actually using os.popen because that
-    # function is not usable during python bootstrap.
-    # tempfile is also not available then.
-    import contextlib
-    try:
-        import tempfile
-        fp = tempfile.NamedTemporaryFile()
-    except ImportError:
-        fp = open("/tmp/_osx_support.%s"%(
-            os.getpid(),), "w+b")
-
-    with contextlib.closing(fp) as fp:
-        cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
-        return fp.read().decode('utf-8').strip() if not os.system(cmd) else None
-
-
-def _find_build_tool(toolname):
-    """Find a build tool on current path or using xcrun"""
-    return (_find_executable(toolname)
-                or _read_output("/usr/bin/xcrun -find %s" % (toolname,))
-                or ''
-            )
-
-_SYSTEM_VERSION = None
-
-def _get_system_version():
-    """Return the OS X system version as a string"""
-    # Reading this plist is a documented way to get the system
-    # version (see the documentation for the Gestalt Manager)
-    # We avoid using platform.mac_ver to avoid possible bootstrap issues during
-    # the build of Python itself (distutils is used to build standard library
-    # extensions).
-
-    global _SYSTEM_VERSION
-
-    if _SYSTEM_VERSION is None:
-        _SYSTEM_VERSION = ''
-        try:
-            f = open('/System/Library/CoreServices/SystemVersion.plist')
-        except IOError:
-            # We're on a plain darwin box, fall back to the default
-            # behaviour.
-            pass
-        else:
-            try:
-                m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
-                              r'<string>(.*?)</string>', f.read())
-            finally:
-                f.close()
-            if m is not None:
-                _SYSTEM_VERSION = '.'.join(m.group(1).split('.')[:2])
-            # else: fall back to the default behaviour
-
-    return _SYSTEM_VERSION
-
-def _remove_original_values(_config_vars):
-    """Remove original unmodified values for testing"""
-    # This is needed for higher-level cross-platform tests of get_platform.
-    for k in list(_config_vars):
-        if k.startswith(_INITPRE):
-            del _config_vars[k]
-
-def _save_modified_value(_config_vars, cv, newvalue):
-    """Save modified and original unmodified value of configuration var"""
-
-    oldvalue = _config_vars.get(cv, '')
-    if (oldvalue != newvalue) and (_INITPRE + cv not in _config_vars):
-        _config_vars[_INITPRE + cv] = oldvalue
-    _config_vars[cv] = newvalue
-
-def _supports_universal_builds():
-    """Returns True if universal builds are supported on this system"""
-    # As an approximation, we assume that if we are running on 10.4 or above,
-    # then we are running with an Xcode environment that supports universal
-    # builds, in particular -isysroot and -arch arguments to the compiler. This
-    # is in support of allowing 10.4 universal builds to run on 10.3.x systems.
-
-    osx_version = _get_system_version()
-    if osx_version:
-        try:
-            osx_version = tuple(int(i) for i in osx_version.split('.'))
-        except ValueError:
-            osx_version = ''
-    return bool(osx_version >= (10, 4)) if osx_version else False
-
-
-def _find_appropriate_compiler(_config_vars):
-    """Find appropriate C compiler for extension module builds"""
-
-    # Issue #13590:
-    #    The OSX location for the compiler varies between OSX
-    #    (or rather Xcode) releases.  With older releases (up-to 10.5)
-    #    the compiler is in /usr/bin, with newer releases the compiler
-    #    can only be found inside Xcode.app if the "Command Line Tools"
-    #    are not installed.
-    #
-    #    Furthermore, the compiler that can be used varies between
-    #    Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2'
-    #    as the compiler, after that 'clang' should be used because
-    #    gcc-4.2 is either not present, or a copy of 'llvm-gcc' that
-    #    miscompiles Python.
-
-    # skip checks if the compiler was overridden with a CC env variable
-    if 'CC' in os.environ:
-        return _config_vars
-
-    # The CC config var might contain additional arguments.
-    # Ignore them while searching.
-    cc = oldcc = _config_vars['CC'].split()[0]
-    if not _find_executable(cc):
-        # Compiler is not found on the shell search PATH.
-        # Now search for clang, first on PATH (if the Command Line
-        # Tools have been installed in / or if the user has provided
-        # another location via CC).  If not found, try using xcrun
-        # to find an uninstalled clang (within a selected Xcode).
-
-        # NOTE: Cannot use subprocess here because of bootstrap
-        # issues when building Python itself (and os.popen is
-        # implemented on top of subprocess and is therefore not
-        # usable as well)
-
-        cc = _find_build_tool('clang')
-
-    elif os.path.basename(cc).startswith('gcc'):
-        # Compiler is GCC, check if it is LLVM-GCC
-        data = _read_output("'%s' --version"
-                             % (cc.replace("'", "'\"'\"'"),))
-        if 'llvm-gcc' in data:
-            # Found LLVM-GCC, fall back to clang
-            cc = _find_build_tool('clang')
-
-    if not cc:
-        raise SystemError(
-               "Cannot locate working compiler")
-
-    if cc != oldcc:
-        # Found a replacement compiler.
-        # Modify config vars using new compiler, if not already explicitly
-        # overridden by an env variable, preserving additional arguments.
-        for cv in _COMPILER_CONFIG_VARS:
-            if cv in _config_vars and cv not in os.environ:
-                cv_split = _config_vars[cv].split()
-                cv_split[0] = cc if cv != 'CXX' else cc + '++'
-                _save_modified_value(_config_vars, cv, ' '.join(cv_split))
-
-    return _config_vars
-
-
-def _remove_universal_flags(_config_vars):
-    """Remove all universal build arguments from config vars"""
-
-    for cv in _UNIVERSAL_CONFIG_VARS:
-        # Do not alter a config var explicitly overridden by env var
-        if cv in _config_vars and cv not in os.environ:
-            flags = _config_vars[cv]
-            flags = re.sub('-arch\s+\w+\s', ' ', flags, re.ASCII)
-            flags = re.sub('-isysroot [^ \t]*', ' ', flags)
-            _save_modified_value(_config_vars, cv, flags)
-
-    return _config_vars
-
-
-def _remove_unsupported_archs(_config_vars):
-    """Remove any unsupported archs from config vars"""
-    # Different Xcode releases support different sets for '-arch'
-    # flags. In particular, Xcode 4.x no longer supports the
-    # PPC architectures.
-    #
-    # This code automatically removes '-arch ppc' and '-arch ppc64'
-    # when these are not supported. That makes it possible to
-    # build extensions on OSX 10.7 and later with the prebuilt
-    # 32-bit installer on the python.org website.
-
-    # skip checks if the compiler was overridden with a CC env variable
-    if 'CC' in os.environ:
-        return _config_vars
-
-    if re.search('-arch\s+ppc', _config_vars['CFLAGS']) is not None:
-        # NOTE: Cannot use subprocess here because of bootstrap
-        # issues when building Python itself
-        status = os.system("'%s' -arch ppc -x c /dev/null 2>/dev/null"%(
-            _config_vars['CC'].replace("'", "'\"'\"'"),))
-        # The Apple compiler drivers return status 255 if no PPC
-        if (status >> 8) == 255:
-            # Compiler doesn't support PPC, remove the related
-            # '-arch' flags if not explicitly overridden by an
-            # environment variable
-            for cv in _UNIVERSAL_CONFIG_VARS:
-                if cv in _config_vars and cv not in os.environ:
-                    flags = _config_vars[cv]
-                    flags = re.sub('-arch\s+ppc\w*\s', ' ', flags)
-                    _save_modified_value(_config_vars, cv, flags)
-
-    return _config_vars
-
-
-def _override_all_archs(_config_vars):
-    """Allow override of all archs with ARCHFLAGS env var"""
-    # NOTE: This name was introduced by Apple in OSX 10.5 and
-    # is used by several scripting languages distributed with
-    # that OS release.
-    if 'ARCHFLAGS' in os.environ:
-        arch = os.environ['ARCHFLAGS']
-        for cv in _UNIVERSAL_CONFIG_VARS:
-            if cv in _config_vars and '-arch' in _config_vars[cv]:
-                flags = _config_vars[cv]
-                flags = re.sub('-arch\s+\w+\s', ' ', flags)
-                flags = flags + ' ' + arch
-                _save_modified_value(_config_vars, cv, flags)
-
-    return _config_vars
-
-
-def _check_for_unavailable_sdk(_config_vars):
-    """Remove references to any SDKs not available"""
-    # If we're on OSX 10.5 or later and the user tries to
-    # compile an extension using an SDK that is not present
-    # on the current machine it is better to not use an SDK
-    # than to fail.  This is particularly important with
-    # the standalone Command Line Tools alternative to a
-    # full-blown Xcode install since the CLT packages do not
-    # provide SDKs.  If the SDK is not present, it is assumed
-    # that the header files and dev libs have been installed
-    # to /usr and /System/Library by either a standalone CLT
-    # package or the CLT component within Xcode.
-    cflags = _config_vars.get('CFLAGS', '')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is not None:
-        sdk = m.group(1)
-        if not os.path.exists(sdk):
-            for cv in _UNIVERSAL_CONFIG_VARS:
-                # Do not alter a config var explicitly overridden by env var
-                if cv in _config_vars and cv not in os.environ:
-                    flags = _config_vars[cv]
-                    flags = re.sub(r'-isysroot\s+\S+(?:\s|$)', ' ', flags)
-                    _save_modified_value(_config_vars, cv, flags)
-
-    return _config_vars
-
-
-def compiler_fixup(compiler_so, cc_args):
-    """
-    This function will strip '-isysroot PATH' and '-arch ARCH' from the
-    compile flags if the user has specified one them in extra_compile_flags.
-
-    This is needed because '-arch ARCH' adds another architecture to the
-    build, without a way to remove an architecture. Furthermore GCC will
-    barf if multiple '-isysroot' arguments are present.
-    """
-    stripArch = stripSysroot = False
-
-    compiler_so = list(compiler_so)
-
-    if not _supports_universal_builds():
-        # OSX before 10.4.0, these don't support -arch and -isysroot at
-        # all.
-        stripArch = stripSysroot = True
-    else:
-        stripArch = '-arch' in cc_args
-        stripSysroot = '-isysroot' in cc_args
-
-    if stripArch or 'ARCHFLAGS' in os.environ:
-        while True:
-            try:
-                index = compiler_so.index('-arch')
-                # Strip this argument and the next one:
-                del compiler_so[index:index+2]
-            except ValueError:
-                break
-
-    if 'ARCHFLAGS' in os.environ and not stripArch:
-        # User specified different -arch flags in the environ,
-        # see also distutils.sysconfig
-        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
-
-    if stripSysroot:
-        while True:
-            try:
-                index = compiler_so.index('-isysroot')
-                # Strip this argument and the next one:
-                del compiler_so[index:index+2]
-            except ValueError:
-                break
-
-    # Check if the SDK that is used during compilation actually exists,
-    # the universal build requires the usage of a universal SDK and not all
-    # users have that installed by default.
-    sysroot = None
-    if '-isysroot' in cc_args:
-        idx = cc_args.index('-isysroot')
-        sysroot = cc_args[idx+1]
-    elif '-isysroot' in compiler_so:
-        idx = compiler_so.index('-isysroot')
-        sysroot = compiler_so[idx+1]
-
-    if sysroot and not os.path.isdir(sysroot):
-        from distutils import log
-        log.warn("Compiling with an SDK that doesn't seem to exist: %s",
-                sysroot)
-        log.warn("Please check your Xcode installation")
-
-    return compiler_so
-
-
-def customize_config_vars(_config_vars):
-    """Customize Python build configuration variables.
-
-    Called internally from sysconfig with a mutable mapping
-    containing name/value pairs parsed from the configured
-    makefile used to build this interpreter.  Returns
-    the mapping updated as needed to reflect the environment
-    in which the interpreter is running; in the case of
-    a Python from a binary installer, the installed
-    environment may be very different from the build
-    environment, i.e. different OS levels, different
-    built tools, different available CPU architectures.
-
-    This customization is performed whenever
-    distutils.sysconfig.get_config_vars() is first
-    called.  It may be used in environments where no
-    compilers are present, i.e. when installing pure
-    Python dists.  Customization of compiler paths
-    and detection of unavailable archs is deferred
-    until the first extension module build is
-    requested (in distutils.sysconfig.customize_compiler).
-
-    Currently called from distutils.sysconfig
-    """
-
-    if not _supports_universal_builds():
-        # On Mac OS X before 10.4, check if -arch and -isysroot
-        # are in CFLAGS or LDFLAGS and remove them if they are.
-        # This is needed when building extensions on a 10.3 system
-        # using a universal build of python.
-        _remove_universal_flags(_config_vars)
-
-    # Allow user to override all archs with ARCHFLAGS env var
-    _override_all_archs(_config_vars)
-
-    # Remove references to sdks that are not found
-    _check_for_unavailable_sdk(_config_vars)
-
-    return _config_vars
-
-
-def customize_compiler(_config_vars):
-    """Customize compiler path and configuration variables.
-
-    This customization is performed when the first
-    extension module build is requested
-    in distutils.sysconfig.customize_compiler).
-    """
-
-    # Find a compiler to use for extension module builds
-    _find_appropriate_compiler(_config_vars)
-
-    # Remove ppc arch flags if not supported here
-    _remove_unsupported_archs(_config_vars)
-
-    # Allow user to override all archs with ARCHFLAGS env var
-    _override_all_archs(_config_vars)
-
-    return _config_vars
-
-
-def get_platform_osx(_config_vars, osname, release, machine):
-    """Filter values for get_platform()"""
-    # called from get_platform() in sysconfig and distutils.util
-    #
-    # For our purposes, we'll assume that the system version from
-    # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
-    # to. This makes the compatibility story a bit more sane because the
-    # machine is going to compile and link as if it were
-    # MACOSX_DEPLOYMENT_TARGET.
-
-    macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '')
-    macrelease = _get_system_version() or macver
-    macver = macver or macrelease
-
-    if macver:
-        release = macver
-        osname = "macosx"
-
-        # Use the original CFLAGS value, if available, so that we
-        # return the same machine type for the platform string.
-        # Otherwise, distutils may consider this a cross-compiling
-        # case and disallow installs.
-        cflags = _config_vars.get(_INITPRE+'CFLAGS',
-                                    _config_vars.get('CFLAGS', ''))
-        if ((macrelease + '.') >= '10.4.' and
-            '-arch' in cflags.strip()):
-            # The universal build will build fat binaries, but not on
-            # systems before 10.4
-
-            machine = 'fat'
-
-            archs = re.findall('-arch\s+(\S+)', cflags)
-            archs = tuple(sorted(set(archs)))
-
-            if len(archs) == 1:
-                machine = archs[0]
-            elif archs == ('i386', 'ppc'):
-                machine = 'fat'
-            elif archs == ('i386', 'x86_64'):
-                machine = 'intel'
-            elif archs == ('i386', 'ppc', 'x86_64'):
-                machine = 'fat3'
-            elif archs == ('ppc64', 'x86_64'):
-                machine = 'fat64'
-            elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
-                machine = 'universal'
-            else:
-                raise ValueError(
-                   "Don't know machine value for archs=%r" % (archs,))
-
-        elif machine == 'i386':
-            # On OSX the machine type returned by uname is always the
-            # 32-bit variant, even if the executable architecture is
-            # the 64-bit variant
-            if sys.maxsize >= 2**32:
-                machine = 'x86_64'
-
-        elif machine in ('PowerPC', 'Power_Macintosh'):
-            # Pick a sane name for the PPC architecture.
-            # See 'i386' case
-            if sys.maxsize >= 2**32:
-                machine = 'ppc64'
-            else:
-                machine = 'ppc'
-
-    return (osname, release, machine)
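
For reference, a minimal sketch of the ARCHFLAGS handling implemented by
_override_all_archs() above: existing '-arch' flags in the universal config
vars are dropped, the ARCHFLAGS value is appended, and the untouched original
is saved under the '_OSX_SUPPORT_INITIAL_' prefix.  The values below are made
up for illustration; _osx_support itself ships with CPython and is importable
on any platform.

    import os
    import _osx_support

    os.environ['ARCHFLAGS'] = '-arch x86_64'
    config_vars = {'CFLAGS': '-g -O3 -arch ppc -arch i386 '}
    _osx_support._override_all_archs(config_vars)
    # old '-arch' flags stripped, '-arch x86_64' appended
    print(config_vars['CFLAGS'])
    # original value preserved (later used by get_platform_osx)
    print(config_vars['_OSX_SUPPORT_INITIAL_CFLAGS'])
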
diff --git a/lib-python/3/test/crashers/trace_at_recursion_limit.py b/lib-python/3/test/crashers/trace_at_recursion_limit.py
deleted file mode 100644
--- a/lib-python/3/test/crashers/trace_at_recursion_limit.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""
-From http://bugs.python.org/issue6717
-
-A misbehaving trace hook can trigger a segfault by exceeding the recursion
-limit.
-"""
-import sys
-
-
-def x():
-    pass
-
-def g(*args):
-    if True: # change to True to crash interpreter
-        try:
-            x()
-        except:
-            pass
-    return g
-
-def f():
-    print(sys.getrecursionlimit())
-    f()
-
-sys.settrace(g)
-
-f()
diff --git a/lib-python/3/test/json_tests/test_tool.py b/lib-python/3/test/json_tests/test_tool.py
deleted file mode 100644
--- a/lib-python/3/test/json_tests/test_tool.py
+++ /dev/null
@@ -1,69 +0,0 @@
-import os
-import sys
-import textwrap
-import unittest
-import subprocess
-from test import support
-from test.script_helper import assert_python_ok
-
-class TestTool(unittest.TestCase):
-    data = """
-
-        [["blorpie"],[ "whoops" ] , [
-                                 ],\t"d-shtaeou",\r"d-nthiouh",
-        "i-vhbjkhnth", {"nifty":87}, {"morefield" :\tfalse,"field"
-            :"yes"}  ]
-           """
-
-    expect = textwrap.dedent("""\
-    [
-        [
-            "blorpie"
-        ],
-        [
-            "whoops"
-        ],
-        [],
-        "d-shtaeou",
-        "d-nthiouh",
-        "i-vhbjkhnth",
-        {
-            "nifty": 87
-        },
-        {
-            "field": "yes",
-            "morefield": false
-        }
-    ]
-    """)
-
-    def test_stdin_stdout(self):
-        with subprocess.Popen(
-                (sys.executable, '-m', 'json.tool'),
-                stdin=subprocess.PIPE, stdout=subprocess.PIPE) as proc:
-            out, err = proc.communicate(self.data.encode())
-        self.assertEqual(out.splitlines(), self.expect.encode().splitlines())
-        self.assertEqual(err, None)
-
-    def _create_infile(self):
-        infile = support.TESTFN
-        with open(infile, "w") as fp:
-            self.addCleanup(os.remove, infile)
-            fp.write(self.data)
-        return infile
-
-    def test_infile_stdout(self):
-        infile = self._create_infile()
-        rc, out, err = assert_python_ok('-m', 'json.tool', infile)
-        self.assertEqual(out.splitlines(), self.expect.encode().splitlines())
-        self.assertEqual(err, b'')
-
-    def test_infile_outfile(self):
-        infile = self._create_infile()
-        outfile = support.TESTFN + '.out'
-        rc, out, err = assert_python_ok('-m', 'json.tool', infile, outfile)
-        self.addCleanup(os.remove, outfile)
-        with open(outfile, "r") as fp:
-            self.assertEqual(fp.read(), self.expect)
-        self.assertEqual(out, b'')
-        self.assertEqual(err, b'')
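
For reference, a minimal sketch of the interface exercised above: "python -m
json.tool" reads JSON from stdin (or from a file argument) and writes an
indented version to stdout.  The input literal below is made up.

    import subprocess
    import sys

    raw = b'[{"morefield": false, "field": "yes"}, [], "d-shtaeou"]'
    out = subprocess.run([sys.executable, '-m', 'json.tool'],
                         input=raw, stdout=subprocess.PIPE, check=True).stdout
    print(out.decode())    # pretty-printed, one element per line
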
diff --git a/lib-python/3/test/mp_fork_bomb.py b/lib-python/3/test/mp_fork_bomb.py
deleted file mode 100644
--- a/lib-python/3/test/mp_fork_bomb.py
+++ /dev/null
@@ -1,13 +0,0 @@
-import multiprocessing, sys
-
-def foo():
-    print("123")
-
-# Because "if __name__ == '__main__'" is missing this will not work
-# correctly on Windows.  However, we should get a RuntimeError rather
-# than the Windows equivalent of a fork bomb.
-
-p = multiprocessing.Process(target=foo)
-p.start()
-p.join()
-sys.exit(p.exitcode)
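
For reference, the guard that the comment in mp_fork_bomb.py refers to looks
like the sketch below: with the Windows (spawn) start method the child
re-imports the main module, so spawning code that is not guarded this way is
rejected with a RuntimeError instead of fork-bombing.

    import multiprocessing
    import sys

    def foo():
        print("123")

    if __name__ == '__main__':
        p = multiprocessing.Process(target=foo)
        p.start()
        p.join()
        sys.exit(p.exitcode)
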
diff --git a/lib-python/3/test/sample_doctest_no_docstrings.py b/lib-python/3/test/sample_doctest_no_docstrings.py
deleted file mode 100644
--- a/lib-python/3/test/sample_doctest_no_docstrings.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# This is a sample module used for testing doctest.
-#
-# This module is for testing how doctest handles a module with no
-# docstrings.
-
-
-class Foo(object):
-
-    # A class with no docstring.
-
-    def __init__(self):
-        pass
diff --git a/lib-python/3/test/sample_doctest_no_doctests.py b/lib-python/3/test/sample_doctest_no_doctests.py
deleted file mode 100644
--- a/lib-python/3/test/sample_doctest_no_doctests.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""This is a sample module used for testing doctest.
-
-This module is for testing how doctest handles a module with docstrings
-but no doctest examples.
-
-"""
-
-
-class Foo(object):
-    """A docstring with no doctest examples.
-
-    """
-
-    def __init__(self):
-        pass
diff --git a/lib-python/3/test/test__osx_support.py b/lib-python/3/test/test__osx_support.py
deleted file mode 100644
--- a/lib-python/3/test/test__osx_support.py
+++ /dev/null
@@ -1,279 +0,0 @@
-"""
-Test suite for _osx_support: shared OS X support functions.
-"""
-
-import os
-import platform
-import shutil
-import stat
-import sys
-import unittest
-
-import test.support
-
-import _osx_support
-
-@unittest.skipUnless(sys.platform.startswith("darwin"), "requires OS X")
-class Test_OSXSupport(unittest.TestCase):
-
-    def setUp(self):
-        self.maxDiff = None
-        self.prog_name = 'bogus_program_xxxx'
-        self.temp_path_dir = os.path.abspath(os.getcwd())
-        self.env = test.support.EnvironmentVarGuard()
-        self.addCleanup(self.env.__exit__)
-        for cv in ('CFLAGS', 'LDFLAGS', 'CPPFLAGS',
-                            'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'CC',
-                            'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
-                            'PY_CORE_CFLAGS'):
-            if cv in self.env:
-                self.env.unset(cv)
-
-    def add_expected_saved_initial_values(self, config_vars, expected_vars):
-        # Ensure that the initial values for all modified config vars
-        # are also saved with modified keys.
-        expected_vars.update(('_OSX_SUPPORT_INITIAL_'+ k,
-                config_vars[k]) for k in config_vars
-                    if config_vars[k] != expected_vars[k])
-
-    def test__find_executable(self):
-        if self.env['PATH']:
-            self.env['PATH'] = self.env['PATH'] + ':'
-        self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir)
-        test.support.unlink(self.prog_name)
-        self.assertIsNone(_osx_support._find_executable(self.prog_name))
-        self.addCleanup(test.support.unlink, self.prog_name)
-        with open(self.prog_name, 'w') as f:
-            f.write("#!/bin/sh\n/bin/echo OK\n")
-        os.chmod(self.prog_name, stat.S_IRWXU)
-        self.assertEqual(self.prog_name,
-                            _osx_support._find_executable(self.prog_name))
-
-    def test__read_output(self):
-        if self.env['PATH']:
-            self.env['PATH'] = self.env['PATH'] + ':'
-        self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir)
-        test.support.unlink(self.prog_name)
-        self.addCleanup(test.support.unlink, self.prog_name)
-        with open(self.prog_name, 'w') as f:
-            f.write("#!/bin/sh\n/bin/echo ExpectedOutput\n")
-        os.chmod(self.prog_name, stat.S_IRWXU)
-        self.assertEqual('ExpectedOutput',
-                            _osx_support._read_output(self.prog_name))
-
-    def test__find_build_tool(self):
-        out = _osx_support._find_build_tool('cc')
-        self.assertTrue(os.path.isfile(out),
-                            'cc not found - check xcode-select')
-
-    def test__get_system_version(self):
-        self.assertTrue(platform.mac_ver()[0].startswith(
-                                    _osx_support._get_system_version()))
-
-    def test__remove_original_values(self):
-        config_vars = {
-        'CC': 'gcc-test -pthreads',
-        }
-        expected_vars = {
-        'CC': 'clang -pthreads',
-        }
-        cv = 'CC'
-        newvalue = 'clang -pthreads'
-        _osx_support._save_modified_value(config_vars, cv, newvalue)
-        self.assertNotEqual(expected_vars, config_vars)
-        _osx_support._remove_original_values(config_vars)
-        self.assertEqual(expected_vars, config_vars)
-
-    def test__save_modified_value(self):
-        config_vars = {
-        'CC': 'gcc-test -pthreads',
-        }
-        expected_vars = {
-        'CC': 'clang -pthreads',
-        }
-        self.add_expected_saved_initial_values(config_vars, expected_vars)
-        cv = 'CC'
-        newvalue = 'clang -pthreads'
-        _osx_support._save_modified_value(config_vars, cv, newvalue)
-        self.assertEqual(expected_vars, config_vars)
-
-    def test__save_modified_value_unchanged(self):
-        config_vars = {
-        'CC': 'gcc-test -pthreads',
-        }
-        expected_vars = config_vars.copy()
-        cv = 'CC'
-        newvalue = 'gcc-test -pthreads'
-        _osx_support._save_modified_value(config_vars, cv, newvalue)
-        self.assertEqual(expected_vars, config_vars)
-
-    def test__supports_universal_builds(self):
-        import platform
-        self.assertEqual(platform.mac_ver()[0].split('.') >= ['10', '4'],
-                            _osx_support._supports_universal_builds())
-
-    def test__find_appropriate_compiler(self):
-        compilers = (
-                        ('gcc-test', 'i686-apple-darwin11-llvm-gcc-4.2'),
-                        ('clang', 'clang version 3.1'),
-                    )
-        config_vars = {
-        'CC': 'gcc-test -pthreads',
-        'CXX': 'cc++-test',
-        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
-        'LDFLAGS': '-arch ppc -arch i386   -g',
-        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
-        'BLDSHARED': 'gcc-test -bundle -arch ppc -arch i386 -g',
-        'LDSHARED': 'gcc-test -bundle -arch ppc -arch i386 '
-                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
-        }
-        expected_vars = {
-        'CC': 'clang -pthreads',
-        'CXX': 'clang++',
-        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
-        'LDFLAGS': '-arch ppc -arch i386   -g',
-        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
-        'BLDSHARED': 'clang -bundle -arch ppc -arch i386 -g',
-        'LDSHARED': 'clang -bundle -arch ppc -arch i386 '
-                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
-        }
-        self.add_expected_saved_initial_values(config_vars, expected_vars)
-
-        suffix = (':' + self.env['PATH']) if self.env['PATH'] else ''
-        self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix
-        for c_name, c_output in compilers:
-            test.support.unlink(c_name)
-            self.addCleanup(test.support.unlink, c_name)
-            with open(c_name, 'w') as f:
-                f.write("#!/bin/sh\n/bin/echo " + c_output)
-            os.chmod(c_name, stat.S_IRWXU)
-        self.assertEqual(expected_vars,
-                            _osx_support._find_appropriate_compiler(
-                                    config_vars))
-
-    def test__remove_universal_flags(self):
-        config_vars = {
-        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
-        'LDFLAGS': '-arch ppc -arch i386   -g',
-        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
-        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
-        'LDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 '
-                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
-        }
-        expected_vars = {
-        'CFLAGS': '-fno-strict-aliasing  -g -O3    ',
-        'LDFLAGS': '    -g',
-        'CPPFLAGS': '-I.  ',
-        'BLDSHARED': 'gcc-4.0 -bundle    -g',
-        'LDSHARED': 'gcc-4.0 -bundle      -g',
-        }
-        self.add_expected_saved_initial_values(config_vars, expected_vars)
-
-        self.assertEqual(expected_vars,
-                            _osx_support._remove_universal_flags(
-                                    config_vars))
-
-    def test__remove_unsupported_archs(self):
-        config_vars = {
-        'CC': 'clang',
-        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
-        'LDFLAGS': '-arch ppc -arch i386   -g',
-        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
-        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
-        'LDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 '
-                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
-        }
-        expected_vars = {
-        'CC': 'clang',
-        'CFLAGS': '-fno-strict-aliasing  -g -O3  -arch i386  ',
-        'LDFLAGS': ' -arch i386   -g',
-        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
-        'BLDSHARED': 'gcc-4.0 -bundle   -arch i386 -g',
-        'LDSHARED': 'gcc-4.0 -bundle   -arch i386 '
-                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
-        }
-        self.add_expected_saved_initial_values(config_vars, expected_vars)
-
-        suffix = (':' + self.env['PATH']) if self.env['PATH'] else ''
-        self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix
-        c_name = 'clang'
-        test.support.unlink(c_name)
-        self.addCleanup(test.support.unlink, c_name)
-        # exit status 255 means no PPC support in this compiler chain
-        with open(c_name, 'w') as f:
-            f.write("#!/bin/sh\nexit 255")
-        os.chmod(c_name, stat.S_IRWXU)
-        self.assertEqual(expected_vars,
-                            _osx_support._remove_unsupported_archs(
-                                    config_vars))
-
-    def test__override_all_archs(self):
-        self.env['ARCHFLAGS'] = '-arch x86_64'
-        config_vars = {
-        'CC': 'clang',
-        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  ',
-        'LDFLAGS': '-arch ppc -arch i386   -g',
-        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
-        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
-        'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
-                        '-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
-        }
-        expected_vars = {
-        'CC': 'clang',
-        'CFLAGS': '-fno-strict-aliasing  -g -O3     -arch x86_64',
-        'LDFLAGS': '    -g -arch x86_64',
-        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
-        'BLDSHARED': 'gcc-4.0 -bundle    -g -arch x86_64',
-        'LDSHARED': 'gcc-4.0 -bundle   -isysroot '
-                        '/Developer/SDKs/MacOSX10.4u.sdk -g -arch x86_64',
-        }
-        self.add_expected_saved_initial_values(config_vars, expected_vars)
-
-        self.assertEqual(expected_vars,
-                            _osx_support._override_all_archs(
-                                    config_vars))
-
-    def test__check_for_unavailable_sdk(self):
-        config_vars = {
-        'CC': 'clang',
-        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  '
-                        '-isysroot /Developer/SDKs/MacOSX10.1.sdk',
-        'LDFLAGS': '-arch ppc -arch i386   -g',
-        'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.1.sdk',
-        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
-        'LDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 '
-                        '-isysroot /Developer/SDKs/MacOSX10.1.sdk -g',
-        }
-        expected_vars = {
-        'CC': 'clang',
-        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  '
-                        ' ',
-        'LDFLAGS': '-arch ppc -arch i386   -g',
-        'CPPFLAGS': '-I.  ',
-        'BLDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 -g',
-        'LDSHARED': 'gcc-4.0 -bundle  -arch ppc -arch i386 '
-                        ' -g',
-        }
-        self.add_expected_saved_initial_values(config_vars, expected_vars)
-
-        self.assertEqual(expected_vars,
-                            _osx_support._check_for_unavailable_sdk(
-                                    config_vars))
-
-    def test_get_platform_osx(self):
-        # Note, get_platform_osx is currently tested more extensively
-        # indirectly by test_sysconfig and test_distutils
-        config_vars = {
-        'CFLAGS': '-fno-strict-aliasing  -g -O3 -arch ppc -arch i386  '
-                        '-isysroot /Developer/SDKs/MacOSX10.1.sdk',
-        'MACOSX_DEPLOYMENT_TARGET': '10.6',
-        }
-        result = _osx_support.get_platform_osx(config_vars, ' ', ' ', ' ')
-        self.assertEqual(('macosx', '10.6', 'fat'), result)
-
-def test_main():
-    if sys.platform == 'darwin':
-        test.support.run_unittest(Test_OSXSupport)
-
-if __name__ == "__main__":
-    test_main()
diff --git a/lib-python/3/test/test_file_eintr.py b/lib-python/3/test/test_file_eintr.py
deleted file mode 100644
--- a/lib-python/3/test/test_file_eintr.py
+++ /dev/null
@@ -1,236 +0,0 @@
-# Written to test interrupted system calls interfering with our many buffered
-# IO implementations.  http://bugs.python.org/issue12268
-#
-# It was suggested that this code could be merged into test_io and the tests
-# made to work using the same method as the existing signal tests in test_io.
-# I was unable to get single process tests using alarm or setitimer that way
-# to reproduce the EINTR problems.  This process based test suite reproduces
-# the problems prior to the issue12268 patch reliably on Linux and OSX.
-#  - gregory.p.smith
-
-import os
-import select
-import signal
-import subprocess
-import sys
-from test.support import run_unittest
-import time
-import unittest
-
-# Test import all of the things we're about to try testing up front.
-from _io import FileIO
-
-
-@unittest.skipUnless(os.name == 'posix', 'test requires a posix system.')
-class TestFileIOSignalInterrupt(unittest.TestCase):
-    def setUp(self):
-        self._process = None
-
-    def tearDown(self):
-        if self._process and self._process.poll() is None:
-            try:
-                self._process.kill()
-            except OSError:
-                pass
-
-    def _generate_infile_setup_code(self):
-        """Returns the infile = ... line of code for the reader process.
-
-        subclasses should override this to test different IO objects.
-        """
-        return ('import _io ;'
-                'infile = _io.FileIO(sys.stdin.fileno(), "rb")')
-
-    def fail_with_process_info(self, why, stdout=b'', stderr=b'',
-                               communicate=True):
-        """A common way to cleanup and fail with useful debug output.
-
-        Kills the process if it is still running, collects remaining output
-        and fails the test with an error message including the output.
-
-        Args:
-            why: Text to go after "Error from IO process" in the message.
-            stdout, stderr: standard output and error from the process so
-                far to include in the error message.
-            communicate: bool, when True we call communicate() on the process
-                after killing it to gather additional output.
-        """
-        if self._process.poll() is None:
-            time.sleep(0.1)  # give it time to finish printing the error.
-            try:
-                self._process.terminate()  # Ensure it dies.
-            except OSError:
-                pass
-        if communicate:
-            stdout_end, stderr_end = self._process.communicate()
-            stdout += stdout_end
-            stderr += stderr_end
-        self.fail('Error from IO process %s:\nSTDOUT:\n%sSTDERR:\n%s\n' %
-                  (why, stdout.decode(), stderr.decode()))
-
-    def _test_reading(self, data_to_write, read_and_verify_code):
-        """Generic buffered read method test harness to validate EINTR behavior.
-
-        Also validates that Python signal handlers are run during the read.
-
-        Args:
-            data_to_write: String to write to the child process for reading
-                before sending it a signal, confirming the signal was handled,
-                writing a final newline and closing the infile pipe.
-            read_and_verify_code: Single "line" of code to read from a file
-                object named 'infile' and validate the result.  This will be
-                executed as part of a python subprocess fed data_to_write.
-        """
-        infile_setup_code = self._generate_infile_setup_code()
-        # Total pipe IO in this function is smaller than the minimum posix OS
-        # pipe buffer size of 512 bytes.  No writer should block.
-        assert len(data_to_write) < 512, 'data_to_write must fit in pipe buf.'
-
-        # Start a subprocess to call our read method while handling a signal.
-        self._process = subprocess.Popen(
-                [sys.executable, '-u', '-c',
-                 'import signal, sys ;'
-                 'signal.signal(signal.SIGINT, '
-                               'lambda s, f: sys.stderr.write("$\\n")) ;'
-                 + infile_setup_code + ' ;' +
-                 'sys.stderr.write("Worm Sign!\\n") ;'
-                 + read_and_verify_code + ' ;' +
-                 'infile.close()'
-                ],
-                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE)
-
-        # Wait for the signal handler to be installed.
-        worm_sign = self._process.stderr.read(len(b'Worm Sign!\n'))
-        if worm_sign != b'Worm Sign!\n':  # See also, Dune by Frank Herbert.
-            self.fail_with_process_info('while awaiting a sign',
-                                        stderr=worm_sign)
-        self._process.stdin.write(data_to_write)
-
-        signals_sent = 0
-        rlist = []
-        # We don't know when the read_and_verify_code in our child is actually
-        # executing within the read system call we want to interrupt.  This
-        # loop waits for a bit before sending the first signal to increase
-        # the likelihood of that.  Implementations without correct EINTR
-        # and signal handling usually fail this test.
-        while not rlist:
-            rlist, _, _ = select.select([self._process.stderr], (), (), 0.05)
-            self._process.send_signal(signal.SIGINT)
-            signals_sent += 1
-            if signals_sent > 200:
-                self._process.kill()
-                self.fail('reader process failed to handle our signals.')
-        # This assumes anything unexpected that writes to stderr will also
-        # write a newline.  That is true of the traceback printing code.
-        signal_line = self._process.stderr.readline()
-        if signal_line != b'$\n':
-            self.fail_with_process_info('while awaiting signal',
-                                        stderr=signal_line)
-
-        # We append a newline to our input so that a readline call can
-        # end on its own before the EOF is seen and so that we're testing
-        # the read call that was interrupted by a signal before the end of
-        # the data stream has been reached.
-        stdout, stderr = self._process.communicate(input=b'\n')
-        if self._process.returncode:
-            self.fail_with_process_info(
-                    'exited rc=%d' % self._process.returncode,
-                    stdout, stderr, communicate=False)
-        # PASS!
-
-    # String format for the read_and_verify_code used by read methods.
-    _READING_CODE_TEMPLATE = (
-            'got = infile.{read_method_name}() ;'
-            'expected = {expected!r} ;'
-            'assert got == expected, ('
-                    '"{read_method_name} returned wrong data.\\n"'
-                    '"got data %r\\nexpected %r" % (got, expected))'
-            )
-
-    def test_readline(self):
-        """readline() must handle signals and not lose data."""
-        self._test_reading(
-                data_to_write=b'hello, world!',
-                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
-                        read_method_name='readline',
-                        expected=b'hello, world!\n'))
-
-    def test_readlines(self):
-        """readlines() must handle signals and not lose data."""
-        self._test_reading(
-                data_to_write=b'hello\nworld!',
-                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
-                        read_method_name='readlines',
-                        expected=[b'hello\n', b'world!\n']))
-
-    def test_readall(self):
-        """readall() must handle signals and not lose data."""
-        self._test_reading(
-                data_to_write=b'hello\nworld!',
-                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
-                        read_method_name='readall',
-                        expected=b'hello\nworld!\n'))
-        # read() is the same thing as readall().
-        self._test_reading(
-                data_to_write=b'hello\nworld!',
-                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
-                        read_method_name='read',
-                        expected=b'hello\nworld!\n'))
-
-
-class TestBufferedIOSignalInterrupt(TestFileIOSignalInterrupt):
-    def _generate_infile_setup_code(self):
-        """Returns the infile = ... line of code to make a BufferedReader."""
-        return ('infile = open(sys.stdin.fileno(), "rb") ;'
-                'import _io ;assert isinstance(infile, _io.BufferedReader)')
-
-    def test_readall(self):
-        """BufferedReader.read() must handle signals and not lose data."""
-        self._test_reading(
-                data_to_write=b'hello\nworld!',
-                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
-                        read_method_name='read',
-                        expected=b'hello\nworld!\n'))
-
-
-class TestTextIOSignalInterrupt(TestFileIOSignalInterrupt):
-    def _generate_infile_setup_code(self):
-        """Returns the infile = ... line of code to make a TextIOWrapper."""
-        return ('infile = open(sys.stdin.fileno(), "rt", newline=None) ;'
-                'import _io ;assert isinstance(infile, _io.TextIOWrapper)')
-
-    def test_readline(self):
-        """readline() must handle signals and not lose data."""
-        self._test_reading(
-                data_to_write=b'hello, world!',
-                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
-                        read_method_name='readline',
-                        expected='hello, world!\n'))
-
-    def test_readlines(self):
-        """readlines() must handle signals and not lose data."""
-        self._test_reading(
-                data_to_write=b'hello\r\nworld!',
-                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
-                        read_method_name='readlines',
-                        expected=['hello\n', 'world!\n']))
-
-    def test_readall(self):
-        """read() must handle signals and not lose data."""
-        self._test_reading(
-                data_to_write=b'hello\nworld!',
-                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
-                        read_method_name='read',
-                        expected="hello\nworld!\n"))
-
-
-def test_main():
-    test_cases = [
-            tc for tc in globals().values()
-            if isinstance(tc, type) and issubclass(tc, unittest.TestCase)]
-    run_unittest(*test_cases)
-
-
-if __name__ == '__main__':
-    test_main()
diff --git a/lib-python/3/test/test_ssl.py b/lib-python/3/test/test_ssl.py
deleted file mode 100644
--- a/lib-python/3/test/test_ssl.py
+++ /dev/null
@@ -1,3987 +0,0 @@
-# Test the support for SSL and sockets
-
-import sys
-import unittest
-from test import support
-import socket
-import select
-import time
-import datetime
-import gc
-import os
-import errno
-import pprint
-import tempfile
-import urllib.request
-import traceback
-import asyncore
-import weakref
-import platform
-import re
-import functools
-try:
-    import ctypes
-except ImportError:
-    ctypes = None
-
-ssl = support.import_module("ssl")
-
-try:
-    import threading
-except ImportError:
-    _have_threads = False
-else:
-    _have_threads = True
-
-PROTOCOLS = sorted(ssl._PROTOCOL_NAMES)
-HOST = support.HOST
-IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL')
-IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0)
-
-
-def data_file(*name):
-    return os.path.join(os.path.dirname(__file__), *name)
-
-# The custom key and certificate files used in test_ssl are generated
-# using Lib/test/make_ssl_certs.py.
-# Other certificates are simply fetched from the Internet servers they
-# are meant to authenticate.
-
-CERTFILE = data_file("keycert.pem")
-BYTES_CERTFILE = os.fsencode(CERTFILE)
-ONLYCERT = data_file("ssl_cert.pem")
-ONLYKEY = data_file("ssl_key.pem")
-BYTES_ONLYCERT = os.fsencode(ONLYCERT)
-BYTES_ONLYKEY = os.fsencode(ONLYKEY)
-CERTFILE_PROTECTED = data_file("keycert.passwd.pem")
-ONLYKEY_PROTECTED = data_file("ssl_key.passwd.pem")
-KEY_PASSWORD = "somepass"
-CAPATH = data_file("capath")
-BYTES_CAPATH = os.fsencode(CAPATH)
-CAFILE_NEURONIO = data_file("capath", "4e1295a3.0")
-CAFILE_CACERT = data_file("capath", "5ed36f99.0")
-
-# empty CRL
-CRLFILE = data_file("revocation.crl")
-
-# Two keys and certs signed by the same CA (for SNI tests)
-SIGNED_CERTFILE = data_file("keycert3.pem")
-SIGNED_CERTFILE2 = data_file("keycert4.pem")
-# Same certificate as pycacert.pem, but without extra text in file
-SIGNING_CA = data_file("capath", "ceff1710.0")
-# cert with all kinds of subject alt names
-ALLSANFILE = data_file("allsans.pem")
-
-REMOTE_HOST = "self-signed.pythontest.net"
-
-EMPTYCERT = data_file("nullcert.pem")
-BADCERT = data_file("badcert.pem")
-NONEXISTINGCERT = data_file("XXXnonexisting.pem")
-BADKEY = data_file("badkey.pem")
-NOKIACERT = data_file("nokia.pem")
-NULLBYTECERT = data_file("nullbytecert.pem")
-TALOS_INVALID_CRLDP = data_file("talos-2019-0758.pem")
-
-DHFILE = data_file("ffdh3072.pem")
-BYTES_DHFILE = os.fsencode(DHFILE)
-
-# Not defined in all versions of OpenSSL
-OP_NO_COMPRESSION = getattr(ssl, "OP_NO_COMPRESSION", 0)
-OP_SINGLE_DH_USE = getattr(ssl, "OP_SINGLE_DH_USE", 0)
-OP_SINGLE_ECDH_USE = getattr(ssl, "OP_SINGLE_ECDH_USE", 0)
-OP_CIPHER_SERVER_PREFERENCE = getattr(ssl, "OP_CIPHER_SERVER_PREFERENCE", 0)
-OP_ENABLE_MIDDLEBOX_COMPAT = getattr(ssl, "OP_ENABLE_MIDDLEBOX_COMPAT", 0)
-
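Because of the getattr(..., 0) fallbacks above, tests can OR these options
into a context unconditionally: on an OpenSSL build that lacks a flag the
value is 0 and the operation is a no-op rather than an AttributeError.  A
minimal sketch of the idea (the context and option choice are illustrative):

    import ssl

    OP_NO_COMPRESSION = getattr(ssl, "OP_NO_COMPRESSION", 0)

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS)
    # OR-ing in 0 leaves ctx.options unchanged on builds without the flag.
    ctx.options |= OP_NO_COMPRESSION
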
-
-def handle_error(prefix):
-    exc_format = ' '.join(traceback.format_exception(*sys.exc_info()))
-    if support.verbose:
-        sys.stdout.write(prefix + exc_format)
-
-def can_clear_options():
-    # 0.9.8m or higher
-    return ssl._OPENSSL_API_VERSION >= (0, 9, 8, 13, 15)
-
-def no_sslv2_implies_sslv3_hello():
-    # 0.9.7h or higher
-    return ssl.OPENSSL_VERSION_INFO >= (0, 9, 7, 8, 15)
-
-def have_verify_flags():
-    # 0.9.8 or higher
-    return ssl.OPENSSL_VERSION_INFO >= (0, 9, 8, 0, 15)
-
-def utc_offset(): #NOTE: ignore issues like #1647654
-    # local time = utc time + utc offset
-    if time.daylight and time.localtime().tm_isdst > 0:
-        return -time.altzone  # seconds
-    return -time.timezone
-
-def asn1time(cert_time):
-    # Some versions of OpenSSL ignore seconds, see #18207
-    # 0.9.8.i
-    if ssl._OPENSSL_API_VERSION == (0, 9, 8, 9, 15):
-        fmt = "%b %d %H:%M:%S %Y GMT"
-        dt = datetime.datetime.strptime(cert_time, fmt)
-        dt = dt.replace(second=0)
-        cert_time = dt.strftime(fmt)
-        # %d adds leading zero but ASN1_TIME_print() uses leading space
-        if cert_time[4] == "0":
-            cert_time = cert_time[:4] + " " + cert_time[5:]
-
-    return cert_time
-
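Concretely, asn1time() only rewrites its argument on the single OpenSSL API
version checked above; for the value test_parse_cert passes in:

    asn1time('Aug 26 14:23:15 2028 GMT')
    # -> 'Aug 26 14:23:00 2028 GMT' when ssl._OPENSSL_API_VERSION == (0, 9, 8, 9, 15)
    # -> 'Aug 26 14:23:15 2028 GMT' (unchanged) on every other version
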
-# Issue #9415: Ubuntu hijacks their OpenSSL and forcefully disables SSLv2
-def skip_if_broken_ubuntu_ssl(func):
-    if hasattr(ssl, 'PROTOCOL_SSLv2'):
-        @functools.wraps(func)
-        def f(*args, **kwargs):
-            try:
-                ssl.SSLContext(ssl.PROTOCOL_SSLv2)
-            except ssl.SSLError:
-                if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
-                    platform.linux_distribution() == ('debian', 'squeeze/sid', '')):
-                    raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
-            return func(*args, **kwargs)
-        return f
-    else:
-        return func
-
-def skip_if_openssl_cnf_minprotocol_gt_tls1(func):
-    """Skip a test if the OpenSSL config MinProtocol is > TLSv1.
-
-    OS distros with an /etc/ssl/openssl.cnf and MinProtocol set often do so to
-    require TLSv1.2 or higher (Debian Buster).  Some of our tests for older
-    protocol versions will fail under such a config.
-
-    Alternative workaround: Run this test in a process with
-    OPENSSL_CONF=/dev/null in the environment.
-    """
-    @functools.wraps(func)
-    def f(*args, **kwargs):
-        openssl_cnf = os.environ.get("OPENSSL_CONF", "/etc/ssl/openssl.cnf")
-        try:
-            with open(openssl_cnf, "r") as config:
-                for line in config:
-                    match = re.match(r"MinProtocol\s*=\s*(TLSv\d+\S*)", line)
-                    if match:
-                        tls_ver = match.group(1)
-                        if tls_ver > "TLSv1":
-                            raise unittest.SkipTest(
-                                "%s has MinProtocol = %s which is > TLSv1." %
-                                (openssl_cnf, tls_ver))
-        except (EnvironmentError, UnicodeDecodeError) as err:
-            # no config file found, etc.
-            if support.verbose:
-                sys.stdout.write("\n Could not scan %s for MinProtocol: %s\n"
-                                 % (openssl_cnf, err))
-        return func(*args, **kwargs)
-    return f
-
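Like skip_if_broken_ubuntu_ssl above, this decorator is applied to
individual test methods; a purely hypothetical usage sketch:

    class ProtocolTests(unittest.TestCase):
        @skip_if_broken_ubuntu_ssl
        @skip_if_openssl_cnf_minprotocol_gt_tls1
        def test_tlsv1_handshake(self):
            ...   # would exercise an older protocol such as PROTOCOL_TLSv1
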
-
-needs_sni = unittest.skipUnless(ssl.HAS_SNI, "SNI support needed for this test")
-
-
-def test_wrap_socket(sock, ssl_version=ssl.PROTOCOL_TLS, *,
-                     cert_reqs=ssl.CERT_NONE, ca_certs=None,
-                     ciphers=None, certfile=None, keyfile=None,
-                     **kwargs):
-    context = ssl.SSLContext(ssl_version)
-    if cert_reqs is not None:
-        context.verify_mode = cert_reqs
-    if ca_certs is not None:
-        context.load_verify_locations(ca_certs)
-    if certfile is not None or keyfile is not None:
-        context.load_cert_chain(certfile, keyfile)
-    if ciphers is not None:
-        context.set_ciphers(ciphers)
-    return context.wrap_socket(sock, **kwargs)
-
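test_wrap_socket() mirrors the keyword-argument convenience of the old
module-level ssl.wrap_socket() on top of an explicit SSLContext.  A hedged
sketch of how a connection test might call it (server_port is hypothetical;
HOST and SIGNING_CA are the module constants defined above):

    import socket

    with socket.create_connection((HOST, server_port)) as sock:
        with test_wrap_socket(sock, cert_reqs=ssl.CERT_REQUIRED,
                              ca_certs=SIGNING_CA) as ssock:
            ssock.sendall(b'ping')
            print(ssock.version())   # e.g. 'TLSv1.2', depending on the build
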
-class BasicSocketTests(unittest.TestCase):
-
-    def test_constants(self):
-        ssl.CERT_NONE
-        ssl.CERT_OPTIONAL
-        ssl.CERT_REQUIRED
-        ssl.OP_CIPHER_SERVER_PREFERENCE
-        ssl.OP_SINGLE_DH_USE
-        if ssl.HAS_ECDH:
-            ssl.OP_SINGLE_ECDH_USE
-        if ssl.OPENSSL_VERSION_INFO >= (1, 0):
-            ssl.OP_NO_COMPRESSION
-        self.assertIn(ssl.HAS_SNI, {True, False})
-        self.assertIn(ssl.HAS_ECDH, {True, False})
-        ssl.OP_NO_SSLv2
-        ssl.OP_NO_SSLv3
-        ssl.OP_NO_TLSv1
-        ssl.OP_NO_TLSv1_3
-        if ssl.OPENSSL_VERSION_INFO >= (1, 0, 1):
-            ssl.OP_NO_TLSv1_1
-            ssl.OP_NO_TLSv1_2
-
-    def test_str_for_enums(self):
-        # Make sure that the PROTOCOL_* constants have enum-like string
-        # reprs.
-        proto = ssl.PROTOCOL_TLS
-        self.assertEqual(str(proto), '_SSLMethod.PROTOCOL_TLS')
-        ctx = ssl.SSLContext(proto)
-        self.assertIs(ctx.protocol, proto)
-
-    def test_random(self):
-        v = ssl.RAND_status()
-        if support.verbose:
-            sys.stdout.write("\n RAND_status is %d (%s)\n"
-                             % (v, (v and "sufficient randomness") or
-                                "insufficient randomness"))
-
-        data, is_cryptographic = ssl.RAND_pseudo_bytes(16)
-        self.assertEqual(len(data), 16)
-        self.assertEqual(is_cryptographic, v == 1)
-        if v:
-            data = ssl.RAND_bytes(16)
-            self.assertEqual(len(data), 16)
-        else:
-            self.assertRaises(ssl.SSLError, ssl.RAND_bytes, 16)
-
-        # negative num is invalid
-        self.assertRaises(ValueError, ssl.RAND_bytes, -5)
-        self.assertRaises(ValueError, ssl.RAND_pseudo_bytes, -5)
-
-        if hasattr(ssl, 'RAND_egd'):
-            self.assertRaises(TypeError, ssl.RAND_egd, 1)
-            self.assertRaises(TypeError, ssl.RAND_egd, 'foo', 1)
-        ssl.RAND_add("this is a random string", 75.0)
-        ssl.RAND_add(b"this is a random bytes object", 75.0)
-        ssl.RAND_add(bytearray(b"this is a random bytearray object"), 75.0)
-
-    @unittest.skipUnless(os.name == 'posix', 'requires posix')
-    def test_random_fork(self):
-        status = ssl.RAND_status()
-        if not status:
-            self.fail("OpenSSL's PRNG has insufficient randomness")
-
-        rfd, wfd = os.pipe()
-        pid = os.fork()
-        if pid == 0:
-            try:
-                os.close(rfd)
-                child_random = ssl.RAND_pseudo_bytes(16)[0]
-                self.assertEqual(len(child_random), 16)
-                os.write(wfd, child_random)
-                os.close(wfd)
-            except BaseException:
-                os._exit(1)
-            else:
-                os._exit(0)
-        else:
-            os.close(wfd)
-            self.addCleanup(os.close, rfd)
-            _, status = os.waitpid(pid, 0)
-            self.assertEqual(status, 0)
-
-            child_random = os.read(rfd, 16)
-            self.assertEqual(len(child_random), 16)
-            parent_random = ssl.RAND_pseudo_bytes(16)[0]
-            self.assertEqual(len(parent_random), 16)
-
-            self.assertNotEqual(child_random, parent_random)
-
-    maxDiff = None
-
-    def test_parse_cert(self):
-        # note that this uses an 'unofficial' function in _ssl.c,
-        # provided solely for this test, to exercise the certificate
-        # parsing code
-        p = ssl._ssl._test_decode_cert(CERTFILE)
-        if support.verbose:
-            sys.stdout.write("\n" + pprint.pformat(p) + "\n")
-        self.assertEqual(p['issuer'],
-                         ((('countryName', 'XY'),),
-                          (('localityName', 'Castle Anthrax'),),
-                          (('organizationName', 'Python Software Foundation'),),
-                          (('commonName', 'localhost'),))
-                        )
-        # Note the next three asserts will fail if the keys are regenerated
-        self.assertEqual(p['notAfter'], asn1time('Aug 26 14:23:15 2028 GMT'))
-        self.assertEqual(p['notBefore'], asn1time('Aug 29 14:23:15 2018 GMT'))
-        self.assertEqual(p['serialNumber'], '98A7CF88C74A32ED')
-        self.assertEqual(p['subject'],
-                         ((('countryName', 'XY'),),
-                          (('localityName', 'Castle Anthrax'),),
-                          (('organizationName', 'Python Software Foundation'),),
-                          (('commonName', 'localhost'),))
-                        )
-        self.assertEqual(p['subjectAltName'], (('DNS', 'localhost'),))
-        # Issue #13034: the subjectAltName in some certificates
-        # (notably projects.developer.nokia.com:443) wasn't parsed
-        p = ssl._ssl._test_decode_cert(NOKIACERT)
-        if support.verbose:
-            sys.stdout.write("\n" + pprint.pformat(p) + "\n")
-        self.assertEqual(p['subjectAltName'],
-                         (('DNS', 'projects.developer.nokia.com'),
-                          ('DNS', 'projects.forum.nokia.com'))
-                        )
-        # extra OCSP and AIA fields
-        self.assertEqual(p['OCSP'], ('http://ocsp.verisign.com',))
-        self.assertEqual(p['caIssuers'],
-                         ('http://SVRIntl-G3-aia.verisign.com/SVRIntlG3.cer',))
-        self.assertEqual(p['crlDistributionPoints'],
-                         ('http://SVRIntl-G3-crl.verisign.com/SVRIntlG3.crl',))
-
-    def test_parse_cert_CVE_2019_5010(self):
-        p = ssl._ssl._test_decode_cert(TALOS_INVALID_CRLDP)
-        if support.verbose:
-            sys.stdout.write("\n" + pprint.pformat(p) + "\n")
-        self.assertEqual(
-            p,
-            {
-                'issuer': (
-                    (('countryName', 'UK'),), (('commonName', 'cody-ca'),)),
-                'notAfter': 'Jun 14 18:00:58 2028 GMT',
-                'notBefore': 'Jun 18 18:00:58 2018 GMT',
-                'serialNumber': '02',
-                'subject': ((('countryName', 'UK'),),
-                            (('commonName',
-                              'codenomicon-vm-2.test.lal.cisco.com'),)),
-                'subjectAltName': (
-                    ('DNS', 'codenomicon-vm-2.test.lal.cisco.com'),),
-                'version': 3
-            }
-        )
-
-    def test_parse_cert_CVE_2013_4238(self):
-        p = ssl._ssl._test_decode_cert(NULLBYTECERT)
-        if support.verbose:
-            sys.stdout.write("\n" + pprint.pformat(p) + "\n")
-        subject = ((('countryName', 'US'),),
-                   (('stateOrProvinceName', 'Oregon'),),
-                   (('localityName', 'Beaverton'),),
-                   (('organizationName', 'Python Software Foundation'),),
-                   (('organizationalUnitName', 'Python Core Development'),),
-                   (('commonName', 'null.python.org\x00example.org'),),
-                   (('emailAddress', 'python-dev at python.org'),))
-        self.assertEqual(p['subject'], subject)
-        self.assertEqual(p['issuer'], subject)
-        if ssl._OPENSSL_API_VERSION >= (0, 9, 8):
-            san = (('DNS', 'altnull.python.org\x00example.com'),
-                   ('email', 'null at python.org\x00user at example.org'),
-                   ('URI', 'http://null.python.org\x00http://example.org'),
-                   ('IP Address', '192.0.2.1'),
-                   ('IP Address', '2001:DB8:0:0:0:0:0:1\n'))
-        else:
-            # OpenSSL 0.9.7 doesn't support IPv6 addresses in subjectAltName
-            san = (('DNS', 'altnull.python.org\x00example.com'),
-                   ('email', 'null at python.org\x00user at example.org'),
-                   ('URI', 'http://null.python.org\x00http://example.org'),
-                   ('IP Address', '192.0.2.1'),
-                   ('IP Address', '<invalid>'))

