[Python-checkins] cpython (merge default -> default): merge

brett.cannon python-checkins at python.org
Fri Apr 6 18:55:30 CEST 2012


http://hg.python.org/cpython/rev/fbbb14604e94
changeset:   76139:fbbb14604e94
parent:      76138:1a3eb3b8ba42
parent:      76137:d8c5c0f7aa56
user:        Brett Cannon <brett at python.org>
date:        Fri Apr 06 12:54:57 2012 -0400
summary:
  merge

files:
  .bzrignore                           |    1 -
  .gitignore                           |    1 -
  .hgignore                            |    1 -
  Doc/library/_thread.rst              |    2 +-
  Doc/library/collections.rst          |    4 +-
  Doc/library/subprocess.rst           |    2 +-
  Doc/library/threading.rst            |    4 +-
  Doc/whatsnew/3.3.rst                 |   51 +-
  Include/methodobject.h               |   16 +-
  Include/object.h                     |    5 +
  Lib/collections/abc.py               |   10 +-
  Lib/idlelib/NEWS.txt                 |    3 +
  Lib/idlelib/ScriptBinding.py         |   12 +-
  Lib/idlelib/tabbedpages.py           |    4 +-
  Lib/multiprocessing/connection.py    |    4 +
  Lib/test/seq_tests.py                |    7 +
  Lib/test/test_array.py               |   14 +
  Lib/test/test_asyncore.py            |    7 +-
  Lib/test/test_builtin.py             |   41 +
  Lib/test/test_bytes.py               |   18 +
  Lib/test/test_decimal.py             |   72 +
  Lib/test/test_deque.py               |   13 +
  Lib/test/test_dict.py                |   54 +
  Lib/test/test_enumerate.py           |   30 +-
  Lib/test/test_iter.py                |   43 +-
  Lib/test/test_itertools.py           |  386 ++++++-
  Lib/test/test_list.py                |   28 +
  Lib/test/test_multiprocessing.py     |    4 +
  Lib/test/test_range.py               |   24 +-
  Lib/test/test_set.py                 |   21 +
  Lib/test/test_tools.py               |   73 +-
  Lib/test/test_tuple.py               |   29 +
  Lib/test/test_xml_etree.py           |   35 +
  Lib/tkinter/font.py                  |   63 +-
  Lib/tkinter/ttk.py                   |    2 +-
  Lib/webbrowser.py                    |    8 +
  Makefile.pre.in                      |    2 +-
  Misc/ACKS                            |    2 +
  Misc/NEWS                            |   20 +-
  Modules/_collectionsmodule.c         |   93 +-
  Modules/_decimal/_decimal.c          |   16 +-
  Modules/_decimal/tests/deccheck.py   |    1 +
  Modules/_elementtree.c               |   88 +-
  Modules/arraymodule.c                |   30 +-
  Modules/itertoolsmodule.c            |  890 ++++++++++++++-
  Objects/bytearrayobject.c            |   36 +-
  Objects/bytesobject.c                |   34 +
  Objects/dictobject.c                 |   53 +
  Objects/enumobject.c                 |   50 +-
  Objects/iterobject.c                 |   47 +-
  Objects/listobject.c                 |   80 +
  Objects/object.c                     |   13 +
  Objects/rangeobject.c                |   92 +
  Objects/setobject.c                  |   45 +-
  Objects/tupleobject.c                |   31 +
  Objects/unicodeobject.c              |   85 +-
  Python/bltinmodule.c                 |   56 +-
  Python/pythonrun.c                   |   57 +-
  Tools/scripts/abitype.py             |   88 +-
  Tools/scripts/find_recursionlimit.py |   24 +-
  Tools/scripts/findnocoding.py        |   46 +-
  Tools/scripts/fixcid.py              |    2 +-
  Tools/scripts/md5sum.py              |    2 +-
  Tools/scripts/parseentities.py       |    3 +-
  Tools/scripts/pdeps.py               |   10 +-
  65 files changed, 2751 insertions(+), 337 deletions(-)


diff --git a/.bzrignore b/.bzrignore
--- a/.bzrignore
+++ b/.bzrignore
@@ -33,7 +33,6 @@
 Modules/config.c
 Modules/ld_so_aix
 Parser/pgen
-Parser/pgen.stamp
 Lib/test/data/*
 Lib/lib2to3/Grammar*.pickle
 Lib/lib2to3/PatternGrammar*.pickle
diff --git a/.gitignore b/.gitignore
--- a/.gitignore
+++ b/.gitignore
@@ -32,7 +32,6 @@
 PCbuild/*.pdb
 PCbuild/Win32-temp-*
 Parser/pgen
-Parser/pgen.stamp
 __pycache__
 autom4te.cache
 build/
diff --git a/.hgignore b/.hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -32,7 +32,6 @@
 Modules/config.c
 Modules/ld_so_aix$
 Parser/pgen$
-Parser/pgen.stamp$
 PCbuild/amd64/
 ^core
 ^python-gdb.py
diff --git a/Doc/library/_thread.rst b/Doc/library/_thread.rst
--- a/Doc/library/_thread.rst
+++ b/Doc/library/_thread.rst
@@ -94,7 +94,7 @@
    *size* argument specifies the stack size to be used for subsequently created
    threads, and must be 0 (use platform or configured default) or a positive
    integer value of at least 32,768 (32kB). If changing the thread stack size is
-   unsupported, a :exc:`ThreadError` is raised.  If the specified stack size is
+   unsupported, a :exc:`RuntimeError` is raised.  If the specified stack size is
    invalid, a :exc:`ValueError` is raised and the stack size is unmodified.  32kB
    is currently the minimum supported stack size value to guarantee sufficient
    stack space for the interpreter itself.  Note that some platforms may have
diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst
--- a/Doc/library/collections.rst
+++ b/Doc/library/collections.rst
@@ -41,6 +41,8 @@
 :class:`ChainMap` objects
 -------------------------
 
+.. versionadded:: 3.3
+
 A :class:`ChainMap` class is provided for quickly linking a number of mappings
 so they can be treated as a single unit.  It is often much faster than creating
 a new dictionary and running multiple :meth:`~dict.update` calls.
@@ -91,8 +93,6 @@
       The use-cases also parallel those for the builtin :func:`super` function.
       A reference to  ``d.parents`` is equivalent to: ``ChainMap(*d.maps[1:])``.
 
-   .. versionadded:: 3.3
-
    Example of simulating Python's internal lookup chain::
 
       import builtins
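
The ChainMap section added above describes linking several mappings so they behave as a single
unit, with lookups falling through the chain. A minimal usage sketch (the mapping names
``defaults``, ``overrides`` and ``local`` are illustrative, not from the patch):

    from collections import ChainMap

    defaults = {'color': 'red', 'user': 'guest'}
    overrides = {'user': 'alice'}

    settings = ChainMap(overrides, defaults)   # first mapping wins on lookup
    assert settings['user'] == 'alice'         # found in overrides
    assert settings['color'] == 'red'          # falls through to defaults

    # new_child() returns a new ChainMap with a fresh dict in front,
    # mirroring the builtins/globals/locals chain the doc goes on to show.
    local = settings.new_child()
    local['user'] = 'bob'
    assert settings['user'] == 'alice'         # the parent chain is untouched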
diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst
--- a/Doc/library/subprocess.rst
+++ b/Doc/library/subprocess.rst
@@ -804,7 +804,7 @@
 to receive a SIGPIPE if p2 exits before p1.
 
 Alternatively, for trusted input, the shell's own pipeline support may still
-be used directly:
+be used directly::
 
    output=`dmesg | grep hda`
    # becomes
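
The passage above notes that, for trusted input, the shell's own pipeline support may be used
directly. A minimal sketch of that shell-delegating form in Python, suitable only for a fully
trusted, fixed command string:

    import subprocess

    # Let the shell build the pipeline itself; safe only when the command
    # string is trusted, as the surrounding doc text stresses.
    output = subprocess.check_output("dmesg | grep hda", shell=True)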
diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst
--- a/Doc/library/threading.rst
+++ b/Doc/library/threading.rst
@@ -174,7 +174,7 @@
    *size* argument specifies the stack size to be used for subsequently created
    threads, and must be 0 (use platform or configured default) or a positive
    integer value of at least 32,768 (32kB). If changing the thread stack size is
-   unsupported, a :exc:`ThreadError` is raised.  If the specified stack size is
+   unsupported, a :exc:`RuntimeError` is raised.  If the specified stack size is
    invalid, a :exc:`ValueError` is raised and the stack size is unmodified.  32kB
    is currently the minimum supported stack size value to guarantee sufficient
    stack space for the interpreter itself.  Note that some platforms may have
@@ -452,7 +452,7 @@
    are blocked waiting for the lock to become unlocked, allow exactly one of them
    to proceed.
 
-   Do not call this method when the lock is unlocked.
+   When invoked on an unlocked lock, a :exc:`RuntimeError` is raised.
 
    There is no return value.
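
The two threading.rst hunks above document that both :func:`~threading.stack_size` (when the
platform cannot change the stack size) and releasing a :class:`~threading.Lock` that is not held
raise :exc:`RuntimeError`. A minimal sketch of the latter behaviour:

    import threading

    lock = threading.Lock()
    lock.acquire()
    # ... critical section ...
    lock.release()

    try:
        lock.release()          # the lock is no longer held
    except RuntimeError:
        pass                    # raised, as the updated docs now state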
 
diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst
--- a/Doc/whatsnew/3.3.rst
+++ b/Doc/whatsnew/3.3.rst
@@ -486,6 +486,8 @@
 
   (:issue:`10516`)
 
+.. XXX mention new error messages for passing wrong number of arguments to functions
+
 New and Improved Modules
 ========================
 
@@ -572,6 +574,26 @@
 
 The ``unicode_internal`` codec has been deprecated.
 
+
+collections
+-----------
+
+Addition of a new :class:`~collections.ChainMap` class to allow treating a
+number of mappings as a single unit.
+
+(Written by Raymond Hettinger for :issue:`11089`, made public in
+:issue:`11297`)
+
+The abstract base classes have been moved to a new :mod:`collections.abc`
+module, to better differentiate between the abstract and the concrete
+collections classes.  Aliases for ABCs are still present in the
+:mod:`collections` module to preserve existing imports.
+
+(:issue:`11085`)
+
+.. XXX addition of __slots__ to ABCs not recorded here: internal detail
+
+
 crypt
 -----
 
@@ -865,11 +887,12 @@
 ---------
 
 :mod:`distutils` has undergone additions and refactoring under a new name,
-:mod:`packaging`, to allow developers to break backward compatibility.
+:mod:`packaging`, to allow developers to make far-reaching changes without
+being constrained by backward compatibility.
 :mod:`distutils` is still provided in the standard library, but users are
 encouraged to transition to :mod:`packaging`.  For older versions of Python, a
-backport compatible with 2.4+ and 3.1+ will be made available on PyPI under the
-name :mod:`distutils2`.
+backport compatible with Python 2.5 and newer, and with 3.2, is available on PyPI
+under the name `distutils2 <http://pypi.python.org/pypi/Distutils2>`_.
 
 .. TODO add examples and howto to the packaging docs and link to them
 
@@ -1057,12 +1080,24 @@
 (:issue:`1673007`)
 
 
+webbrowser
+----------
+
+The :mod:`webbrowser` module supports more browsers: Google Chrome (named
+:program:`chrome`, :program:`chromium`, :program:`chrome-browser` or
+:program:`chromium-browser` depending on the version and operating system) as
+well as the generic launchers :program:`xdg-open` from the FreeDesktop.org
+project and :program:`gvfs-open`, which is the default URI handler for GNOME 3.
+
+(:issue:`13620` and :issue:`14493`)
+
+
 Optimizations
 =============
 
 Major performance enhancements have been added:
 
-* Thanks to the :pep:`393`, some operations on Unicode strings has been optimized:
+* Thanks to :pep:`393`, some operations on Unicode strings have been optimized:
 
   * the memory footprint is divided by 2 to 4 depending on the text
   * encode an ASCII string to UTF-8 doesn't need to encode characters anymore,
@@ -1081,7 +1116,7 @@
 
   * :c:func:`PyMemoryView_FromMemory`
 
-* The :pep:`393` added new Unicode types, macros and functions:
+* :pep:`393` added new Unicode types, macros and functions:
 
   * High-level API:
 
@@ -1124,7 +1159,7 @@
 Deprecated Python modules, functions and methods
 ------------------------------------------------
 
-* The :mod:`distutils` modules has been deprecated.  Use the new
+* The :mod:`distutils` module has been deprecated.  Use the new
   :mod:`packaging` module instead.
 * The ``unicode_internal`` codec has been deprecated because of the
   :pep:`393`, use UTF-8, UTF-16 (``utf-16-le`` or ``utf-16-be``), or UTF-32
@@ -1143,7 +1178,7 @@
 Deprecated functions and types of the C API
 -------------------------------------------
 
-The :c:type:`Py_UNICODE` has been deprecated by the :pep:`393` and will be
+The :c:type:`Py_UNICODE` type has been deprecated by :pep:`393` and will be
 removed in Python 4. All functions using this type are deprecated:
 
 Unicode functions and methods using :c:type:`Py_UNICODE` and
@@ -1245,7 +1280,7 @@
   functions using this type are deprecated (but will stay available for
   at least five years).  If you were using low-level Unicode APIs to
   construct and access unicode objects and you want to benefit of the
-  memory footprint reduction provided by the PEP 393, you have to convert
+  memory footprint reduction provided by PEP 393, you have to convert
   your code to the new :doc:`Unicode API <../c-api/unicode>`.
 
   However, if you only have been using high-level functions such as
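
Returning to the webbrowser entry in the whatsnew text above, a minimal sketch of how the module
is typically driven (the URL is illustrative; "chrome" is one of the backend names the entry
lists, and get() raises webbrowser.Error when that backend is not registered):

    import webbrowser

    # Open a page in the user's preferred browser; on Linux this can now be
    # satisfied by the xdg-open or gvfs-open launchers mentioned above.
    webbrowser.open("http://python.org")

    # A specific backend can be requested by name when it is available.
    chrome = webbrowser.get("chrome")        # webbrowser.Error if not registered
    chrome.open_new_tab("http://python.org")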
diff --git a/Include/methodobject.h b/Include/methodobject.h
--- a/Include/methodobject.h
+++ b/Include/methodobject.h
@@ -17,7 +17,7 @@
 
 typedef PyObject *(*PyCFunction)(PyObject *, PyObject *);
 typedef PyObject *(*PyCFunctionWithKeywords)(PyObject *, PyObject *,
-					     PyObject *);
+                                             PyObject *);
 typedef PyObject *(*PyNoArgsFunction)(PyObject *);
 
 PyAPI_FUNC(PyCFunction) PyCFunction_GetFunction(PyObject *);
@@ -33,22 +33,22 @@
         (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? \
          NULL : ((PyCFunctionObject *)func) -> m_self)
 #define PyCFunction_GET_FLAGS(func) \
-	(((PyCFunctionObject *)func) -> m_ml -> ml_flags)
+        (((PyCFunctionObject *)func) -> m_ml -> ml_flags)
 #endif
 PyAPI_FUNC(PyObject *) PyCFunction_Call(PyObject *, PyObject *, PyObject *);
 
 struct PyMethodDef {
-    const char	*ml_name;	/* The name of the built-in function/method */
-    PyCFunction  ml_meth;	/* The C function that implements it */
-    int		 ml_flags;	/* Combination of METH_xxx flags, which mostly
-				   describe the args expected by the C func */
-    const char	*ml_doc;	/* The __doc__ attribute, or NULL */
+    const char  *ml_name;   /* The name of the built-in function/method */
+    PyCFunction ml_meth;    /* The C function that implements it */
+    int         ml_flags;   /* Combination of METH_xxx flags, which mostly
+                               describe the args expected by the C func */
+    const char  *ml_doc;    /* The __doc__ attribute, or NULL */
 };
 typedef struct PyMethodDef PyMethodDef;
 
 #define PyCFunction_New(ML, SELF) PyCFunction_NewEx((ML), (SELF), NULL)
 PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *, 
-					 PyObject *);
+                                         PyObject *);
 
 /* Flag passed to newmethodobject */
 /* #define METH_OLDARGS  0x0000   -- unsupported now */
diff --git a/Include/object.h b/Include/object.h
--- a/Include/object.h
+++ b/Include/object.h
@@ -535,6 +535,11 @@
 _PyObject_GenericSetAttrWithDict(PyObject *, PyObject *,
                                  PyObject *, PyObject *);
 
+/* Helper to look up a builtin object */
+#ifndef Py_LIMITED_API
+PyAPI_FUNC(PyObject *)
+_PyObject_GetBuiltin(const char *name);
+#endif
 
 /* PyObject_Dir(obj) acts like Python builtins.dir(obj), returning a
    list of strings.  PyObject_Dir(NULL) is like builtins.dir(),
diff --git a/Lib/collections/abc.py b/Lib/collections/abc.py
--- a/Lib/collections/abc.py
+++ b/Lib/collections/abc.py
@@ -18,9 +18,13 @@
            "ByteString",
            ]
 
-
-### collection related types which are not exposed through builtin ###
-## iterators ##
+# Private list of types that we want to register with the various ABCs
+# so that they will pass tests like:
+#       it = iter(somebytearray)
+#       assert isinstance(it, Iterable)
+# Note:  in other implementations, these types may not be distinct
+# and they may have their own implementation-specific types that
+# are not included in this list.
 bytes_iterator = type(iter(b''))
 bytearray_iterator = type(iter(bytearray()))
 #callable_iterator = ???
diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt
--- a/Lib/idlelib/NEWS.txt
+++ b/Lib/idlelib/NEWS.txt
@@ -1,6 +1,9 @@
 What's New in IDLE 3.3?
 =========================
 
+- Issue #8515: Set __file__ when running a file in IDLE.
+  Initial patch by Bruce Frederiksen.
+
 - IDLE can be launched as `python -m idlelib`
 
 - Issue #14409: IDLE now properly executes commands in the Shell window
diff --git a/Lib/idlelib/ScriptBinding.py b/Lib/idlelib/ScriptBinding.py
--- a/Lib/idlelib/ScriptBinding.py
+++ b/Lib/idlelib/ScriptBinding.py
@@ -150,16 +150,16 @@
         dirname = os.path.dirname(filename)
         # XXX Too often this discards arguments the user just set...
         interp.runcommand("""if 1:
-            _filename = %r
+            __file__ = {filename!r}
             import sys as _sys
             from os.path import basename as _basename
             if (not _sys.argv or
-                _basename(_sys.argv[0]) != _basename(_filename)):
-                _sys.argv = [_filename]
+                _basename(_sys.argv[0]) != _basename(__file__)):
+                _sys.argv = [__file__]
             import os as _os
-            _os.chdir(%r)
-            del _filename, _sys, _basename, _os
-            \n""" % (filename, dirname))
+            _os.chdir({dirname!r})
+            del _sys, _basename, _os
+            \n""".format(filename=filename, dirname=dirname))
         interp.prepend_syspath(filename)
         # XXX KBK 03Jul04 When run w/o subprocess, runtime warnings still
         #         go to __stderr__.  With subprocess, they go to the shell.
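
The ScriptBinding change above swaps %-interpolation for str.format with the ``!r`` conversion.
A quick check that the two spellings produce the same repr-quoted result (the path value is made
up for illustration):

    # {x!r} applies repr(), just as %r did in the old template.
    path = r"C:\Python\script.py"
    assert "__file__ = {filename!r}".format(filename=path) == "__file__ = %r" % path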
diff --git a/Lib/idlelib/tabbedpages.py b/Lib/idlelib/tabbedpages.py
--- a/Lib/idlelib/tabbedpages.py
+++ b/Lib/idlelib/tabbedpages.py
@@ -78,7 +78,7 @@
     def remove_tab(self, tab_name):
         """Remove the tab named <tab_name>"""
         if not tab_name in self._tab_names:
-            raise KeyError("No such Tab: '%s" % page_name)
+            raise KeyError("No such Tab: '%s" % tab_name)
 
         self._tab_names.remove(tab_name)
         self._arrange_tabs()
@@ -88,7 +88,7 @@
         if tab_name == self._selected_tab:
             return
         if tab_name is not None and tab_name not in self._tabs:
-            raise KeyError("No such Tab: '%s" % page_name)
+            raise KeyError("No such Tab: '%s" % tab_name)
 
         # deselect the current selected tab
         if self._selected_tab is not None:
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
--- a/Lib/multiprocessing/connection.py
+++ b/Lib/multiprocessing/connection.py
@@ -111,6 +111,10 @@
     if sys.platform != 'win32' and family == 'AF_PIPE':
         raise ValueError('Family %s is not recognized.' % family)
 
+    if sys.platform == 'win32' and family == 'AF_UNIX':
+        # double check
+        if not hasattr(socket, family):
+            raise ValueError('Family %s is not recognized.' % family)
 
 def address_type(address):
     '''
diff --git a/Lib/test/seq_tests.py b/Lib/test/seq_tests.py
--- a/Lib/test/seq_tests.py
+++ b/Lib/test/seq_tests.py
@@ -4,6 +4,7 @@
 
 import unittest
 import sys
+import pickle
 
 # Various iterables
 # This is used for checking the constructor (here and in test_deque.py)
@@ -388,3 +389,9 @@
         self.assertEqual(a.index(0, -4*sys.maxsize, 4*sys.maxsize), 2)
         self.assertRaises(ValueError, a.index, 0, 4*sys.maxsize,-4*sys.maxsize)
         self.assertRaises(ValueError, a.index, 2, 0, -10)
+
+    def test_pickle(self):
+        lst = self.type2test([4, 5, 6, 7])
+        lst2 = pickle.loads(pickle.dumps(lst))
+        self.assertEqual(lst2, lst)
+        self.assertNotEqual(id(lst2), id(lst))
diff --git a/Lib/test/test_array.py b/Lib/test/test_array.py
--- a/Lib/test/test_array.py
+++ b/Lib/test/test_array.py
@@ -285,6 +285,20 @@
             self.assertEqual(a.x, b.x)
             self.assertEqual(type(a), type(b))
 
+    def test_iterator_pickle(self):
+        data = array.array(self.typecode, self.example)
+        orgit = iter(data)
+        d = pickle.dumps(orgit)
+        it = pickle.loads(d)
+        self.assertEqual(type(orgit), type(it))
+        self.assertEqual(list(it), list(data))
+
+        if len(data):
+            it = pickle.loads(d)
+            next(it)
+            d = pickle.dumps(it)
+            self.assertEqual(list(it), list(data)[1:])
+
     def test_insert(self):
         a = array.array(self.typecode, self.example)
         a.insert(0, self.example[0])
diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py
--- a/Lib/test/test_asyncore.py
+++ b/Lib/test/test_asyncore.py
@@ -74,15 +74,16 @@
         pass
     else:
         n = 200
-        while n > 0:
-            r, w, e = select.select([conn], [], [])
+        start = time.time()
+        while n > 0 and time.time() - start < 3.0:
+            r, w, e = select.select([conn], [], [], 0.1)
             if r:
+                n -= 1
                 data = conn.recv(10)
                 # keep everything except for the newline terminator
                 buf.write(data.replace(b'\n', b''))
                 if b'\n' in data:
                     break
-            n -= 1
             time.sleep(0.01)
 
         conn.close()
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -14,6 +14,7 @@
 import traceback
 from test.support import TESTFN, unlink,  run_unittest, check_warnings
 from operator import neg
+import pickle
 try:
     import pty, signal
 except ImportError:
@@ -110,7 +111,30 @@
     def __iter__(self):
         raise RuntimeError
 
+def filter_char(arg):
+    return ord(arg) > ord("d")
+
+def map_char(arg):
+    return chr(ord(arg)+1)
+
 class BuiltinTest(unittest.TestCase):
+    # Helper to check picklability
+    def check_iter_pickle(self, it, seq):
+        itorg = it
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(type(itorg), type(it))
+        self.assertEqual(list(it), seq)
+
+        #test the iterator after dropping one from it
+        it = pickle.loads(d)
+        try:
+            next(it)
+        except StopIteration:
+            return
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(list(it), seq[1:])
 
     def test_import(self):
         __import__('sys')
@@ -566,6 +590,11 @@
         self.assertEqual(list(filter(lambda x: x>=3, (1, 2, 3, 4))), [3, 4])
         self.assertRaises(TypeError, list, filter(42, (1, 2)))
 
+    def test_filter_pickle(self):
+        f1 = filter(filter_char, "abcdeabcde")
+        f2 = filter(filter_char, "abcdeabcde")
+        self.check_iter_pickle(f1, list(f2))
+
     def test_getattr(self):
         self.assertTrue(getattr(sys, 'stdout') is sys.stdout)
         self.assertRaises(TypeError, getattr, sys, 1)
@@ -759,6 +788,11 @@
             raise RuntimeError
         self.assertRaises(RuntimeError, list, map(badfunc, range(5)))
 
+    def test_map_pickle(self):
+        m1 = map(map_char, "Is this the real life?")
+        m2 = map(map_char, "Is this the real life?")
+        self.check_iter_pickle(m1, list(m2))
+
     def test_max(self):
         self.assertEqual(max('123123'), '3')
         self.assertEqual(max(1, 2, 3), 3)
@@ -1300,6 +1334,13 @@
                     return i
         self.assertRaises(ValueError, list, zip(BadSeq(), BadSeq()))
 
+    def test_zip_pickle(self):
+        a = (1, 2, 3)
+        b = (4, 5, 6)
+        t = [(1, 4), (2, 5), (3, 6)]
+        z1 = zip(a, b)
+        self.check_iter_pickle(z1, t)
+
     def test_format(self):
         # Test the basic machinery of the format() builtin.  Don't test
         #  the specifics of the various formatters
diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py
--- a/Lib/test/test_bytes.py
+++ b/Lib/test/test_bytes.py
@@ -518,6 +518,24 @@
                 q = pickle.loads(ps)
                 self.assertEqual(b, q)
 
+    def test_iterator_pickling(self):
+        for b in b"", b"a", b"abc", b"\xffab\x80", b"\0\0\377\0\0":
+            it = itorg = iter(self.type2test(b))
+            data = list(self.type2test(b))
+            d = pickle.dumps(it)
+            it = pickle.loads(d)
+            self.assertEqual(type(itorg), type(it))
+            self.assertEqual(list(it), data)
+
+            it = pickle.loads(d)
+            try:
+                next(it)
+            except StopIteration:
+                continue
+            d = pickle.dumps(it)
+            it = pickle.loads(d)
+            self.assertEqual(list(it), data[1:])
+
     def test_strip(self):
         b = self.type2test(b'mississippi')
         self.assertEqual(b.strip(b'i'), b'mississipp')
diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py
--- a/Lib/test/test_decimal.py
+++ b/Lib/test/test_decimal.py
@@ -4953,6 +4953,78 @@
         self.assertRaises(ValueError, get_fmt, 12345, invalid_dot, 'g')
         self.assertRaises(ValueError, get_fmt, 12345, invalid_sep, 'g')
 
+    def test_exact_conversion(self):
+        Decimal = C.Decimal
+        localcontext = C.localcontext
+        InvalidOperation = C.InvalidOperation
+
+        with localcontext() as c:
+
+            c.traps[InvalidOperation] = True
+
+            # Clamped
+            x = "0e%d" % sys.maxsize
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            x = "0e%d" % (-sys.maxsize-1)
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            # Overflow
+            x = "1e%d" % sys.maxsize
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            # Underflow
+            x = "1e%d" % (-sys.maxsize-1)
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+    def test_from_tuple(self):
+        Decimal = C.Decimal
+        localcontext = C.localcontext
+        InvalidOperation = C.InvalidOperation
+        Overflow = C.Overflow
+        Underflow = C.Underflow
+
+        with localcontext() as c:
+
+            c.traps[InvalidOperation] = True
+            c.traps[Overflow] = True
+            c.traps[Underflow] = True
+
+            # SSIZE_MAX
+            x = (1, (), sys.maxsize)
+            self.assertEqual(str(c.create_decimal(x)), '-0E+999999')
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            x = (1, (0, 1, 2), sys.maxsize)
+            self.assertRaises(Overflow, c.create_decimal, x)
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            # SSIZE_MIN
+            x = (1, (), -sys.maxsize-1)
+            self.assertEqual(str(c.create_decimal(x)), '-0E-1000026')
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            x = (1, (0, 1, 2), -sys.maxsize-1)
+            self.assertRaises(Underflow, c.create_decimal, x)
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            # OverflowError
+            x = (1, (), sys.maxsize+1)
+            self.assertRaises(OverflowError, c.create_decimal, x)
+            self.assertRaises(OverflowError, Decimal, x)
+
+            x = (1, (), -sys.maxsize-2)
+            self.assertRaises(OverflowError, c.create_decimal, x)
+            self.assertRaises(OverflowError, Decimal, x)
+
+            # Specials
+            x = (1, (), "N")
+            self.assertEqual(str(Decimal(x)), '-sNaN')
+            x = (1, (0,), "N")
+            self.assertEqual(str(Decimal(x)), '-sNaN')
+            x = (1, (0, 1), "N")
+            self.assertEqual(str(Decimal(x)), '-sNaN1')
+
 
 all_tests = [
   CExplicitConstructionTest, PyExplicitConstructionTest,
diff --git a/Lib/test/test_deque.py b/Lib/test/test_deque.py
--- a/Lib/test/test_deque.py
+++ b/Lib/test/test_deque.py
@@ -471,6 +471,19 @@
 ##            self.assertNotEqual(id(d), id(e))
 ##            self.assertEqual(id(e), id(e[-1]))
 
+    def test_iterator_pickle(self):
+        data = deque(range(200))
+        it = itorg = iter(data)
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(type(itorg), type(it))
+        self.assertEqual(list(it), list(data))
+
+        it = pickle.loads(d)
+        next(it)
+        d = pickle.dumps(it)
+        self.assertEqual(list(it), list(data)[1:])
+
     def test_deepcopy(self):
         mut = [10]
         d = deque([mut])
diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py
--- a/Lib/test/test_dict.py
+++ b/Lib/test/test_dict.py
@@ -2,7 +2,9 @@
 from test import support
 
 import collections, random, string
+import collections.abc
 import gc, weakref
+import pickle
 
 
 class DictTest(unittest.TestCase):
@@ -803,6 +805,58 @@
             pass
         self._tracked(MyDict())
 
+    def test_iterator_pickling(self):
+        data = {1:"a", 2:"b", 3:"c"}
+        it = iter(data)
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(sorted(it), sorted(data))
+
+        it = pickle.loads(d)
+        try:
+            drop = next(it)
+        except StopIteration:
+            return
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        del data[drop]
+        self.assertEqual(sorted(it), sorted(data))
+
+    def test_itemiterator_pickling(self):
+        data = {1:"a", 2:"b", 3:"c"}
+        # dictviews aren't picklable, only their iterators
+        itorg = iter(data.items())
+        d = pickle.dumps(itorg)
+        it = pickle.loads(d)
+        # note that the type of the unpickled iterator
+        # is not necessarily the same as the original.  It is
+        # merely an object supporting the iterator protocol, yielding
+        # the same objects as the original one.
+        # self.assertEqual(type(itorg), type(it))
+        self.assertTrue(isinstance(it, collections.abc.Iterator))
+        self.assertEqual(dict(it), data)
+
+        it = pickle.loads(d)
+        drop = next(it)
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        del data[drop[0]]
+        self.assertEqual(dict(it), data)
+
+    def test_valuesiterator_pickling(self):
+        data = {1:"a", 2:"b", 3:"c"}
+        # data.values() isn't picklable, only its iterator
+        it = iter(data.values())
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(sorted(list(it)), sorted(list(data.values())))
+
+        it = pickle.loads(d)
+        drop = next(it)
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        values = list(it) + [drop]
+        self.assertEqual(sorted(values), sorted(list(data.values())))
 
 from test import mapping_tests
 
diff --git a/Lib/test/test_enumerate.py b/Lib/test/test_enumerate.py
--- a/Lib/test/test_enumerate.py
+++ b/Lib/test/test_enumerate.py
@@ -1,5 +1,6 @@
 import unittest
 import sys
+import pickle
 
 from test import support
 
@@ -61,7 +62,25 @@
     def __iter__(self):
         return self
 
-class EnumerateTestCase(unittest.TestCase):
+class PickleTest:
+    # Helper to check picklability
+    def check_pickle(self, itorg, seq):
+        d = pickle.dumps(itorg)
+        it = pickle.loads(d)
+        self.assertEqual(type(itorg), type(it))
+        self.assertEqual(list(it), seq)
+
+        it = pickle.loads(d)
+        try:
+            next(it)
+        except StopIteration:
+            self.assertFalse(seq[1:])
+            return
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(list(it), seq[1:])
+
+class EnumerateTestCase(unittest.TestCase, PickleTest):
 
     enum = enumerate
     seq, res = 'abc', [(0,'a'), (1,'b'), (2,'c')]
@@ -73,6 +92,9 @@
         self.assertEqual(list(self.enum(self.seq)), self.res)
         self.enum.__doc__
 
+    def test_pickle(self):
+        self.check_pickle(self.enum(self.seq), self.res)
+
     def test_getitemseqn(self):
         self.assertEqual(list(self.enum(G(self.seq))), self.res)
         e = self.enum(G(''))
@@ -126,7 +148,7 @@
     seq = range(10,20000,2)
     res = list(zip(range(20000), seq))
 
-class TestReversed(unittest.TestCase):
+class TestReversed(unittest.TestCase, PickleTest):
 
     def test_simple(self):
         class A:
@@ -212,6 +234,10 @@
         ngi = NoGetItem()
         self.assertRaises(TypeError, reversed, ngi)
 
+    def test_pickle(self):
+        for data in 'abc', range(5), tuple(enumerate('abc')), range(1,17,5):
+            self.check_pickle(reversed(data), list(data)[::-1])
+
 
 class EnumerateStartTestCase(EnumerateTestCase):
 
diff --git a/Lib/test/test_iter.py b/Lib/test/test_iter.py
--- a/Lib/test/test_iter.py
+++ b/Lib/test/test_iter.py
@@ -2,6 +2,8 @@
 
 import unittest
 from test.support import run_unittest, TESTFN, unlink, cpython_only
+import pickle
+import collections.abc
 
 # Test result of triple loop (too big to inline)
 TRIPLETS = [(0, 0, 0), (0, 0, 1), (0, 0, 2),
@@ -28,6 +30,8 @@
             raise StopIteration
         self.i = res + 1
         return res
+    def __iter__(self):
+        return self
 
 class IteratingSequenceClass:
     def __init__(self, n):
@@ -49,7 +53,9 @@
 class TestCase(unittest.TestCase):
 
     # Helper to check that an iterator returns a given sequence
-    def check_iterator(self, it, seq):
+    def check_iterator(self, it, seq, pickle=True):
+        if pickle:
+            self.check_pickle(it, seq)
         res = []
         while 1:
             try:
@@ -60,12 +66,33 @@
         self.assertEqual(res, seq)
 
     # Helper to check that a for loop generates a given sequence
-    def check_for_loop(self, expr, seq):
+    def check_for_loop(self, expr, seq, pickle=True):
+        if pickle:
+            self.check_pickle(iter(expr), seq)
         res = []
         for val in expr:
             res.append(val)
         self.assertEqual(res, seq)
 
+    # Helper to check picklability
+    def check_pickle(self, itorg, seq):
+        d = pickle.dumps(itorg)
+        it = pickle.loads(d)
+        # Cannot assert type equality because dict iterators unpickle as list
+        # iterators.
+        # self.assertEqual(type(itorg), type(it))
+        self.assertTrue(isinstance(it, collections.abc.Iterator))
+        self.assertEqual(list(it), seq)
+
+        it = pickle.loads(d)
+        try:
+            next(it)
+        except StopIteration:
+            return
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(list(it), seq[1:])
+
     # Test basic use of iter() function
     def test_iter_basic(self):
         self.check_iterator(iter(range(10)), list(range(10)))
@@ -138,7 +165,7 @@
                 if i > 100:
                     raise IndexError # Emergency stop
                 return i
-        self.check_iterator(iter(C(), 10), list(range(10)))
+        self.check_iterator(iter(C(), 10), list(range(10)), pickle=False)
 
     # Test two-argument iter() with function
     def test_iter_function(self):
@@ -146,7 +173,7 @@
             i = state[0]
             state[0] = i+1
             return i
-        self.check_iterator(iter(spam, 10), list(range(10)))
+        self.check_iterator(iter(spam, 10), list(range(10)), pickle=False)
 
     # Test two-argument iter() with function that raises StopIteration
     def test_iter_function_stop(self):
@@ -156,7 +183,7 @@
                 raise StopIteration
             state[0] = i+1
             return i
-        self.check_iterator(iter(spam, 20), list(range(10)))
+        self.check_iterator(iter(spam, 20), list(range(10)), pickle=False)
 
     # Test exception propagation through function iterator
     def test_exception_function(self):
@@ -198,7 +225,7 @@
                 if i == 10:
                     raise StopIteration
                 return SequenceClass.__getitem__(self, i)
-        self.check_for_loop(MySequenceClass(20), list(range(10)))
+        self.check_for_loop(MySequenceClass(20), list(range(10)), pickle=False)
 
     # Test a big range
     def test_iter_big_range(self):
@@ -237,8 +264,8 @@
             f.close()
         f = open(TESTFN, "r")
         try:
-            self.check_for_loop(f, ["0\n", "1\n", "2\n", "3\n", "4\n"])
-            self.check_for_loop(f, [])
+            self.check_for_loop(f, ["0\n", "1\n", "2\n", "3\n", "4\n"], pickle=False)
+            self.check_for_loop(f, [], pickle=False)
         finally:
             f.close()
             try:
diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py
--- a/Lib/test/test_itertools.py
+++ b/Lib/test/test_itertools.py
@@ -37,6 +37,13 @@
     'Test predicate'
     return x%2==1
 
+def tupleize(*args):
+    return args
+
+def irange(n):
+    for i in range(n):
+        yield i
+
 class StopNow:
     'Class emulating an empty iterable.'
     def __iter__(self):
@@ -55,8 +62,59 @@
     'Factorial'
     return prod(range(1, n+1))
 
+# Module-level functions so that the iterators built on them can be pickled
+def testR(r):
+    return r[0]
+
+def testR2(r):
+    return r[2]
+
+def underten(x):
+    return x<10
+
 class TestBasicOps(unittest.TestCase):
 
+    def pickletest(self, it, stop=4, take=1, compare=None):
+        """Test that an iterator is the same after pickling, also when part-consumed"""
+        def expand(it, i=0):
+            # Recursively expand iterables, within sensible bounds
+            if i > 10:
+                raise RuntimeError("infinite recursion encountered")
+            if isinstance(it, str):
+                return it
+            try:
+                l = list(islice(it, stop))
+            except TypeError:
+                return it # can't expand it
+            return [expand(e, i+1) for e in l]
+
+        # Test the initial copy against the original
+        dump = pickle.dumps(it)
+        i2 = pickle.loads(dump)
+        self.assertEqual(type(it), type(i2))
+        a, b = expand(it), expand(i2)
+        self.assertEqual(a, b)
+        if compare:
+            c = expand(compare)
+            self.assertEqual(a, c)
+
+        # Take from the copy, and create another copy and compare them.
+        i3 = pickle.loads(dump)
+        took = 0
+        try:
+            for i in range(take):
+                next(i3)
+                took += 1
+        except StopIteration:
+            pass #in case there is less data than 'take'
+        dump = pickle.dumps(i3)
+        i4 = pickle.loads(dump)
+        a, b = expand(i3), expand(i4)
+        self.assertEqual(a, b)
+        if compare:
+            c = expand(compare[took:])
+            self.assertEqual(a, c)
+
     def test_accumulate(self):
         self.assertEqual(list(accumulate(range(10))),               # one positional arg
                           [0, 1, 3, 6, 10, 15, 21, 28, 36, 45])
@@ -83,6 +141,7 @@
                          [2, 16, 144, 720, 5040, 0, 0, 0, 0, 0])
         with self.assertRaises(TypeError):
             list(accumulate(s, chr))                                # unary-operation
+        self.pickletest(accumulate(range(10)))                      # test pickling
 
     def test_chain(self):
 
@@ -106,14 +165,43 @@
         self.assertEqual(take(4, chain.from_iterable(['abc', 'def'])), list('abcd'))
         self.assertRaises(TypeError, list, chain.from_iterable([2, 3]))
 
+    def test_chain_reducible(self):
+        operators = [copy.deepcopy,
+                     lambda s: pickle.loads(pickle.dumps(s))]
+        for oper in operators:
+            it = chain('abc', 'def')
+            self.assertEqual(list(oper(it)), list('abcdef'))
+            self.assertEqual(next(it), 'a')
+            self.assertEqual(list(oper(it)), list('bcdef'))
+
+            self.assertEqual(list(oper(chain(''))), [])
+            self.assertEqual(take(4, oper(chain('abc', 'def'))), list('abcd'))
+            self.assertRaises(TypeError, list, oper(chain(2, 3)))
+        self.pickletest(chain('abc', 'def'), compare=list('abcdef'))
+
     def test_combinations(self):
         self.assertRaises(TypeError, combinations, 'abc')       # missing r argument
         self.assertRaises(TypeError, combinations, 'abc', 2, 1) # too many arguments
         self.assertRaises(TypeError, combinations, None)        # pool is not iterable
         self.assertRaises(ValueError, combinations, 'abc', -2)  # r is negative
-        self.assertEqual(list(combinations('abc', 32)), [])     # r > n
-        self.assertEqual(list(combinations(range(4), 3)),
-                                           [(0,1,2), (0,1,3), (0,2,3), (1,2,3)])
+
+        for op in (lambda a:a, lambda a:pickle.loads(pickle.dumps(a))):
+            self.assertEqual(list(op(combinations('abc', 32))), [])     # r > n
+
+            self.assertEqual(list(op(combinations('ABCD', 2))),
+                             [('A','B'), ('A','C'), ('A','D'), ('B','C'), ('B','D'), ('C','D')])
+            testIntermediate = combinations('ABCD', 2)
+            next(testIntermediate)
+            self.assertEqual(list(op(testIntermediate)),
+                             [('A','C'), ('A','D'), ('B','C'), ('B','D'), ('C','D')])
+
+            self.assertEqual(list(op(combinations(range(4), 3))),
+                             [(0,1,2), (0,1,3), (0,2,3), (1,2,3)])
+            testIntermediate = combinations(range(4), 3)
+            next(testIntermediate)
+            self.assertEqual(list(op(testIntermediate)),
+                             [(0,1,3), (0,2,3), (1,2,3)])
+
 
         def combinations1(iterable, r):
             'Pure python version shown in the docs'
@@ -168,6 +256,9 @@
                 self.assertEqual(result, list(combinations2(values, r))) # matches second pure python version
                 self.assertEqual(result, list(combinations3(values, r))) # matches second pure python version
 
+                self.pickletest(combinations(values, r))                 # test pickling
+
+        # Test implementation detail:  tuple re-use
     @support.impl_detail("tuple reuse is specific to CPython")
     def test_combinations_tuple_reuse(self):
         self.assertEqual(len(set(map(id, combinations('abcde', 3)))), 1)
@@ -179,8 +270,15 @@
         self.assertRaises(TypeError, cwr, 'abc', 2, 1) # too many arguments
         self.assertRaises(TypeError, cwr, None)        # pool is not iterable
         self.assertRaises(ValueError, cwr, 'abc', -2)  # r is negative
-        self.assertEqual(list(cwr('ABC', 2)),
-                         [('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')])
+
+        for op in (lambda a:a, lambda a:pickle.loads(pickle.dumps(a))):
+            self.assertEqual(list(op(cwr('ABC', 2))),
+                             [('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')])
+            testIntermediate = cwr('ABC', 2)
+            next(testIntermediate)
+            self.assertEqual(list(op(testIntermediate)),
+                             [('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')])
+
 
         def cwr1(iterable, r):
             'Pure python version shown in the docs'
@@ -239,6 +337,10 @@
                 self.assertEqual(result, list(cwr1(values, r)))         # matches first pure python version
                 self.assertEqual(result, list(cwr2(values, r)))         # matches second pure python version
 
+                self.pickletest(cwr(values,r))                          # test pickling
+
+        # Test implementation detail:  tuple re-use
+
     @support.impl_detail("tuple reuse is specific to CPython")
     def test_combinations_with_replacement_tuple_reuse(self):
         cwr = combinations_with_replacement
@@ -305,6 +407,8 @@
                     self.assertEqual(result, list(permutations(values, None))) # test r as None
                     self.assertEqual(result, list(permutations(values)))       # test default r
 
+                self.pickletest(permutations(values, r))                # test pickling
+
     @support.impl_detail("tuple resuse is CPython specific")
     def test_permutations_tuple_reuse(self):
         self.assertEqual(len(set(map(id, permutations('abcde', 3)))), 1)
@@ -359,6 +463,24 @@
         self.assertRaises(TypeError, compress, range(6))            # too few args
         self.assertRaises(TypeError, compress, range(6), None)      # too many args
 
+        # check copy, deepcopy, pickle
+        for op in (lambda a:copy.copy(a), lambda a:copy.deepcopy(a), lambda a:pickle.loads(pickle.dumps(a))):
+            for data, selectors, result1, result2 in [
+                ('ABCDEF', [1,0,1,0,1,1], 'ACEF', 'CEF'),
+                ('ABCDEF', [0,0,0,0,0,0], '', ''),
+                ('ABCDEF', [1,1,1,1,1,1], 'ABCDEF', 'BCDEF'),
+                ('ABCDEF', [1,0,1], 'AC', 'C'),
+                ('ABC', [0,1,1,1,1,1], 'BC', 'C'),
+                ]:
+
+                self.assertEqual(list(op(compress(data=data, selectors=selectors))), list(result1))
+                self.assertEqual(list(op(compress(data, selectors))), list(result1))
+                testIntermediate = compress(data, selectors)
+                if result1:
+                    next(testIntermediate)
+                    self.assertEqual(list(op(testIntermediate)), list(result2))
+
+
     def test_count(self):
         self.assertEqual(lzip('abc',count()), [('a', 0), ('b', 1), ('c', 2)])
         self.assertEqual(lzip('abc',count(3)), [('a', 3), ('b', 4), ('c', 5)])
@@ -393,7 +515,7 @@
             c = count(value)
             self.assertEqual(next(copy.copy(c)), value)
             self.assertEqual(next(copy.deepcopy(c)), value)
-            self.assertEqual(next(pickle.loads(pickle.dumps(c))), value)
+            self.pickletest(count(value))
 
         #check proper internal error handling for large "step' sizes
         count(1, maxsize+5); sys.exc_info()
@@ -440,6 +562,7 @@
                 else:
                     r2 = ('count(%r, %r)' % (i, j)).replace('L', '')
                 self.assertEqual(r1, r2)
+                self.pickletest(count(i, j))
 
     def test_cycle(self):
         self.assertEqual(take(10, cycle('abc')), list('abcabcabca'))
@@ -448,6 +571,18 @@
         self.assertRaises(TypeError, cycle, 5)
         self.assertEqual(list(islice(cycle(gen3()),10)), [0,1,2,0,1,2,0,1,2,0])
 
+        # check copy, deepcopy, pickle
+        c = cycle('abc')
+        self.assertEqual(next(c), 'a')
+        #simple copy currently not supported, because __reduce__ returns
+        #an internal iterator
+        #self.assertEqual(take(10, copy.copy(c)), list('bcabcabcab'))
+        self.assertEqual(take(10, copy.deepcopy(c)), list('bcabcabcab'))
+        self.assertEqual(take(10, pickle.loads(pickle.dumps(c))), list('bcabcabcab'))
+        next(c)
+        self.assertEqual(take(10, pickle.loads(pickle.dumps(c))), list('cabcabcabc'))
+        self.pickletest(cycle('abc'))
+
     def test_groupby(self):
         # Check whether it accepts arguments correctly
         self.assertEqual([], list(groupby([])))
@@ -466,18 +601,37 @@
                 dup.append(elem)
         self.assertEqual(s, dup)
 
+        # Check normal pickled
+        dup = []
+        for k, g in pickle.loads(pickle.dumps(groupby(s, testR))):
+            for elem in g:
+                self.assertEqual(k, elem[0])
+                dup.append(elem)
+        self.assertEqual(s, dup)
+
         # Check nested case
         dup = []
-        for k, g in groupby(s, lambda r:r[0]):
-            for ik, ig in groupby(g, lambda r:r[2]):
+        for k, g in groupby(s, testR):
+            for ik, ig in groupby(g, testR2):
                 for elem in ig:
                     self.assertEqual(k, elem[0])
                     self.assertEqual(ik, elem[2])
                     dup.append(elem)
         self.assertEqual(s, dup)
 
+        # Check nested and pickled
+        dup = []
+        for k, g in pickle.loads(pickle.dumps(groupby(s, testR))):
+            for ik, ig in pickle.loads(pickle.dumps(groupby(g, testR2))):
+                for elem in ig:
+                    self.assertEqual(k, elem[0])
+                    self.assertEqual(ik, elem[2])
+                    dup.append(elem)
+        self.assertEqual(s, dup)
+
+
         # Check case where inner iterator is not used
-        keys = [k for k, g in groupby(s, lambda r:r[0])]
+        keys = [k for k, g in groupby(s, testR)]
         expectedkeys = set([r[0] for r in s])
         self.assertEqual(set(keys), expectedkeys)
         self.assertEqual(len(keys), len(expectedkeys))
@@ -548,6 +702,20 @@
         self.assertRaises(TypeError, filter, isEven, 3)
         self.assertRaises(TypeError, next, filter(range(6), range(6)))
 
+        # check copy, deepcopy, pickle
+        ans = [0,2,4]
+
+        c = filter(isEven, range(6))
+        self.assertEqual(list(copy.copy(c)), ans)
+        c = filter(isEven, range(6))
+        self.assertEqual(list(copy.deepcopy(c)), ans)
+        c = filter(isEven, range(6))
+        self.assertEqual(list(pickle.loads(pickle.dumps(c))), ans)
+        next(c)
+        self.assertEqual(list(pickle.loads(pickle.dumps(c))), ans[1:])
+        c = filter(isEven, range(6))
+        self.pickletest(c)
+
     def test_filterfalse(self):
         self.assertEqual(list(filterfalse(isEven, range(6))), [1,3,5])
         self.assertEqual(list(filterfalse(None, [0,1,0,2,0])), [0,0,0])
@@ -558,6 +726,7 @@
         self.assertRaises(TypeError, filterfalse, lambda x:x, range(6), 7)
         self.assertRaises(TypeError, filterfalse, isEven, 3)
         self.assertRaises(TypeError, next, filterfalse(range(6), range(6)))
+        self.pickletest(filterfalse(isEven, range(6)))
 
     def test_zip(self):
         # XXX This is rather silly now that builtin zip() calls zip()...
@@ -582,6 +751,23 @@
         ids = list(map(id, list(zip('abc', 'def'))))
         self.assertEqual(len(dict.fromkeys(ids)), len(ids))
 
+        # check copy, deepcopy, pickle
+        ans = [(x,y) for x, y in copy.copy(zip('abc',count()))]
+        self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)])
+
+        ans = [(x,y) for x, y in copy.deepcopy(zip('abc',count()))]
+        self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)])
+
+        ans = [(x,y) for x, y in pickle.loads(pickle.dumps(zip('abc',count())))]
+        self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)])
+
+        testIntermediate = zip('abc',count())
+        next(testIntermediate)
+        ans = [(x,y) for x, y in pickle.loads(pickle.dumps(testIntermediate))]
+        self.assertEqual(ans, [('b', 1), ('c', 2)])
+
+        self.pickletest(zip('abc', count()))
+
     def test_ziplongest(self):
         for args in [
                 ['abc', range(6)],
@@ -631,6 +817,12 @@
         ids = list(map(id, list(zip_longest('abc', 'def'))))
         self.assertEqual(len(dict.fromkeys(ids)), len(ids))
 
+    def test_zip_longest_pickling(self):
+        self.pickletest(zip_longest("abc", "def"))
+        self.pickletest(zip_longest("abc", "defgh"))
+        self.pickletest(zip_longest("abc", "defgh", fillvalue=1))
+        self.pickletest(zip_longest("", "defgh"))
+
     def test_bug_7244(self):
 
         class Repeater:
@@ -734,6 +926,20 @@
         self.assertEqual(len(set(map(id, product('abc', 'def')))), 1)
         self.assertNotEqual(len(set(map(id, list(product('abc', 'def'))))), 1)
 
+    def test_product_pickling(self):
+        # check copy, deepcopy, pickle
+        for args, result in [
+            ([], [()]),                     # zero iterables
+            (['ab'], [('a',), ('b',)]),     # one iterable
+            ([range(2), range(3)], [(0,0), (0,1), (0,2), (1,0), (1,1), (1,2)]),     # two iterables
+            ([range(0), range(2), range(3)], []),           # first iterable with zero length
+            ([range(2), range(0), range(3)], []),           # middle iterable with zero length
+            ([range(2), range(3), range(0)], []),           # last iterable with zero length
+            ]:
+            self.assertEqual(list(copy.copy(product(*args))), result)
+            self.assertEqual(list(copy.deepcopy(product(*args))), result)
+            self.pickletest(product(*args))
+
     def test_repeat(self):
         self.assertEqual(list(repeat(object='a', times=3)), ['a', 'a', 'a'])
         self.assertEqual(lzip(range(3),repeat('a')),
@@ -752,11 +958,16 @@
         list(r)
         self.assertEqual(repr(r), 'repeat((1+0j), 0)')
 
+        # check copy, deepcopy, pickle
+        c = repeat(object='a', times=10)
+        self.assertEqual(next(c), 'a')
+        self.assertEqual(take(2, copy.copy(c)), list('a' * 2))
+        self.assertEqual(take(2, copy.deepcopy(c)), list('a' * 2))
+        self.pickletest(repeat(object='a', times=10))
+
     def test_map(self):
         self.assertEqual(list(map(operator.pow, range(3), range(1,7))),
                          [0**1, 1**2, 2**3])
-        def tupleize(*args):
-            return args
         self.assertEqual(list(map(tupleize, 'abc', range(5))),
                          [('a',0),('b',1),('c',2)])
         self.assertEqual(list(map(tupleize, 'abc', count())),
@@ -771,6 +982,18 @@
         self.assertRaises(ValueError, next, map(errfunc, [4], [5]))
         self.assertRaises(TypeError, next, map(onearg, [4], [5]))
 
+        # check copy, deepcopy, pickle
+        ans = [('a',0),('b',1),('c',2)]
+
+        c = map(tupleize, 'abc', count())
+        self.assertEqual(list(copy.copy(c)), ans)
+
+        c = map(tupleize, 'abc', count())
+        self.assertEqual(list(copy.deepcopy(c)), ans)
+
+        c = map(tupleize, 'abc', count())
+        self.pickletest(c)
+
     def test_starmap(self):
         self.assertEqual(list(starmap(operator.pow, zip(range(3), range(1,7)))),
                          [0**1, 1**2, 2**3])
@@ -785,6 +1008,18 @@
         self.assertRaises(ValueError, next, starmap(errfunc, [(4,5)]))
         self.assertRaises(TypeError, next, starmap(onearg, [(4,5)]))
 
+        # check copy, deepcopy, pickle
+        ans = [0**1, 1**2, 2**3]
+
+        c = starmap(operator.pow, zip(range(3), range(1,7)))
+        self.assertEqual(list(copy.copy(c)), ans)
+
+        c = starmap(operator.pow, zip(range(3), range(1,7)))
+        self.assertEqual(list(copy.deepcopy(c)), ans)
+
+        c = starmap(operator.pow, zip(range(3), range(1,7)))
+        self.pickletest(c)
+
     def test_islice(self):
         for args in [          # islice(args) should agree with range(args)
                 (10, 20, 3),
@@ -817,17 +1052,18 @@
         self.assertEqual(list(it), list(range(3, 10)))
 
         # Test invalid arguments
-        self.assertRaises(TypeError, islice, range(10))
-        self.assertRaises(TypeError, islice, range(10), 1, 2, 3, 4)
-        self.assertRaises(ValueError, islice, range(10), -5, 10, 1)
-        self.assertRaises(ValueError, islice, range(10), 1, -5, -1)
-        self.assertRaises(ValueError, islice, range(10), 1, 10, -1)
-        self.assertRaises(ValueError, islice, range(10), 1, 10, 0)
-        self.assertRaises(ValueError, islice, range(10), 'a')
-        self.assertRaises(ValueError, islice, range(10), 'a', 1)
-        self.assertRaises(ValueError, islice, range(10), 1, 'a')
-        self.assertRaises(ValueError, islice, range(10), 'a', 1, 1)
-        self.assertRaises(ValueError, islice, range(10), 1, 'a', 1)
+        ra = range(10)
+        self.assertRaises(TypeError, islice, ra)
+        self.assertRaises(TypeError, islice, ra, 1, 2, 3, 4)
+        self.assertRaises(ValueError, islice, ra, -5, 10, 1)
+        self.assertRaises(ValueError, islice, ra, 1, -5, -1)
+        self.assertRaises(ValueError, islice, ra, 1, 10, -1)
+        self.assertRaises(ValueError, islice, ra, 1, 10, 0)
+        self.assertRaises(ValueError, islice, ra, 'a')
+        self.assertRaises(ValueError, islice, ra, 'a', 1)
+        self.assertRaises(ValueError, islice, ra, 1, 'a')
+        self.assertRaises(ValueError, islice, ra, 'a', 1, 1)
+        self.assertRaises(ValueError, islice, ra, 1, 'a', 1)
         self.assertEqual(len(list(islice(count(), 1, 10, maxsize))), 1)
 
         # Issue #10323:  Less islice in a predictable state
@@ -835,9 +1071,22 @@
         self.assertEqual(list(islice(c, 1, 3, 50)), [1])
         self.assertEqual(next(c), 3)
 
+        # check copy, deepcopy, pickle
+        for args in [          # islice(args) should agree with range(args)
+                (10, 20, 3),
+                (10, 3, 20),
+                (10, 20),
+                (10, 3),
+                (20,)
+                ]:
+            self.assertEqual(list(copy.copy(islice(range(100), *args))),
+                             list(range(*args)))
+            self.assertEqual(list(copy.deepcopy(islice(range(100), *args))),
+                             list(range(*args)))
+            self.pickletest(islice(range(100), *args))
+
     def test_takewhile(self):
         data = [1, 3, 5, 20, 2, 4, 6, 8]
-        underten = lambda x: x<10
         self.assertEqual(list(takewhile(underten, data)), [1, 3, 5])
         self.assertEqual(list(takewhile(underten, [])), [])
         self.assertRaises(TypeError, takewhile)
@@ -849,9 +1098,14 @@
         self.assertEqual(list(t), [1, 1, 1])
         self.assertRaises(StopIteration, next, t)
 
+        # check copy, deepcopy, pickle
+        self.assertEqual(list(copy.copy(takewhile(underten, data))), [1, 3, 5])
+        self.assertEqual(list(copy.deepcopy(takewhile(underten, data))),
+                        [1, 3, 5])
+        self.pickletest(takewhile(underten, data))
+
     def test_dropwhile(self):
         data = [1, 3, 5, 20, 2, 4, 6, 8]
-        underten = lambda x: x<10
         self.assertEqual(list(dropwhile(underten, data)), [20, 2, 4, 6, 8])
         self.assertEqual(list(dropwhile(underten, [])), [])
         self.assertRaises(TypeError, dropwhile)
@@ -860,11 +1114,14 @@
         self.assertRaises(TypeError, next, dropwhile(10, [(4,5)]))
         self.assertRaises(ValueError, next, dropwhile(errfunc, [(4,5)]))
 
+        # check copy, deepcopy, pickle
+        self.assertEqual(list(copy.copy(dropwhile(underten, data))), [20, 2, 4, 6, 8])
+        self.assertEqual(list(copy.deepcopy(dropwhile(underten, data))),
+                        [20, 2, 4, 6, 8])
+        self.pickletest(dropwhile(underten, data))
+
     def test_tee(self):
         n = 200
-        def irange(n):
-            for i in range(n):
-                yield i
 
         a, b = tee([])        # test empty iterator
         self.assertEqual(list(a), [])
@@ -949,6 +1206,67 @@
         del a
         self.assertRaises(ReferenceError, getattr, p, '__class__')
 
+        ans = list('abc')
+        long_ans = list(range(10000))
+
+        # check copy
+        a, b = tee('abc')
+        self.assertEqual(list(copy.copy(a)), ans)
+        self.assertEqual(list(copy.copy(b)), ans)
+        a, b = tee(list(range(10000)))
+        self.assertEqual(list(copy.copy(a)), long_ans)
+        self.assertEqual(list(copy.copy(b)), long_ans)
+
+        # check partially consumed copy
+        a, b = tee('abc')
+        take(2, a)
+        take(1, b)
+        self.assertEqual(list(copy.copy(a)), ans[2:])
+        self.assertEqual(list(copy.copy(b)), ans[1:])
+        self.assertEqual(list(a), ans[2:])
+        self.assertEqual(list(b), ans[1:])
+        a, b = tee(range(10000))
+        take(100, a)
+        take(60, b)
+        self.assertEqual(list(copy.copy(a)), long_ans[100:])
+        self.assertEqual(list(copy.copy(b)), long_ans[60:])
+        self.assertEqual(list(a), long_ans[100:])
+        self.assertEqual(list(b), long_ans[60:])
+
+        # check deepcopy
+        a, b = tee('abc')
+        self.assertEqual(list(copy.deepcopy(a)), ans)
+        self.assertEqual(list(copy.deepcopy(b)), ans)
+        self.assertEqual(list(a), ans)
+        self.assertEqual(list(b), ans)
+        a, b = tee(range(10000))
+        self.assertEqual(list(copy.deepcopy(a)), long_ans)
+        self.assertEqual(list(copy.deepcopy(b)), long_ans)
+        self.assertEqual(list(a), long_ans)
+        self.assertEqual(list(b), long_ans)
+
+        # check partially consumed deepcopy
+        a, b = tee('abc')
+        take(2, a)
+        take(1, b)
+        self.assertEqual(list(copy.deepcopy(a)), ans[2:])
+        self.assertEqual(list(copy.deepcopy(b)), ans[1:])
+        self.assertEqual(list(a), ans[2:])
+        self.assertEqual(list(b), ans[1:])
+        a, b = tee(range(10000))
+        take(100, a)
+        take(60, b)
+        self.assertEqual(list(copy.deepcopy(a)), long_ans[100:])
+        self.assertEqual(list(copy.deepcopy(b)), long_ans[60:])
+        self.assertEqual(list(a), long_ans[100:])
+        self.assertEqual(list(b), long_ans[60:])
+
+        # check pickle
+        self.pickletest(iter(tee('abc')))
+        a, b = tee('abc')
+        self.pickletest(a, compare=ans)
+        self.pickletest(b, compare=ans)
+
     def test_StopIteration(self):
         self.assertRaises(StopIteration, next, zip())
 
@@ -974,9 +1292,21 @@
 
 class TestExamples(unittest.TestCase):
 
-    def test_accumlate(self):
+    def test_accumulate(self):
         self.assertEqual(list(accumulate([1,2,3,4,5])), [1, 3, 6, 10, 15])
 
+    def test_accumulate_reducible(self):
+        # check copy, deepcopy, pickle
+        data = [1, 2, 3, 4, 5]
+        accumulated = [1, 3, 6, 10, 15]
+        it = accumulate(data)
+
+        self.assertEqual(list(pickle.loads(pickle.dumps(it))), accumulated[:])
+        self.assertEqual(next(it), 1)
+        self.assertEqual(list(pickle.loads(pickle.dumps(it))), accumulated[1:])
+        self.assertEqual(list(copy.deepcopy(it)), accumulated[1:])
+        self.assertEqual(list(copy.copy(it)), accumulated[1:])
+
     def test_chain(self):
         self.assertEqual(''.join(chain('ABC', 'DEF')), 'ABCDEF')
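
The copy, deepcopy and pickle checks added throughout this test file lean on the
__reduce__/__setstate__ methods that the itertools C types gain further down in
Modules/itertoolsmodule.c. A minimal sketch of the behaviour being exercised,
outside the test harness and assuming the 3.3 semantics introduced here:

    import copy, pickle
    from itertools import islice

    it = islice(range(100), 10, 20, 3)    # yields 10, 13, 16, 19
    next(it)                              # consume 10
    clone = copy.deepcopy(it)             # position is preserved
    dup = pickle.loads(pickle.dumps(it))  # and survives a pickle round trip
    assert list(clone) == list(dup) == [13, 16, 19]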
 
diff --git a/Lib/test/test_list.py b/Lib/test/test_list.py
--- a/Lib/test/test_list.py
+++ b/Lib/test/test_list.py
@@ -1,5 +1,6 @@
 import sys
 from test import support, list_tests
+import pickle
 
 class ListTest(list_tests.CommonTest):
     type2test = list
@@ -69,6 +70,33 @@
         check(10)       # check our checking code
         check(1000000)
 
+    def test_iterator_pickle(self):
+        # Userlist iterators don't support pickling yet since
+        # they are based on generators.
+        data = self.type2test([4, 5, 6, 7])
+        it = itorg = iter(data)
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(type(itorg), type(it))
+        self.assertEqual(self.type2test(it), self.type2test(data))
+
+        it = pickle.loads(d)
+        next(it)
+        d = pickle.dumps(it)
+        self.assertEqual(self.type2test(it), self.type2test(data)[1:])
+
+    def test_reversed_pickle(self):
+        data = self.type2test([4, 5, 6, 7])
+        it = itorg = reversed(data)
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(type(itorg), type(it))
+        self.assertEqual(self.type2test(it), self.type2test(reversed(data)))
+
+        it = pickle.loads(d)
+        next(it)
+        d = pickle.dumps(it)
+        self.assertEqual(self.type2test(it), self.type2test(reversed(data))[1:])
 
 def test_main(verbose=None):
     support.run_unittest(ListTest)
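
List iterators (and reversed list iterators) now carry enough state to be
pickled mid-iteration; Objects/listobject.c adds the __reduce__/__setstate__
pair these tests exercise. In short:

    import pickle

    it = iter([4, 5, 6, 7])
    next(it)                              # consume 4
    it2 = pickle.loads(pickle.dumps(it))  # same iterator type, same position
    assert type(it2) is type(iter([]))
    assert list(it2) == [5, 6, 7]
    assert list(it) == [5, 6, 7]          # dumps() does not consume the original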
diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py
--- a/Lib/test/test_multiprocessing.py
+++ b/Lib/test/test_multiprocessing.py
@@ -2649,6 +2649,10 @@
         with self.assertRaises(ValueError):
             multiprocessing.connection.Listener(r'\\.\test')
 
+    @unittest.skipUnless(WIN32, "skipped on non-Windows platforms")
+    def test_invalid_family_win32(self):
+        with self.assertRaises(ValueError):
+            multiprocessing.connection.Listener('/var/test.pipe')
 
 testcases_other = [OtherTest, TestInvalidHandle, TestInitializers,
                    TestStdinBadfiledescriptor, TestWait, TestInvalidFamily]
diff --git a/Lib/test/test_range.py b/Lib/test/test_range.py
--- a/Lib/test/test_range.py
+++ b/Lib/test/test_range.py
@@ -341,13 +341,35 @@
 
     def test_pickling(self):
         testcases = [(13,), (0, 11), (-22, 10), (20, 3, -1),
-                     (13, 21, 3), (-2, 2, 2)]
+                     (13, 21, 3), (-2, 2, 2), (2**65, 2**65+2)]
         for proto in range(pickle.HIGHEST_PROTOCOL + 1):
             for t in testcases:
                 r = range(*t)
                 self.assertEqual(list(pickle.loads(pickle.dumps(r, proto))),
                                  list(r))
 
+    def test_iterator_pickling(self):
+        testcases = [(13,), (0, 11), (-22, 10), (20, 3, -1),
+                     (13, 21, 3), (-2, 2, 2), (2**65, 2**65+2)]
+        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+            for t in testcases:
+                it = itorg = iter(range(*t))
+                data = list(range(*t))
+
+                d = pickle.dumps(it)
+                it = pickle.loads(d)
+                self.assertEqual(type(itorg), type(it))
+                self.assertEqual(list(it), data)
+
+                it = pickle.loads(d)
+                try:
+                    next(it)
+                except StopIteration:
+                    continue
+                d = pickle.dumps(it)
+                it = pickle.loads(d)
+                self.assertEqual(list(it), data[1:])
+
     def test_odd_bug(self):
         # This used to raise a "SystemError: NULL result without error"
         # because the range validation step was eating the exception
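
Range iterators are rebuilt from their numeric state rather than from a
materialized sequence, which is why the (2**65, 2**65+2) case is included:
those values do not fit in a C long and exercise the long-range iterator path.
Roughly:

    import pickle

    it = iter(range(2**65, 2**65 + 2))
    next(it)
    it2 = pickle.loads(pickle.dumps(it))
    assert list(it2) == [2**65 + 1]       # position survives, no huge list is built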
diff --git a/Lib/test/test_set.py b/Lib/test/test_set.py
--- a/Lib/test/test_set.py
+++ b/Lib/test/test_set.py
@@ -9,6 +9,7 @@
 import sys
 import warnings
 import collections
+import collections.abc
 
 class PassThru(Exception):
     pass
@@ -234,6 +235,26 @@
                 dup = pickle.loads(p)
                 self.assertEqual(self.s.x, dup.x)
 
+    def test_iterator_pickling(self):
+        itorg = iter(self.s)
+        data = self.thetype(self.s)
+        d = pickle.dumps(itorg)
+        it = pickle.loads(d)
+        # Set iterators unpickle as list iterators due to the
+        # undefined order of set items.
+        # self.assertEqual(type(itorg), type(it))
+        self.assertTrue(isinstance(it, collections.abc.Iterator))
+        self.assertEqual(self.thetype(it), data)
+
+        it = pickle.loads(d)
+        try:
+            drop = next(it)
+        except StopIteration:
+            return
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(self.thetype(it), data - self.thetype((drop,)))
+
     def test_deepcopy(self):
         class Tracer:
             def __init__(self, value):
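
Note the commented-out type check above: set iterators intentionally do not
round-trip as set iterators. Because set order is arbitrary, their __reduce__
(added in Objects/setobject.c) hands the remaining items to pickle as a plain
list, so the reloaded object is a list iterator with the same contents. Sketch
of the behaviour the test describes:

    import pickle

    s = {'a', 'b', 'c'}
    it = iter(s)
    it2 = pickle.loads(pickle.dumps(it))
    assert type(it2) is type(iter([]))    # comes back as a list iterator
    assert set(it2) == s                  # but the contents are preserved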
diff --git a/Lib/test/test_tools.py b/Lib/test/test_tools.py
--- a/Lib/test/test_tools.py
+++ b/Lib/test/test_tools.py
@@ -5,8 +5,11 @@
 """
 
 import os
+import sys
+import imp
 import unittest
 import sysconfig
+import tempfile
 from test import support
 from test.script_helper import assert_python_ok
 
@@ -17,10 +20,11 @@
 
 srcdir = sysconfig.get_config_var('projectbase')
 basepath = os.path.join(os.getcwd(), srcdir, 'Tools')
+scriptsdir = os.path.join(basepath, 'scripts')
 
 
 class ReindentTests(unittest.TestCase):
-    script = os.path.join(basepath, 'scripts', 'reindent.py')
+    script = os.path.join(scriptsdir, 'reindent.py')
 
     def test_noargs(self):
         assert_python_ok(self.script)
@@ -31,8 +35,73 @@
         self.assertGreater(err, b'')
 
 
+class TestSundryScripts(unittest.TestCase):
+    # At least make sure the rest don't have syntax errors.  When tests are
+    # added for a script it should be added to the whitelist below.
+
+    # scripts that have independent tests.
+    whitelist = ['reindent.py']
+    # scripts that can't be imported without running
+    blacklist = ['make_ctype.py']
+    # scripts that use windows-only modules
+    windows_only = ['win_add2path.py']
+    # blacklisted for other reasons
+    other = ['analyze_dxp.py']
+
+    skiplist = blacklist + whitelist + windows_only + other
+
+    def setUp(self):
+        cm = support.DirsOnSysPath(scriptsdir)
+        cm.__enter__()
+        self.addCleanup(cm.__exit__)
+
+    def test_sundry(self):
+        for fn in os.listdir(scriptsdir):
+            if fn.endswith('.py') and fn not in self.skiplist:
+                __import__(fn[:-3])
+
+    @unittest.skipIf(sys.platform != "win32", "Windows-only test")
+    def test_sundry_windows(self):
+        for fn in self.windows_only:
+            __import__(fn[:-3])
+
+    @unittest.skipIf(not support.threading, "test requires _thread module")
+    def test_analyze_dxp_import(self):
+        if hasattr(sys, 'getdxp'):
+            import analyze_dxp
+        else:
+            with self.assertRaises(RuntimeError):
+                import analyze_dxp
+
+
+class PdepsTests(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(self):
+        path = os.path.join(scriptsdir, 'pdeps.py')
+        self.pdeps = imp.load_source('pdeps', path)
+
+    @classmethod
+    def tearDownClass(self):
+        if 'pdeps' in sys.modules:
+            del sys.modules['pdeps']
+
+    def test_process_errors(self):
+        # Issue #14492: m_import.match(line) can be None.
+        with tempfile.TemporaryDirectory() as tmpdir:
+            fn = os.path.join(tmpdir, 'foo')
+            with open(fn, 'w') as stream:
+                stream.write("#!/this/will/fail")
+            self.pdeps.process(fn, {})
+
+    def test_inverse_attribute_error(self):
+        # Issue #14492: this used to fail with an AttributeError.
+        self.pdeps.inverse({'a': []})
+
+
 def test_main():
-    support.run_unittest(ReindentTests)
+    support.run_unittest(*[obj for obj in globals().values()
+                               if isinstance(obj, type)])
 
 
 if __name__ == '__main__':
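
TestSundryScripts is only a smoke test: it puts Tools/scripts on sys.path and
imports every script that is not on the skip list, so a syntax error in any of
them fails the run without needing a dedicated test. The underlying idiom, with
an illustrative path (the test itself derives the directory from sysconfig):

    import os, sys

    scriptsdir = '/path/to/Tools/scripts'   # illustrative only
    sys.path.insert(0, scriptsdir)
    for fn in os.listdir(scriptsdir):
        if fn.endswith('.py'):
            __import__(fn[:-3])             # import "foo.py" as module "foo"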
diff --git a/Lib/test/test_tuple.py b/Lib/test/test_tuple.py
--- a/Lib/test/test_tuple.py
+++ b/Lib/test/test_tuple.py
@@ -1,6 +1,7 @@
 from test import support, seq_tests
 
 import gc
+import pickle
 
 class TupleTest(seq_tests.CommonTest):
     type2test = tuple
@@ -164,6 +165,34 @@
         check(10)       # check our checking code
         check(1000000)
 
+    def test_iterator_pickle(self):
+        # Userlist iterators don't support pickling yet since
+        # they are based on generators.
+        data = self.type2test([4, 5, 6, 7])
+        itorg = iter(data)
+        d = pickle.dumps(itorg)
+        it = pickle.loads(d)
+        self.assertEqual(type(itorg), type(it))
+        self.assertEqual(self.type2test(it), self.type2test(data))
+
+        it = pickle.loads(d)
+        next(it)
+        d = pickle.dumps(it)
+        self.assertEqual(self.type2test(it), self.type2test(data)[1:])
+
+    def test_reversed_pickle(self):
+        data = self.type2test([4, 5, 6, 7])
+        itorg = reversed(data)
+        d = pickle.dumps(itorg)
+        it = pickle.loads(d)
+        self.assertEqual(type(itorg), type(it))
+        self.assertEqual(self.type2test(it), self.type2test(reversed(data)))
+
+        it = pickle.loads(d)
+        next(it)
+        d = pickle.dumps(it)
+        self.assertEqual(self.type2test(it), self.type2test(reversed(data))[1:])
+
 def test_main():
     support.run_unittest(TupleTest)
 
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
--- a/Lib/test/test_xml_etree.py
+++ b/Lib/test/test_xml_etree.py
@@ -1859,6 +1859,41 @@
         gc_collect()
         self.assertIsNone(wref())
 
+        # A longer cycle: d->e->e2->d
+        e = ET.Element('joe')
+        d = Dummy()
+        d.dummyref = e
+        wref = weakref.ref(d)
+        e2 = ET.SubElement(e, 'foo', attr=d)
+        del d, e, e2
+        gc_collect()
+        self.assertIsNone(wref())
+
+        # A cycle between Element objects as children of one another
+        # e1->e2->e3->e1
+        e1 = ET.Element('e1')
+        e2 = ET.Element('e2')
+        e3 = ET.Element('e3')
+        e1.append(e2)
+        e2.append(e3)
+        e3.append(e1)
+        wref = weakref.ref(e1)
+        del e1, e2, e3
+        gc_collect()
+        self.assertIsNone(wref())
+
+    def test_weakref(self):
+        flag = False
+        def wref_cb(w):
+            nonlocal flag
+            flag = True
+        e = ET.Element('e')
+        wref = weakref.ref(e, wref_cb)
+        self.assertEqual(wref().tag, 'e')
+        del e
+        self.assertEqual(flag, True)
+        self.assertEqual(wref(), None)
+
 
 class ElementTreeTest(unittest.TestCase):
     def test_istype(self):
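
These additions pair with the _elementtree.c changes further down: Element
grows a weakreflist slot (tp_weaklistoffset), so the C accelerator's elements
can be weak-referenced, and dealloc_extra/element_gc_clear are reworked so
reference cycles through attributes or children are collected cleanly. The
user-visible part is simply:

    import weakref
    import xml.etree.ElementTree as ET

    e = ET.Element('e')
    r = weakref.ref(e)     # previously rejected by the C accelerator
    del e
    assert r() is None     # as in test_weakref above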
diff --git a/Lib/tkinter/font.py b/Lib/tkinter/font.py
--- a/Lib/tkinter/font.py
+++ b/Lib/tkinter/font.py
@@ -2,27 +2,27 @@
 #
 # written by Fredrik Lundh, February 1998
 #
-# FIXME: should add 'displayof' option where relevant (actual, families,
-#        measure, and metrics)
-#
 
 __version__ = "0.9"
 
+import itertools
 import tkinter
 
+
 # weight/slant
 NORMAL = "normal"
 ROMAN = "roman"
 BOLD   = "bold"
 ITALIC = "italic"
 
+
 def nametofont(name):
     """Given the name of a tk named font, returns a Font representation.
     """
     return Font(name=name, exists=True)
 
+
 class Font:
-
     """Represents a named font.
 
     Constructor options are:
@@ -44,6 +44,8 @@
 
     """
 
+    counter = itertools.count(1)
+
     def _set(self, kw):
         options = []
         for k, v in kw.items():
@@ -63,7 +65,8 @@
             options[args[i][1:]] = args[i+1]
         return options
 
-    def __init__(self, root=None, font=None, name=None, exists=False, **options):
+    def __init__(self, root=None, font=None, name=None, exists=False,
+                 **options):
         if not root:
             root = tkinter._default_root
         if font:
@@ -72,7 +75,7 @@
         else:
             font = self._set(options)
         if not name:
-            name = "font" + str(id(self))
+            name = "font" + str(next(self.counter))
         self.name = name
 
         if exists:
@@ -118,14 +121,17 @@
         "Return a distinct copy of the current font"
         return Font(self._root, **self.actual())
 
-    def actual(self, option=None):
+    def actual(self, option=None, displayof=None):
         "Return actual font attributes"
+        args = ()
+        if displayof:
+            args = ('-displayof', displayof)
         if option:
-            return self._call("font", "actual", self.name, "-"+option)
+            args = args + ('-' + option, )
+            return self._call("font", "actual", self.name, *args)
         else:
             return self._mkdict(
-                self._split(self._call("font", "actual", self.name))
-                )
+                self._split(self._call("font", "actual", self.name, *args)))
 
     def cget(self, option):
         "Get font attribute"
@@ -138,37 +144,47 @@
                   *self._set(options))
         else:
             return self._mkdict(
-                self._split(self._call("font", "config", self.name))
-                )
+                self._split(self._call("font", "config", self.name)))
 
     configure = config
 
-    def measure(self, text):
+    def measure(self, text, displayof=None):
         "Return text width"
-        return int(self._call("font", "measure", self.name, text))
+        args = (text,)
+        if displayof:
+            args = ('-displayof', displayof, text)
+        return int(self._call("font", "measure", self.name, *args))
 
-    def metrics(self, *options):
+    def metrics(self, *options, **kw):
         """Return font metrics.
 
         For best performance, create a dummy widget
         using this font before calling this method."""
-
+        args = ()
+        displayof = kw.pop('displayof', None)
+        if displayof:
+            args = ('-displayof', displayof)
         if options:
+            args = args + self._get(options)
             return int(
-                self._call("font", "metrics", self.name, self._get(options))
-                )
+                self._call("font", "metrics", self.name, *args))
         else:
-            res = self._split(self._call("font", "metrics", self.name))
+            res = self._split(self._call("font", "metrics", self.name, *args))
             options = {}
             for i in range(0, len(res), 2):
                 options[res[i][1:]] = int(res[i+1])
             return options
 
-def families(root=None):
+
+def families(root=None, displayof=None):
     "Get font families (as a tuple)"
     if not root:
         root = tkinter._default_root
-    return root.tk.splitlist(root.tk.call("font", "families"))
+    args = ()
+    if displayof:
+        args = ('-displayof', displayof)
+    return root.tk.splitlist(root.tk.call("font", "families", *args))
+
 
 def names(root=None):
     "Get names of defined fonts (as a tuple)"
@@ -176,6 +192,7 @@
         root = tkinter._default_root
     return root.tk.splitlist(root.tk.call("font", "names"))
 
+
 # --------------------------------------------------------------------
 # test stuff
 
@@ -198,10 +215,10 @@
 
     print(f.measure("hello"), f.metrics("linespace"))
 
-    print(f.metrics())
+    print(f.metrics(displayof=root))
 
     f = Font(font=("Courier", 20, "bold"))
-    print(f.measure("hello"), f.metrics("linespace"))
+    print(f.measure("hello"), f.metrics("linespace", displayof=root))
 
     w = tkinter.Label(root, text="Hello, world", font=f)
     w.pack()
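
The displayof parameter is threaded through actual(), measure(), metrics() and
families() as an optional '-displayof' argument to the underlying Tk font
command, and auto-generated font names now come from a shared itertools.count()
instead of id(self), so recycled object ids can no longer produce clashing
names. Typical use (requires a running Tk display):

    import tkinter
    from tkinter import font

    root = tkinter.Tk()
    f = font.Font(family="times", size=30, weight=font.NORMAL)
    print(f.measure("hello", displayof=root))      # width on root's display
    print(f.metrics("linespace", displayof=root))
    print(font.families(root, displayof=root))     # families known to that display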
diff --git a/Lib/tkinter/ttk.py b/Lib/tkinter/ttk.py
--- a/Lib/tkinter/ttk.py
+++ b/Lib/tkinter/ttk.py
@@ -1253,7 +1253,7 @@
 
 
     def exists(self, item):
-        """Returns True if the specified item is present in the three,
+        """Returns True if the specified item is present in the tree,
         False otherwise."""
         return bool(self.tk.call(self._w, "exists", item))
 
diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py
--- a/Lib/webbrowser.py
+++ b/Lib/webbrowser.py
@@ -448,6 +448,14 @@
 
 def register_X_browsers():
 
+    # use xdg-open if around
+    if _iscommand("xdg-open"):
+        register("xdg-open", None, BackgroundBrowser("xdg-open"))
+
+    # The default GNOME3 browser
+    if "GNOME_DESKTOP_SESSION_ID" in os.environ and _iscommand("gvfs-open"):
+        register("gvfs-open", None, BackgroundBrowser("gvfs-open"))
+
     # The default GNOME browser
     if "GNOME_DESKTOP_SESSION_ID" in os.environ and _iscommand("gnome-open"):
         register("gnome-open", None, BackgroundBrowser("gnome-open"))
diff --git a/Makefile.pre.in b/Makefile.pre.in
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -1351,7 +1351,7 @@
 
 clobber: clean profile-removal
 	-rm -f $(BUILDPYTHON) $(PGEN) $(LIBRARY) $(LDLIBRARY) $(DLLLIBRARY) \
-		tags TAGS Parser/pgen.stamp \
+		tags TAGS \
 		config.cache config.log pyconfig.h Modules/config.c
 	-rm -rf build platform
 	-rm -rf $(PYTHONFRAMEWORKDIR)
diff --git a/Misc/ACKS b/Misc/ACKS
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -333,6 +333,7 @@
 John Fouhy
 Andrew Francis
 Martin Franklin
+Bruce Frederiksen
 Robin Friedrich
 Ivan Frohne
 Matthias Fuchs
@@ -976,6 +977,7 @@
 Peter Stoehr
 Casper Stoel
 Michael Stone
+Serhiy Storchaka
 Ken Stox
 Dan Stromberg
 Daniel Stutzbach
diff --git a/Misc/NEWS b/Misc/NEWS
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -19,10 +19,26 @@
 Library
 -------
 
+- Issue #8515: Set __file__ when running a file in IDLE.
+  Initial patch by Bruce Frederiksen.
+
+- Issue #14496: Fix wrong name in idlelib/tabbedpages.py.
+  Patch by Popa Claudiu.
+
+- Issue #3033: Add displayof parameter to tkinter font. Patch by Guilherme Polo.
+
+- Issue #14482: Raise a ValueError, not a NameError, when trying to create
+  a multiprocessing Client or Listener with an AF_UNIX type address under
+  Windows.  Patch by Popa Claudiu.
+
+- Issue #802310: Always generate unique tkinter font names if not directly passed.
+
 - Issue #14151: Raise a ValueError, not a NameError, when trying to create
   a multiprocessing Client or Listener with an AF_PIPE type address under
   non-Windows platforms.  Patch by Popa Claudiu.
 
+- Issue #14493: Use gvfs-open or xdg-open in webbrowser.
+
 
 What's New in Python 3.3.0 Alpha 2?
 ===================================
@@ -58,6 +74,8 @@
 
 - Issue #14471: Fix a possible buffer overrun in the winreg module.
 
+- Issue #14288: Allow the serialization of builtin iterators.
+
 Library
 -------
 
@@ -997,7 +1015,7 @@
 
 - Issue #11006: Don't issue low level warning in subprocess when pipe2() fails.
 
-- Issue #13620: Support for Chrome browser in webbrowser.py Patch contributed
+- Issue #13620: Support for Chrome browser in webbrowser.  Patch contributed
   by Arnaud Calmettes.
 
 - Issue #11829: Fix code execution holes in inspect.getattr_static for
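
The #14288 entry ("serialization of builtin iterators") is the umbrella for
most of the C changes below: iterators over list, tuple, dict, set, bytes,
bytearray, str, array, deque, range, enumerate and reversed, plus map, filter,
zip and the itertools types, gain __reduce__ (and __setstate__ where a position
has to be restored). The protocol is the ordinary pickle one, for example:

    it = iter([10, 20, 30])
    next(it)
    print(it.__reduce__())   # roughly (iter, ([10, 20, 30],), 1)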
diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c
--- a/Modules/_collectionsmodule.c
+++ b/Modules/_collectionsmodule.c
@@ -1122,6 +1122,35 @@
 }
 
 static PyObject *
+dequeiter_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+    Py_ssize_t i, index=0;
+    PyObject *deque;
+    dequeiterobject *it;
+    if (!PyArg_ParseTuple(args, "O!|n", &deque_type, &deque, &index))
+        return NULL;
+    assert(type == &dequeiter_type);
+
+    it = (dequeiterobject*)deque_iter((dequeobject *)deque);
+    if (!it)
+        return NULL;
+    /* consume items from the queue */
+    for(i=0; i<index; i++) {
+        PyObject *item = dequeiter_next(it);
+        if (item) {
+            Py_DECREF(item);
+        } else {
+            if (it->counter) {
+                Py_DECREF(it);
+                return NULL;
+            } else
+                break;
+        }
+    }
+    return (PyObject*)it;
+}
+
+static PyObject *
 dequeiter_len(dequeiterobject *it)
 {
     return PyLong_FromSsize_t(it->counter);
@@ -1129,14 +1158,21 @@
 
 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
 
+static PyObject *
+dequeiter_reduce(dequeiterobject *it)
+{
+    return Py_BuildValue("O(On)", Py_TYPE(it), it->deque, it->deque->len - it->counter);
+}
+
 static PyMethodDef dequeiter_methods[] = {
     {"__length_hint__", (PyCFunction)dequeiter_len, METH_NOARGS, length_hint_doc},
+    {"__reduce__", (PyCFunction)dequeiter_reduce, METH_NOARGS, reduce_doc},
     {NULL,              NULL}           /* sentinel */
 };
 
 static PyTypeObject dequeiter_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "deque_iterator",                           /* tp_name */
+    "_collections._deque_iterator",              /* tp_name */
     sizeof(dequeiterobject),                    /* tp_basicsize */
     0,                                          /* tp_itemsize */
     /* methods */
@@ -1164,6 +1200,16 @@
     PyObject_SelfIter,                          /* tp_iter */
     (iternextfunc)dequeiter_next,               /* tp_iternext */
     dequeiter_methods,                          /* tp_methods */
+    0,                                          /* tp_members */
+    0,                                          /* tp_getset */
+    0,                                          /* tp_base */
+    0,                                          /* tp_dict */
+    0,                                          /* tp_descr_get */
+    0,                                          /* tp_descr_set */
+    0,                                          /* tp_dictoffset */
+    0,                                          /* tp_init */
+    0,                                          /* tp_alloc */
+    dequeiter_new,                              /* tp_new */
     0,
 };
 
@@ -1217,9 +1263,38 @@
     return item;
 }
 
+static PyObject *
+dequereviter_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+    Py_ssize_t i, index=0;
+    PyObject *deque;
+    dequeiterobject *it;
+    if (!PyArg_ParseTuple(args, "O!|n", &deque_type, &deque, &index))
+        return NULL;
+    assert(type == &dequereviter_type);
+
+    it = (dequeiterobject*)deque_reviter((dequeobject *)deque);
+    if (!it)
+        return NULL;
+    /* consume items from the queue */
+    for(i=0; i<index; i++) {
+        PyObject *item = dequereviter_next(it);
+        if (item) {
+            Py_DECREF(item);
+        } else {
+            if (it->counter) {
+                Py_DECREF(it);
+                return NULL;
+            } else
+                break;
+        }
+    }
+    return (PyObject*)it;
+}
+
 static PyTypeObject dequereviter_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "deque_reverse_iterator",                   /* tp_name */
+    "_collections._deque_reverse_iterator",      /* tp_name */
     sizeof(dequeiterobject),                    /* tp_basicsize */
     0,                                          /* tp_itemsize */
     /* methods */
@@ -1247,6 +1322,16 @@
     PyObject_SelfIter,                          /* tp_iter */
     (iternextfunc)dequereviter_next,            /* tp_iternext */
     dequeiter_methods,                          /* tp_methods */
+    0,                                          /* tp_members */
+    0,                                          /* tp_getset */
+    0,                                          /* tp_base */
+    0,                                          /* tp_dict */
+    0,                                          /* tp_descr_get */
+    0,                                          /* tp_descr_set */
+    0,                                          /* tp_dictoffset */
+    0,                                          /* tp_init */
+    0,                                          /* tp_alloc */
+    dequereviter_new,                           /* tp_new */
     0,
 };
 
@@ -1653,9 +1738,13 @@
 
     if (PyType_Ready(&dequeiter_type) < 0)
         return NULL;
+    Py_INCREF(&dequeiter_type);
+    PyModule_AddObject(m, "_deque_iterator", (PyObject *)&dequeiter_type);
 
     if (PyType_Ready(&dequereviter_type) < 0)
         return NULL;
+    Py_INCREF(&dequereviter_type);
+    PyModule_AddObject(m, "_deque_reverse_iterator", (PyObject *)&dequereviter_type);
 
     return m;
 }
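
Deque iterators are pickled as type plus position: dequeiter_reduce returns
(type, (deque, items_already_consumed)), and the new tp_new slots
(dequeiter_new, dequereviter_new) rebuild an iterator over the same deque and
advance it that many steps. Registering the types as
_collections._deque_iterator and _deque_reverse_iterator is what lets pickle
locate the class by name when loading. Sketch:

    import pickle
    from collections import deque

    d = deque('abcde')
    it = iter(d)
    next(it)                                 # one item consumed
    it2 = pickle.loads(pickle.dumps(it))
    assert list(it2) == list('bcde')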
diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c
--- a/Modules/_decimal/_decimal.c
+++ b/Modules/_decimal/_decimal.c
@@ -1935,7 +1935,7 @@
     mpd_maxcontext(&maxctx);
 
     mpd_qset_string(MPD(dec), s, &maxctx, &status);
-    if (status & (MPD_Inexact|MPD_Rounded)) {
+    if (status & (MPD_Inexact|MPD_Rounded|MPD_Clamped)) {
         /* we want exact results */
         mpd_seterror(MPD(dec), MPD_Invalid_operation, &status);
     }
@@ -2139,7 +2139,7 @@
         return NULL;
     }
 
-    if (status & (MPD_Inexact|MPD_Rounded)) {
+    if (status & (MPD_Inexact|MPD_Rounded|MPD_Clamped)) {
         /* we want exact results */
         mpd_seterror(MPD(dec), MPD_Invalid_operation, &status);
     }
@@ -2385,8 +2385,8 @@
     }
 
     /* coefficient */
-    digits = sequence_as_tuple(PyTuple_GET_ITEM(dectuple, 1),
-                               PyExc_ValueError, "coefficient must be a tuple of digits");
+    digits = sequence_as_tuple(PyTuple_GET_ITEM(dectuple, 1), PyExc_ValueError,
+                               "coefficient must be a tuple of digits");
     if (digits == NULL) {
         goto error;
     }
@@ -2435,8 +2435,8 @@
     if (sign_special[1] == '\0') {
         /* not a special number */
         *cp++ = 'E';
-        n = snprintf(cp, MPD_EXPDIGITS+1, "%" PRI_mpd_ssize_t, exp);
-        if (n < 0 || n >= MPD_EXPDIGITS+1) {
+        n = snprintf(cp, MPD_EXPDIGITS+2, "%" PRI_mpd_ssize_t, exp);
+        if (n < 0 || n >= MPD_EXPDIGITS+2) {
             PyErr_SetString(PyExc_RuntimeError,
                 "internal error in dec_sequence_as_str");
             goto error;
@@ -4215,7 +4215,7 @@
     mpd_uint_t p_data[1] = {2305843009213693951ULL};
     mpd_t p = {MPD_POS|MPD_STATIC|MPD_CONST_DATA, 0, 19, 1, 1, p_data};
     /* Inverse of 10 modulo p */
-    mpd_uint_t inv10_p_data[2] = {2075258708292324556ULL};
+    mpd_uint_t inv10_p_data[1] = {2075258708292324556ULL};
     mpd_t inv10_p = {MPD_POS|MPD_STATIC|MPD_CONST_DATA,
                      0, 19, 1, 1, inv10_p_data};
 #elif defined(CONFIG_32) && _PyHASH_BITS == 31
@@ -4934,7 +4934,7 @@
     PyObject *result;
 
     CONVERT_OP_RAISE(&result, v, context);
-    return  result;
+    return result;
 }
 
 static PyObject *
diff --git a/Modules/_decimal/tests/deccheck.py b/Modules/_decimal/tests/deccheck.py
--- a/Modules/_decimal/tests/deccheck.py
+++ b/Modules/_decimal/tests/deccheck.py
@@ -302,6 +302,7 @@
     dec = maxcontext.create_decimal(value)
     if maxcontext.flags[P.Inexact] or \
        maxcontext.flags[P.Rounded] or \
+       maxcontext.flags[P.Clamped] or \
        maxcontext.flags[P.InvalidOperation]:
         return context.p._raise_error(P.InvalidOperation)
     if maxcontext.flags[P.FloatOperation]:
diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c
--- a/Modules/_elementtree.c
+++ b/Modules/_elementtree.c
@@ -48,6 +48,7 @@
 /* See http://www.python.org/psf/license for licensing details. */
 
 #include "Python.h"
+#include "structmember.h"
 
 #define VERSION "1.0.6"
 
@@ -229,6 +230,8 @@
 
     ElementObjectExtra* extra;
 
+    PyObject *weakreflist; /* For tp_weaklistoffset */
+
 } ElementObject;
 
 static PyTypeObject Element_Type;
@@ -261,17 +264,26 @@
 LOCAL(void)
 dealloc_extra(ElementObject* self)
 {
+    ElementObjectExtra *myextra;
     int i;
 
-    Py_DECREF(self->extra->attrib);
-
-    for (i = 0; i < self->extra->length; i++)
-        Py_DECREF(self->extra->children[i]);
-
-    if (self->extra->children != self->extra->_children)
-        PyObject_Free(self->extra->children);
-
-    PyObject_Free(self->extra);
+    if (!self->extra)
+        return;
+
+    /* Avoid DECREFs calling into this code again (cycles, etc.)
+    */
+    myextra = self->extra;
+    self->extra = NULL;
+
+    Py_DECREF(myextra->attrib);
+
+    for (i = 0; i < myextra->length; i++)
+        Py_DECREF(myextra->children[i]);
+
+    if (myextra->children != myextra->_children)
+        PyObject_Free(myextra->children);
+
+    PyObject_Free(myextra);
 }
 
 /* Convenience internal function to create new Element objects with the given
@@ -308,6 +320,8 @@
     Py_INCREF(Py_None);
     self->tail = Py_None;
 
+    self->weakreflist = NULL;
+
     ALLOC(sizeof(ElementObject), "create element");
     PyObject_GC_Track(self);
     return (PyObject*) self;
@@ -328,6 +342,7 @@
         e->tail = Py_None;
 
         e->extra = NULL;
+        e->weakreflist = NULL;
     }
     return (PyObject *)e;
 }
@@ -576,19 +591,28 @@
 static int
 element_gc_clear(ElementObject *self)
 {
-    PyObject *text = JOIN_OBJ(self->text);
-    PyObject *tail = JOIN_OBJ(self->tail);
     Py_CLEAR(self->tag);
-    Py_CLEAR(text);
-    Py_CLEAR(tail);
+
+    /* The following is like Py_CLEAR for self->text and self->tail, but
+     * written explicitly because the real pointers hide behind access
+     * macros.
+    */
+    if (self->text) {
+        PyObject *tmp = JOIN_OBJ(self->text);
+        self->text = NULL;
+        Py_DECREF(tmp);
+    }
+
+    if (self->tail) {
+        PyObject *tmp = JOIN_OBJ(self->tail);
+        self->tail = NULL;
+        Py_DECREF(tmp);
+    }
 
     /* After dropping all references from extra, it's no longer valid anyway,
-    ** so fully deallocate it (see also element_clearmethod)
+     * so fully deallocate it.
     */
-    if (self->extra) {
-        dealloc_extra(self);
-        self->extra = NULL;
-    }
+    dealloc_extra(self);
     return 0;
 }
 
@@ -596,6 +620,10 @@
 element_dealloc(ElementObject* self)
 {
     PyObject_GC_UnTrack(self);
+
+    if (self->weakreflist != NULL)
+        PyObject_ClearWeakRefs((PyObject *) self);
+
     /* element_gc_clear clears all references and deallocates extra
     */
     element_gc_clear(self);
@@ -626,10 +654,7 @@
     if (!PyArg_ParseTuple(args, ":clear"))
         return NULL;
 
-    if (self->extra) {
-        dealloc_extra(self);
-        self->extra = NULL;
-    }
+    dealloc_extra(self);
 
     Py_INCREF(Py_None);
     Py_DECREF(JOIN_OBJ(self->text));
@@ -1693,7 +1718,7 @@
     (traverseproc)element_gc_traverse,              /* tp_traverse */
     (inquiry)element_gc_clear,                      /* tp_clear */
     0,                                              /* tp_richcompare */
-    0,                                              /* tp_weaklistoffset */
+    offsetof(ElementObject, weakreflist),           /* tp_weaklistoffset */
     0,                                              /* tp_iter */
     0,                                              /* tp_iternext */
     element_methods,                                /* tp_methods */
@@ -3009,8 +3034,7 @@
 PyMODINIT_FUNC
 PyInit__elementtree(void)
 {
-    PyObject* m;
-    PyObject* g;
+    PyObject *m, *g, *temp;
     char* bootstrap;
 
     /* Initialize object types */
@@ -3042,10 +3066,6 @@
     PyDict_SetItemString(g, "__builtins__", PyEval_GetBuiltins());
 
     bootstrap = (
-
-        "from copy import deepcopy\n"
-        "from xml.etree import ElementPath\n"
-
         "def iter(node, tag=None):\n" /* helper */
         "  if tag == '*':\n"
         "    tag = None\n"
@@ -3069,8 +3089,14 @@
     if (!PyRun_String(bootstrap, Py_file_input, g, NULL))
         return NULL;
 
-    elementpath_obj = PyDict_GetItemString(g, "ElementPath");
-    elementtree_deepcopy_obj = PyDict_GetItemString(g, "deepcopy");
+    if (!(temp = PyImport_ImportModule("copy")))
+        return NULL;
+    elementtree_deepcopy_obj = PyObject_GetAttrString(temp, "deepcopy");
+    Py_XDECREF(temp);
+
+    if (!(elementpath_obj = PyImport_ImportModule("xml.etree.ElementPath")))
+        return NULL;
+
     elementtree_iter_obj = PyDict_GetItemString(g, "iter");
     elementtree_itertext_obj = PyDict_GetItemString(g, "itertext");
 
diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c
--- a/Modules/arraymodule.c
+++ b/Modules/arraymodule.c
@@ -2753,6 +2753,34 @@
     return 0;
 }
 
+static PyObject *
+arrayiter_reduce(arrayiterobject *it)
+{
+    return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"),
+                         it->ao, it->index);
+}
+
+static PyObject *
+arrayiter_setstate(arrayiterobject *it, PyObject *state)
+{
+    Py_ssize_t index = PyLong_AsSsize_t(state);
+    if (index == -1 && PyErr_Occurred())
+        return NULL;
+    if (index < 0)
+        index = 0;
+    it->index = index;
+    Py_RETURN_NONE;
+}
+
+PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
+static PyMethodDef arrayiter_methods[] = {
+    {"__reduce__",      (PyCFunction)arrayiter_reduce, METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)arrayiter_setstate, METH_O,
+     setstate_doc},
+    {NULL, NULL} /* sentinel */
+};
+
 static PyTypeObject PyArrayIter_Type = {
     PyVarObject_HEAD_INIT(NULL, 0)
     "arrayiterator",                        /* tp_name */
@@ -2782,7 +2810,7 @@
     0,                                      /* tp_weaklistoffset */
     PyObject_SelfIter,                          /* tp_iter */
     (iternextfunc)arrayiter_next,               /* tp_iternext */
-    0,                                          /* tp_methods */
+    arrayiter_methods,                      /* tp_methods */
 };
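
Array iterators take the simpler route: no constructor is exported; __reduce__
returns the builtin iter() with the array and the current index, and
__setstate__ restores the index (clamping negative values to zero). So:

    import pickle
    from array import array

    a = array('i', [1, 2, 3, 4])
    it = iter(a)
    next(it)
    it2 = pickle.loads(pickle.dumps(it))
    assert list(it2) == [2, 3, 4]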
 
 
diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c
--- a/Modules/itertoolsmodule.c
+++ b/Modules/itertoolsmodule.c
@@ -134,6 +134,53 @@
     return r;
 }
 
+static PyObject *
+groupby_reduce(groupbyobject *lz)
+{
+    /* reduce as a 'new' call with an optional 'setstate' if groupby
+     * has started
+     */
+    PyObject *value;
+    if (lz->tgtkey && lz->currkey && lz->currvalue)
+        value = Py_BuildValue("O(OO)(OOO)", Py_TYPE(lz),
+            lz->it, lz->keyfunc, lz->currkey, lz->currvalue, lz->tgtkey);
+    else
+        value = Py_BuildValue("O(OO)", Py_TYPE(lz),
+            lz->it, lz->keyfunc);
+
+    return value;
+}
+
+PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
+
+static PyObject *
+groupby_setstate(groupbyobject *lz, PyObject *state)
+{
+    PyObject *currkey, *currvalue, *tgtkey;
+    if (!PyArg_ParseTuple(state, "OOO", &currkey, &currvalue, &tgtkey))
+        return NULL;
+    Py_CLEAR(lz->currkey);
+    lz->currkey = currkey;
+    Py_INCREF(lz->currkey);
+    Py_CLEAR(lz->currvalue);
+    lz->currvalue = currvalue;
+    Py_INCREF(lz->currvalue);
+    Py_CLEAR(lz->tgtkey);
+    lz->tgtkey = tgtkey;
+    Py_INCREF(lz->tgtkey);
+    Py_RETURN_NONE;
+}
+
+PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
+
+static PyMethodDef groupby_methods[] = {
+    {"__reduce__",      (PyCFunction)groupby_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)groupby_setstate,    METH_O,
+     setstate_doc},
+    {NULL,              NULL}   /* sentinel */
+};
+
 PyDoc_STRVAR(groupby_doc,
 "groupby(iterable[, keyfunc]) -> create an iterator which returns\n\
 (key, sub-iterator) grouped by each value of key(value).\n");
@@ -168,7 +215,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)groupby_next,         /* tp_iternext */
-    0,                                  /* tp_methods */
+    groupby_methods,                    /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -194,6 +241,17 @@
 static PyTypeObject _grouper_type;
 
 static PyObject *
+_grouper_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+    PyObject *parent, *tgtkey;
+
+    if (!PyArg_ParseTuple(args, "O!O", &groupby_type, &parent, &tgtkey))
+        return NULL;
+
+    return _grouper_create((groupbyobject*) parent, tgtkey);
+}
+
+static PyObject *
 _grouper_create(groupbyobject *parent, PyObject *tgtkey)
 {
     _grouperobject *igo;
@@ -269,6 +327,20 @@
     return r;
 }
 
+static PyObject *
+_grouper_reduce(_grouperobject *lz)
+{
+    return Py_BuildValue("O(OO)", Py_TYPE(lz),
+            lz->parent, lz->tgtkey);
+}
+
+static PyMethodDef _grouper_methods[] = {
+    {"__reduce__",      (PyCFunction)_grouper_reduce,      METH_NOARGS,
+     reduce_doc},
+    {NULL,              NULL}   /* sentinel */
+};
+
+
 static PyTypeObject _grouper_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
     "itertools._grouper",               /* tp_name */
@@ -298,7 +370,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)_grouper_next,        /* tp_iternext */
-    0,                                  /* tp_methods */
+    _grouper_methods,                   /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -308,7 +380,7 @@
     0,                                  /* tp_dictoffset */
     0,                                  /* tp_init */
     0,                                  /* tp_alloc */
-    0,                                  /* tp_new */
+    _grouper_new,                       /* tp_new */
     PyObject_GC_Del,                    /* tp_free */
 };
 
@@ -344,7 +416,7 @@
 static PyTypeObject teedataobject_type;
 
 static PyObject *
-teedataobject_new(PyObject *it)
+teedataobject_newinternal(PyObject *it)
 {
     teedataobject *tdo;
 
@@ -364,7 +436,7 @@
 teedataobject_jumplink(teedataobject *tdo)
 {
     if (tdo->nextlink == NULL)
-        tdo->nextlink = teedataobject_new(tdo->it);
+        tdo->nextlink = teedataobject_newinternal(tdo->it);
     Py_XINCREF(tdo->nextlink);
     return tdo->nextlink;
 }
@@ -420,11 +492,80 @@
     PyObject_GC_Del(tdo);
 }
 
+static PyObject *
+teedataobject_reduce(teedataobject *tdo)
+{
+    int i;
+    /* create a temporary list of already iterated values */
+    PyObject *values = PyList_New(tdo->numread);
+    if (!values)
+        return NULL;
+    for (i=0 ; i<tdo->numread ; i++) {
+        Py_INCREF(tdo->values[i]);
+        PyList_SET_ITEM(values, i, tdo->values[i]);
+    }
+    return Py_BuildValue("O(ONO)", Py_TYPE(tdo), tdo->it,
+                         values,
+                         tdo->nextlink ? tdo->nextlink : Py_None);
+}
+
+static PyTypeObject teedataobject_type;
+
+static PyObject *
+teedataobject_new(PyTypeObject *type, PyObject *args, PyObject *kw)
+{
+    teedataobject *tdo;
+    PyObject *it, *values, *next;
+    Py_ssize_t i, len;
+
+    assert(type == &teedataobject_type);
+    if (!PyArg_ParseTuple(args, "OO!O", &it, &PyList_Type, &values, &next))
+        return NULL;
+
+    tdo = (teedataobject *)teedataobject_newinternal(it);
+    if (!tdo)
+        return NULL;
+
+    len = PyList_GET_SIZE(values);
+    if (len > LINKCELLS)
+        goto err;
+    for (i=0; i<len; i++) {
+        tdo->values[i] = PyList_GET_ITEM(values, i);
+        Py_INCREF(tdo->values[i]);
+    }
+    tdo->numread = len;
+
+    if (len == LINKCELLS) {
+        if (next != Py_None) {
+            if (Py_TYPE(next) != &teedataobject_type)
+                goto err;
+            assert(tdo->nextlink == NULL);
+            Py_INCREF(next);
+            tdo->nextlink = next;
+        }
+    } else {
+        if (next != Py_None)
+            goto err; /* shouldn't have a next if we are not full */
+    }
+    return (PyObject*)tdo;
+
+err:
+    Py_XDECREF(tdo);
+    PyErr_SetString(PyExc_ValueError, "Invalid arguments");
+    return NULL;
+}
+
+static PyMethodDef teedataobject_methods[] = {
+    {"__reduce__",      (PyCFunction)teedataobject_reduce, METH_NOARGS,
+     reduce_doc},
+    {NULL,              NULL}           /* sentinel */
+};
+
 PyDoc_STRVAR(teedataobject_doc, "Data container common to multiple tee objects.");
 
 static PyTypeObject teedataobject_type = {
     PyVarObject_HEAD_INIT(0, 0)         /* Must fill in type value later */
-    "itertools.tee_dataobject",                 /* tp_name */
+    "itertools._tee_dataobject",                /* tp_name */
     sizeof(teedataobject),                      /* tp_basicsize */
     0,                                          /* tp_itemsize */
     /* methods */
@@ -451,7 +592,7 @@
     0,                                          /* tp_weaklistoffset */
     0,                                          /* tp_iter */
     0,                                          /* tp_iternext */
-    0,                                          /* tp_methods */
+    teedataobject_methods,                      /* tp_methods */
     0,                                          /* tp_members */
     0,                                          /* tp_getset */
     0,                                          /* tp_base */
@@ -461,7 +602,7 @@
     0,                                          /* tp_dictoffset */
     0,                                          /* tp_init */
     0,                                          /* tp_alloc */
-    0,                                          /* tp_new */
+    teedataobject_new,                          /* tp_new */
     PyObject_GC_Del,                            /* tp_free */
 };
 
@@ -528,7 +669,7 @@
     to = PyObject_GC_New(teeobject, &tee_type);
     if (to == NULL)
         goto done;
-    to->dataobj = (teedataobject *)teedataobject_new(it);
+    to->dataobj = (teedataobject *)teedataobject_newinternal(it);
     if (!to->dataobj) {
         PyObject_GC_Del(to);
         to = NULL;
@@ -548,7 +689,7 @@
 {
     PyObject *iterable;
 
-    if (!PyArg_UnpackTuple(args, "tee", 1, 1, &iterable))
+    if (!PyArg_UnpackTuple(args, "_tee", 1, 1, &iterable))
         return NULL;
     return tee_fromiterable(iterable);
 }
@@ -570,17 +711,43 @@
     PyObject_GC_Del(to);
 }
 
+static PyObject *
+tee_reduce(teeobject *to)
+{
+    return Py_BuildValue("O(())(Oi)", Py_TYPE(to), to->dataobj, to->index);
+}
+
+static PyObject *
+tee_setstate(teeobject *to, PyObject *state)
+{
+    teedataobject *tdo;
+    int index;
+    if (!PyArg_ParseTuple(state, "O!i", &teedataobject_type, &tdo, &index))
+        return NULL;
+    if (index < 0 || index > LINKCELLS) {
+        PyErr_SetString(PyExc_ValueError, "Index out of range");
+        return NULL;
+    }
+    Py_CLEAR(to->dataobj);
+    to->dataobj = tdo;
+    Py_INCREF(to->dataobj);
+    to->index = index;
+    Py_RETURN_NONE;
+}
+
 PyDoc_STRVAR(teeobject_doc,
 "Iterator wrapped to make it copyable");
 
 static PyMethodDef tee_methods[] = {
     {"__copy__",        (PyCFunction)tee_copy,  METH_NOARGS, teecopy_doc},
+    {"__reduce__",      (PyCFunction)tee_reduce,      METH_NOARGS, reduce_doc},
+    {"__setstate__",    (PyCFunction)tee_setstate,    METH_O, setstate_doc},
     {NULL,              NULL}           /* sentinel */
 };
 
 static PyTypeObject tee_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "itertools.tee",                    /* tp_name */
+    "itertools._tee",                   /* tp_name */
     sizeof(teeobject),                  /* tp_basicsize */
     0,                                  /* tp_itemsize */
     /* methods */
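
tee is the most involved case: each tee object only holds an index into a
shared _tee_dataobject, which caches the values read so far and links to the
next cache block on demand. tee_reduce therefore pickles the data object plus
the tee's index, and teedataobject_reduce pickles the underlying iterator, the
cached values and the link, so tees made from one source keep sharing data
after a copy while each remembers its own position. Roughly:

    import copy, pickle
    from itertools import islice, tee

    a, b = tee('abcdef')
    list(islice(a, 2))                       # a has read 'ab'; b has read nothing
    a2 = copy.copy(a)                        # __copy__ shares the cached data
    b2 = pickle.loads(pickle.dumps(b))       # pickling preserves b's position
    assert list(a2) == list('cdef')
    assert list(b2) == list('abcdef')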
@@ -771,6 +938,38 @@
     }
 }
 
+static PyObject *
+cycle_reduce(cycleobject *lz)
+{
+    /* Create a new cycle with the iterator tuple, then set
+     * the saved state on it.
+     */
+    return Py_BuildValue("O(O)(Oi)", Py_TYPE(lz), 
+        lz->it, lz->saved, lz->firstpass);
+}
+
+static PyObject *
+cycle_setstate(cycleobject *lz, PyObject *state)
+{
+    PyObject *saved=NULL;
+    int firstpass;
+    if (!PyArg_ParseTuple(state, "Oi", &saved, &firstpass))
+        return NULL;
+    Py_CLEAR(lz->saved);
+    lz->saved = saved;
+    Py_XINCREF(lz->saved);
+    lz->firstpass = firstpass != 0;
+    Py_RETURN_NONE;
+}
+
+static PyMethodDef cycle_methods[] = {
+    {"__reduce__",      (PyCFunction)cycle_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)cycle_setstate,    METH_O,
+     setstate_doc},
+    {NULL,              NULL}   /* sentinel */
+};
+
 PyDoc_STRVAR(cycle_doc,
 "cycle(iterable) --> cycle object\n\
 \n\
@@ -807,7 +1006,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)cycle_next,           /* tp_iternext */
-    0,                                  /* tp_methods */
+    cycle_methods,                      /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -913,6 +1112,31 @@
     }
 }
 
+static PyObject *
+dropwhile_reduce(dropwhileobject *lz)
+{
+    return Py_BuildValue("O(OO)l", Py_TYPE(lz),
+                         lz->func, lz->it, lz->start);
+}
+
+static PyObject *
+dropwhile_setstate(dropwhileobject *lz, PyObject *state)
+{
+    int start = PyObject_IsTrue(state);
+    if (start == -1)
+        return NULL;
+    lz->start = start;
+    Py_RETURN_NONE;
+}
+
+static PyMethodDef dropwhile_methods[] = {
+    {"__reduce__",      (PyCFunction)dropwhile_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)dropwhile_setstate,    METH_O,
+     setstate_doc},
+    {NULL,              NULL}   /* sentinel */
+};
+
 PyDoc_STRVAR(dropwhile_doc,
 "dropwhile(predicate, iterable) --> dropwhile object\n\
 \n\
@@ -949,7 +1173,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)dropwhile_next,       /* tp_iternext */
-    0,                                  /* tp_methods */
+    dropwhile_methods,                                  /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -1052,6 +1276,30 @@
     return NULL;
 }
 
+static PyObject *
+takewhile_reduce(takewhileobject *lz)
+{
+    return Py_BuildValue("O(OO)l", Py_TYPE(lz),
+                         lz->func, lz->it, lz->stop);
+}
+
+static PyObject *
+takewhile_reduce_setstate(takewhileobject *lz, PyObject *state)
+{
+    int stop = PyObject_IsTrue(state);
+    if (stop == -1)
+        return NULL;
+    lz->stop = stop;
+    Py_RETURN_NONE;
+}
+
+static PyMethodDef takewhile_reduce_methods[] = {
+    {"__reduce__",      (PyCFunction)takewhile_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)takewhile_reduce_setstate,    METH_O,
+     setstate_doc},
+    {NULL,              NULL}   /* sentinel */
+};
 PyDoc_STRVAR(takewhile_doc,
 "takewhile(predicate, iterable) --> takewhile object\n\
 \n\
@@ -1088,7 +1336,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)takewhile_next,       /* tp_iternext */
-    0,                                  /* tp_methods */
+    takewhile_reduce_methods,           /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -1244,6 +1492,44 @@
     return item;
 }
 
+static PyObject *
+islice_reduce(isliceobject *lz)
+{
+    /* When unpickled, generate a new object with the same bounds,
+     * then 'setstate' with the next and count
+     */
+    PyObject *stop;
+    if (lz->stop == -1) {
+        stop = Py_None;
+        Py_INCREF(stop);
+    } else {
+        stop = PyLong_FromSsize_t(lz->stop);
+        if (stop == NULL)
+            return NULL;
+    }
+    return Py_BuildValue("O(OnNn)n", Py_TYPE(lz),
+        lz->it, lz->next, stop, lz->step,
+        lz->cnt);
+}
+
+static PyObject *
+islice_setstate(isliceobject *lz, PyObject *state)
+{
+    Py_ssize_t cnt = PyLong_AsSsize_t(state);
+    if (cnt == -1 && PyErr_Occurred())
+        return NULL;
+    lz->cnt = cnt;
+    Py_RETURN_NONE;
+}
+
+static PyMethodDef islice_methods[] = {
+    {"__reduce__",      (PyCFunction)islice_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)islice_setstate,    METH_O,
+     setstate_doc},
+    {NULL,              NULL}   /* sentinel */
+};
+
 PyDoc_STRVAR(islice_doc,
 "islice(iterable, [start,] stop [, step]) --> islice object\n\
 \n\
@@ -1284,7 +1570,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)islice_next,          /* tp_iternext */
-    0,                                  /* tp_methods */
+    islice_methods,                     /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -1379,6 +1665,19 @@
     return result;
 }
 
+static PyObject *
+starmap_reduce(starmapobject *lz)
+{
+    /* Just pickle the iterator */
+    return Py_BuildValue("O(OO)", Py_TYPE(lz), lz->func, lz->it);
+}
+
+static PyMethodDef starmap_methods[] = {
+    {"__reduce__",      (PyCFunction)starmap_reduce,      METH_NOARGS,
+     reduce_doc},
+    {NULL,              NULL}   /* sentinel */
+};
+
 PyDoc_STRVAR(starmap_doc,
 "starmap(function, sequence) --> starmap object\n\
 \n\
@@ -1415,7 +1714,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)starmap_next,         /* tp_iternext */
-    0,                                  /* tp_methods */
+    starmap_methods,                    /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -1534,6 +1833,41 @@
     return chain_next(lz);                      /* recurse and use next active */
 }
 
+static PyObject *
+chain_reduce(chainobject *lz)
+{
+    if (lz->source) {
+        /* we can't pickle function objects (itertools.from_iterable) so
+         * we must use setstate to replace the iterable.  One day we
+         * will fix pickling of functions
+         */
+        if (lz->active) {
+            return Py_BuildValue("O()(OO)", Py_TYPE(lz), lz->source, lz->active);
+        } else {
+            return Py_BuildValue("O()(O)", Py_TYPE(lz), lz->source);
+        }
+    } else {
+        return Py_BuildValue("O()", Py_TYPE(lz)); /* exhausted */
+    }
+    return NULL;
+}
+
+static PyObject *
+chain_setstate(chainobject *lz, PyObject *state)
+{
+    PyObject *source, *active=NULL;
+    if (! PyArg_ParseTuple(state, "O|O", &source, &active))
+        return NULL;
+
+    Py_CLEAR(lz->source);
+    lz->source = source;
+    Py_INCREF(lz->source);
+    Py_CLEAR(lz->active);
+    lz->active = active;
+    Py_XINCREF(lz->active);
+    Py_RETURN_NONE;
+}
+
 PyDoc_STRVAR(chain_doc,
 "chain(*iterables) --> chain object\n\
 \n\
@@ -1550,6 +1884,10 @@
 static PyMethodDef chain_methods[] = {
     {"from_iterable", (PyCFunction) chain_new_from_iterable,            METH_O | METH_CLASS,
         chain_from_iterable_doc},
+    {"__reduce__",      (PyCFunction)chain_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)chain_setstate,    METH_O,
+     setstate_doc},
     {NULL,              NULL}   /* sentinel */
 };
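
chain_reduce illustrates the setstate pattern mentioned in its comment: since
the source iterable (for instance one fed to chain.from_iterable) cannot always
be rebuilt from constructor arguments, the reduce value is an empty constructor
call plus a (source, active) state tuple that __setstate__ swaps back in. The
observable effect:

    import pickle
    from itertools import chain

    c = chain('ab', 'cd')
    next(c)                                  # currently inside the first iterable
    c2 = pickle.loads(pickle.dumps(c))
    assert list(c2) == ['b', 'c', 'd']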
 
@@ -1790,6 +2128,83 @@
     return NULL;
 }
 
+static PyObject *
+product_reduce(productobject *lz)
+{
+    if (lz->stopped) {
+        return Py_BuildValue("O(())", Py_TYPE(lz));
+    } else if (lz->result == NULL) {
+        return Py_BuildValue("OO", Py_TYPE(lz), lz->pools);
+    } else {
+        PyObject *indices;
+        Py_ssize_t n, i;
+
+        /* we must pickle the indices and use them for setstate, and
+         * additionally indicate that the iterator has started
+         */
+        n = PyTuple_GET_SIZE(lz->pools);
+        indices = PyTuple_New(n);
+        if (indices == NULL)
+            return NULL;
+        for (i=0; i<n; i++){
+            PyObject* index = PyLong_FromSsize_t(lz->indices[i]);
+            if (!index) {
+                Py_DECREF(indices);
+                return NULL;
+            }
+            PyTuple_SET_ITEM(indices, i, index);
+        }
+        return Py_BuildValue("OON", Py_TYPE(lz), lz->pools, indices);
+    }
+}
+
+static PyObject *
+product_setstate(productobject *lz, PyObject *state)
+{
+    PyObject *result;
+    Py_ssize_t n, i;
+
+    n = PyTuple_GET_SIZE(lz->pools);
+    if (!PyTuple_Check(state) || PyTuple_GET_SIZE(state) != n) {
+        PyErr_SetString(PyExc_ValueError, "invalid arguments");
+        return NULL;
+    }
+    for (i=0; i<n; i++)
+    {
+        PyObject* indexObject = PyTuple_GET_ITEM(state, i);
+        Py_ssize_t index = PyLong_AsSsize_t(indexObject);
+        if (index < 0 && PyErr_Occurred())
+            return NULL; /* not an integer */
+        /* clamp the index */
+        if (index < 0)
+            index = 0;
+        else if (index > n-1)
+            index = n-1;
+        lz->indices[i] = index;
+    }
+
+    result = PyTuple_New(n);
+    if (!result)
+        return NULL;
+    for (i=0; i<n; i++) {
+        PyObject *pool = PyTuple_GET_ITEM(lz->pools, i);
+        PyObject *element = PyTuple_GET_ITEM(pool, lz->indices[i]);
+        Py_INCREF(element);
+        PyTuple_SET_ITEM(result, i, element);
+    }
+    Py_CLEAR(lz->result);
+    lz->result = result;
+    Py_RETURN_NONE;
+}
+
+static PyMethodDef product_methods[] = {
+    {"__reduce__",      (PyCFunction)product_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)product_setstate,    METH_O,
+     setstate_doc},
+    {NULL,              NULL}   /* sentinel */
+};
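
Sketch of the intended product round-trip: the reduce tuple re-creates the
iterator from lz->pools, and the saved indices are pushed back in through
__setstate__ (expected output, assuming the pools are picklable):

    >>> import pickle
    >>> from itertools import product
    >>> it = product('ab', range(2))
    >>> next(it)
    ('a', 0)
    >>> list(pickle.loads(pickle.dumps(it)))
    [('a', 1), ('b', 0), ('b', 1)]
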
+
 PyDoc_STRVAR(product_doc,
 "product(*iterables) --> product object\n\
 \n\
@@ -1834,7 +2249,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)product_next,         /* tp_iternext */
-    0,                                  /* tp_methods */
+    product_methods,                    /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -2021,6 +2436,86 @@
     return NULL;
 }
 
+static PyObject *
+combinations_reduce(combinationsobject *lz)
+{
+    if (lz->result == NULL) {
+        return Py_BuildValue("O(On)", Py_TYPE(lz), lz->pool, lz->r);
+    } else if (lz->stopped) {
+        return Py_BuildValue("O(()n)", Py_TYPE(lz), lz->r);
+    } else {
+        PyObject *indices;
+        Py_ssize_t i;
+
+        /* we must pickle the indices and use them for setstate */
+        indices = PyTuple_New(lz->r);
+        if (!indices)
+            return NULL;
+        for (i=0; i<lz->r; i++)
+        {
+            PyObject* index = PyLong_FromSsize_t(lz->indices[i]);
+            if (!index) {
+                Py_DECREF(indices);
+                return NULL;
+            }
+            PyTuple_SET_ITEM(indices, i, index);
+        }
+
+        return Py_BuildValue("O(On)N", Py_TYPE(lz), lz->pool, lz->r, indices);
+    }
+}
+
+static PyObject *
+combinations_setstate(combinationsobject *lz, PyObject *state)
+{
+    PyObject *result;
+    Py_ssize_t i;
+    Py_ssize_t n = PyTuple_GET_SIZE(lz->pool);
+
+    if (!PyTuple_Check(state) || PyTuple_GET_SIZE(state) != lz->r)
+    {
+        PyErr_SetString(PyExc_ValueError, "invalid arguments");
+        return NULL;
+    }
+
+    for (i=0; i<lz->r; i++)
+    {
+        Py_ssize_t max;
+        PyObject* indexObject = PyTuple_GET_ITEM(state, i);
+        Py_ssize_t index = PyLong_AsSsize_t(indexObject);
+        if (index == -1 && PyErr_Occurred())
+            return NULL; /* not an integer */
+        max = i + n - lz->r;
+        /* clamp the index (beware of negative max) */
+        if (index > max)
+            index = max;
+        if (index < 0)
+            index = 0;
+        lz->indices[i] = index;
+    }
+
+    result = PyTuple_New(lz->r);
+    if (result == NULL)
+        return NULL;
+    for (i=0; i<lz->r; i++) {
+        PyObject *element = PyTuple_GET_ITEM(lz->pool, lz->indices[i]);
+        Py_INCREF(element);
+        PyTuple_SET_ITEM(result, i, element);
+    }
+
+    Py_CLEAR(lz->result);
+    lz->result = result;
+    Py_RETURN_NONE;
+}
+
+static PyMethodDef combinations_methods[] = {
+    {"__reduce__",      (PyCFunction)combinations_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)combinations_setstate,    METH_O,
+     setstate_doc},
+    {NULL,              NULL}   /* sentinel */
+};
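
combinations follows the same pattern: pool and r travel as constructor
arguments, the index vector as the __setstate__ payload.  A minimal sketch of
the expected behaviour (combinations_with_replacement below is analogous):

    >>> import pickle
    >>> from itertools import combinations
    >>> it = combinations(range(4), 2)
    >>> next(it), next(it)
    ((0, 1), (0, 2))
    >>> list(pickle.loads(pickle.dumps(it)))
    [(0, 3), (1, 2), (1, 3), (2, 3)]
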
+
 PyDoc_STRVAR(combinations_doc,
 "combinations(iterable, r) --> combinations object\n\
 \n\
@@ -2029,11 +2524,11 @@
 
 static PyTypeObject combinations_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "itertools.combinations",                   /* tp_name */
+    "itertools.combinations",           /* tp_name */
     sizeof(combinationsobject),         /* tp_basicsize */
     0,                                  /* tp_itemsize */
     /* methods */
-    (destructor)combinations_dealloc,           /* tp_dealloc */
+    (destructor)combinations_dealloc,   /* tp_dealloc */
     0,                                  /* tp_print */
     0,                                  /* tp_getattr */
     0,                                  /* tp_setattr */
@@ -2050,14 +2545,14 @@
     0,                                  /* tp_as_buffer */
     Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
         Py_TPFLAGS_BASETYPE,            /* tp_flags */
-    combinations_doc,                           /* tp_doc */
-    (traverseproc)combinations_traverse,        /* tp_traverse */
+    combinations_doc,                   /* tp_doc */
+    (traverseproc)combinations_traverse,/* tp_traverse */
     0,                                  /* tp_clear */
     0,                                  /* tp_richcompare */
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
-    (iternextfunc)combinations_next,            /* tp_iternext */
-    0,                                  /* tp_methods */
+    (iternextfunc)combinations_next,    /* tp_iternext */
+    combinations_methods,               /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -2067,7 +2562,7 @@
     0,                                  /* tp_dictoffset */
     0,                                  /* tp_init */
     0,                                  /* tp_alloc */
-    combinations_new,                           /* tp_new */
+    combinations_new,                   /* tp_new */
     PyObject_GC_Del,                    /* tp_free */
 };
 
@@ -2266,6 +2761,82 @@
     return NULL;
 }
 
+static PyObject *
+cwr_reduce(cwrobject *lz)
+{
+    if (lz->result == NULL) {
+        return Py_BuildValue("O(On)", Py_TYPE(lz), lz->pool, lz->r);
+    } else if (lz->stopped) {
+        return Py_BuildValue("O(()n)", Py_TYPE(lz), lz->r);
+    } else {
+        PyObject *indices;
+        Py_ssize_t i;
+
+        /* we must pickle the indices and use them for setstate */
+        indices = PyTuple_New(lz->r);
+        if (!indices)
+            return NULL;
+        for (i=0; i<lz->r; i++)
+        {
+            PyObject* index = PyLong_FromSsize_t(lz->indices[i]);
+            if (!index) {
+                Py_DECREF(indices);
+                return NULL;
+            }
+            PyTuple_SET_ITEM(indices, i, index);
+        }
+
+        return Py_BuildValue("O(On)N", Py_TYPE(lz), lz->pool, lz->r, indices);
+    }
+}
+
+static PyObject *
+cwr_setstate(cwrobject *lz, PyObject *state)
+{
+    PyObject *result;
+    Py_ssize_t n, i;
+
+    if (!PyTuple_Check(state) || PyTuple_GET_SIZE(state) != lz->r)
+    {
+        PyErr_SetString(PyExc_ValueError, "invalid arguments");
+        return NULL;
+    }
+
+    n = PyTuple_GET_SIZE(lz->pool);
+    for (i=0; i<lz->r; i++)
+    {
+        PyObject* indexObject = PyTuple_GET_ITEM(state, i);
+        Py_ssize_t index = PyLong_AsSsize_t(indexObject);
+        if (index < 0 && PyErr_Occurred())
+            return NULL; /* not an integer */
+        /* clamp the index */
+        if (index < 0)
+            index = 0;
+        else if (index > n-1)
+            index = n-1;
+        lz->indices[i] = index;
+    }
+    result = PyTuple_New(lz->r);
+    if (result == NULL)
+        return NULL;
+    for (i=0; i<lz->r; i++) {
+        PyObject *element = PyTuple_GET_ITEM(lz->pool, lz->indices[i]);
+        Py_INCREF(element);
+        PyTuple_SET_ITEM(result, i, element);
+    }
+    Py_CLEAR(lz->result);
+    lz->result = result;
+    Py_RETURN_NONE;
+}
+
+static PyMethodDef cwr_methods[] = {
+    {"__reduce__",      (PyCFunction)cwr_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)cwr_setstate,    METH_O,
+     setstate_doc},
+    {NULL,              NULL}   /* sentinel */
+};
+
 PyDoc_STRVAR(cwr_doc,
 "combinations_with_replacement(iterable, r) --> combinations_with_replacement object\n\
 \n\
@@ -2275,11 +2846,11 @@
 
 static PyTypeObject cwr_type = {
     PyVarObject_HEAD_INIT(NULL, 0)
-    "itertools.combinations_with_replacement",                  /* tp_name */
-    sizeof(cwrobject),                  /* tp_basicsize */
+    "itertools.combinations_with_replacement",          /* tp_name */
+    sizeof(cwrobject),                                  /* tp_basicsize */
     0,                                                  /* tp_itemsize */
     /* methods */
-    (destructor)cwr_dealloc,            /* tp_dealloc */
+    (destructor)cwr_dealloc,                            /* tp_dealloc */
     0,                                                  /* tp_print */
     0,                                                  /* tp_getattr */
     0,                                                  /* tp_setattr */
@@ -2291,19 +2862,19 @@
     0,                                                  /* tp_hash */
     0,                                                  /* tp_call */
     0,                                                  /* tp_str */
-    PyObject_GenericGetAttr,            /* tp_getattro */
+    PyObject_GenericGetAttr,                            /* tp_getattro */
     0,                                                  /* tp_setattro */
     0,                                                  /* tp_as_buffer */
     Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
-        Py_TPFLAGS_BASETYPE,            /* tp_flags */
-    cwr_doc,                                    /* tp_doc */
-    (traverseproc)cwr_traverse,         /* tp_traverse */
+        Py_TPFLAGS_BASETYPE,                            /* tp_flags */
+    cwr_doc,                                            /* tp_doc */
+    (traverseproc)cwr_traverse,                         /* tp_traverse */
     0,                                                  /* tp_clear */
     0,                                                  /* tp_richcompare */
     0,                                                  /* tp_weaklistoffset */
-    PyObject_SelfIter,                  /* tp_iter */
-    (iternextfunc)cwr_next,     /* tp_iternext */
-    0,                                                  /* tp_methods */
+    PyObject_SelfIter,                                  /* tp_iter */
+    (iternextfunc)cwr_next,                             /* tp_iternext */
+    cwr_methods,                                        /* tp_methods */
     0,                                                  /* tp_members */
     0,                                                  /* tp_getset */
     0,                                                  /* tp_base */
@@ -2313,8 +2884,8 @@
     0,                                                  /* tp_dictoffset */
     0,                                                  /* tp_init */
     0,                                                  /* tp_alloc */
-    cwr_new,                                    /* tp_new */
-    PyObject_GC_Del,                    /* tp_free */
+    cwr_new,                                            /* tp_new */
+    PyObject_GC_Del,                                    /* tp_free */
 };
 
 
@@ -2538,6 +3109,115 @@
     return NULL;
 }
 
+static PyObject *
+permutations_reduce(permutationsobject *po)
+{
+    if (po->result == NULL) {
+        return Py_BuildValue("O(On)", Py_TYPE(po), po->pool, po->r);
+    } else if (po->stopped) {
+        return Py_BuildValue("O(()n)", Py_TYPE(po), po->r);
+    } else {
+        PyObject *indices=NULL, *cycles=NULL;
+        Py_ssize_t n, i;
+
+        /* we must pickle the indices and cycles and use them for setstate */
+        n = PyTuple_GET_SIZE(po->pool);
+        indices = PyTuple_New(n);
+        if (indices == NULL)
+            goto err;
+        for (i=0; i<n; i++){
+            PyObject* index = PyLong_FromSsize_t(po->indices[i]);
+            if (!index)
+                goto err;
+            PyTuple_SET_ITEM(indices, i, index);
+        }
+            
+        cycles = PyTuple_New(po->r);
+        if (cycles == NULL)
+            goto err;
+        for (i=0; i<po->r; i++)
+        {
+            PyObject* index = PyLong_FromSsize_t(po->cycles[i]);
+            if (!index)
+                goto err;
+            PyTuple_SET_ITEM(cycles, i, index);
+        }
+        return Py_BuildValue("O(On)(NN)", Py_TYPE(po),
+                             po->pool, po->r,
+                             indices, cycles);
+    err:
+        Py_XDECREF(indices);
+        Py_XDECREF(cycles);
+        return NULL;
+    }
+}
+
+static PyObject *
+permutations_setstate(permutationsobject *po, PyObject *state)
+{
+    PyObject *indices, *cycles, *result;
+    Py_ssize_t n, i;
+    
+    if (!PyArg_ParseTuple(state, "O!O!",
+                          &PyTuple_Type, &indices,
+                          &PyTuple_Type, &cycles))
+        return NULL;
+
+    n = PyTuple_GET_SIZE(po->pool);
+    if (PyTuple_GET_SIZE(indices) != n ||
+        PyTuple_GET_SIZE(cycles) != po->r)
+    {
+        PyErr_SetString(PyExc_ValueError, "invalid arguments");
+        return NULL;
+    }
+
+    for (i=0; i<n; i++)
+    {
+        PyObject* indexObject = PyTuple_GET_ITEM(indices, i);
+        Py_ssize_t index = PyLong_AsSsize_t(indexObject);
+        if (index < 0 && PyErr_Occurred())
+            return NULL; /* not an integer */
+        /* clamp the index */
+        if (index < 0)
+            index = 0;
+        else if (index > n-1)
+            index = n-1;
+        po->indices[i] = index;
+    }
+
+    for (i=0; i<po->r; i++)
+    {
+        PyObject* indexObject = PyTuple_GET_ITEM(cycles, i);
+        Py_ssize_t index = PyLong_AsSsize_t(indexObject);
+        if (index < 0 && PyErr_Occurred())
+            return NULL; /* not an integer */
+        if (index < 1)
+            index = 1;
+        else if (index > n-i)
+            index = n-i;
+        po->cycles[i] = index;
+    }
+    result = PyTuple_New(po->r);
+    if (result == NULL)
+        return NULL;
+    for (i=0; i<po->r; i++) {
+        PyObject *element = PyTuple_GET_ITEM(po->pool, po->indices[i]);
+        Py_INCREF(element);
+        PyTuple_SET_ITEM(result, i, element);
+    }
+    Py_CLEAR(po->result);
+    po->result = result;
+    Py_RETURN_NONE;
+}
+
+static PyMethodDef permutations_methods[] = {
+    {"__reduce__",      (PyCFunction)permutations_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)permutations_setstate,    METH_O,
+     setstate_doc},
+    {NULL,              NULL}   /* sentinel */
+};
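
For permutations both the indices and the cycles arrays are carried in the
state tuple; a sketch of the expected round-trip:

    >>> import pickle
    >>> from itertools import permutations
    >>> it = permutations('abc', 2)
    >>> next(it)
    ('a', 'b')
    >>> list(pickle.loads(pickle.dumps(it)))
    [('a', 'c'), ('b', 'a'), ('b', 'c'), ('c', 'a'), ('c', 'b')]
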
+
 PyDoc_STRVAR(permutations_doc,
 "permutations(iterable[, r]) --> permutations object\n\
 \n\
@@ -2574,7 +3254,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)permutations_next,            /* tp_iternext */
-    0,                                  /* tp_methods */
+    permutations_methods,               /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -2605,7 +3285,7 @@
     static char *kwargs[] = {"iterable", "func", NULL};
     PyObject *iterable;
     PyObject *it;
-    PyObject *binop = NULL;
+    PyObject *binop = Py_None;
     accumulateobject *lz;
 
     if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:accumulate",
@@ -2624,8 +3304,10 @@
         return NULL;
     }
 
-    Py_XINCREF(binop);
-    lz->binop = binop;
+    if (binop != Py_None) {
+        Py_XINCREF(binop);
+        lz->binop = binop;
+    }
     lz->total = NULL;
     lz->it = it;
     return (PyObject *)lz;
@@ -2681,6 +3363,31 @@
     return newtotal;
 }
 
+static PyObject *
+accumulate_reduce(accumulateobject *lz)
+{
+    return Py_BuildValue("O(OO)O", Py_TYPE(lz),
+                            lz->it, lz->binop?lz->binop:Py_None,
+                            lz->total?lz->total:Py_None);
+}
+
+static PyObject *
+accumulate_setstate(accumulateobject *lz, PyObject *state)
+{
+    Py_CLEAR(lz->total);
+    lz->total = state;
+    Py_INCREF(lz->total);
+    Py_RETURN_NONE;
+}
+
+static PyMethodDef accumulate_methods[] = {
+    {"__reduce__",      (PyCFunction)accumulate_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)accumulate_setstate,    METH_O,
+     setstate_doc},
+    {NULL,              NULL}   /* sentinel */
+};
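
accumulate pickles its source iterator and binary function through the
constructor (with None standing in for the default add), and restores the
running total via __setstate__; that appears to be why the constructor hunk
above now treats func=None like the default.  Expected behaviour, sketched:

    >>> import pickle, operator
    >>> from itertools import accumulate
    >>> it = accumulate([1, 2, 3, 4], operator.mul)
    >>> next(it)
    1
    >>> list(pickle.loads(pickle.dumps(it)))
    [2, 6, 24]
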
+
 PyDoc_STRVAR(accumulate_doc,
 "accumulate(iterable[, func]) --> accumulate object\n\
 \n\
@@ -2716,7 +3423,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)accumulate_next,      /* tp_iternext */
-    0,                                  /* tp_methods */
+    accumulate_methods,                 /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -2833,6 +3540,19 @@
     }
 }
 
+static PyObject *
+compress_reduce(compressobject *lz)
+{
+    return Py_BuildValue("O(OO)", Py_TYPE(lz),
+        lz->data, lz->selectors);
+}
+
+static PyMethodDef compress_methods[] = {
+    {"__reduce__",      (PyCFunction)compress_reduce,      METH_NOARGS,
+     reduce_doc},
+    {NULL,              NULL}   /* sentinel */
+};
+
 PyDoc_STRVAR(compress_doc,
 "compress(data, selectors) --> iterator over selected data\n\
 \n\
@@ -2870,7 +3590,7 @@
     0,                                                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                                  /* tp_iter */
     (iternextfunc)compress_next,        /* tp_iternext */
-    0,                                                                  /* tp_methods */
+    compress_methods,                                                   /* tp_methods */
     0,                                                                  /* tp_members */
     0,                                                                  /* tp_getset */
     0,                                                                  /* tp_base */
@@ -2977,6 +3697,19 @@
     }
 }
 
+static PyObject *
+filterfalse_reduce(filterfalseobject *lz)
+{
+    return Py_BuildValue("O(OO)", Py_TYPE(lz),
+        lz->func, lz->it);
+}
+
+static PyMethodDef filterfalse_methods[] = {
+    {"__reduce__",      (PyCFunction)filterfalse_reduce,      METH_NOARGS,
+     reduce_doc},
+    {NULL,              NULL}   /* sentinel */
+};
+
 PyDoc_STRVAR(filterfalse_doc,
 "filterfalse(function or None, sequence) --> filterfalse object\n\
 \n\
@@ -3013,7 +3746,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)filterfalse_next,     /* tp_iternext */
-    0,                                  /* tp_methods */
+    filterfalse_methods,                /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -3207,11 +3940,9 @@
     return Py_BuildValue("O(n)", Py_TYPE(lz), lz->cnt);
 }
 
-PyDoc_STRVAR(count_reduce_doc, "Return state information for pickling.");
-
 static PyMethodDef count_methods[] = {
     {"__reduce__",      (PyCFunction)count_reduce,      METH_NOARGS,
-     count_reduce_doc},
+     reduce_doc},
     {NULL,              NULL}   /* sentinel */
 };
 
@@ -3352,8 +4083,21 @@
 
 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
 
+static PyObject *
+repeat_reduce(repeatobject *ro)
+{
+    /* reduce to a constructor call: a new repeat iterator is built around
+     * the same object, and a finite remaining count is passed back in
+     * through the constructor (no __setstate__ is needed) */
+    if (ro->cnt >= 0)
+        return Py_BuildValue("O(On)", Py_TYPE(ro), ro->element, ro->cnt);
+    else
+        return Py_BuildValue("O(O)", Py_TYPE(ro), ro->element);
+}
+
 static PyMethodDef repeat_methods[] = {
     {"__length_hint__", (PyCFunction)repeat_len, METH_NOARGS, length_hint_doc},
+    {"__reduce__",      (PyCFunction)repeat_reduce, METH_NOARGS, reduce_doc},
     {NULL,              NULL}           /* sentinel */
 };
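
Since repeat keeps all of its state in the element and the remaining count,
__reduce__ alone is enough.  Sketch:

    >>> import pickle
    >>> from itertools import repeat
    >>> r = repeat('x', 3)
    >>> next(r)
    'x'
    >>> list(pickle.loads(pickle.dumps(r)))
    ['x', 'x']
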
 
@@ -3579,6 +4323,49 @@
     return result;
 }
 
+static PyObject *
+zip_longest_reduce(ziplongestobject *lz)
+{
+    
+    /* Create a new tuple with empty sequences where appropriate to pickle.
+     * Then use setstate to set the fillvalue
+     */
+    int i;
+    PyObject *args = PyTuple_New(PyTuple_GET_SIZE(lz->ittuple));
+    if (args == NULL)
+        return NULL;
+    for (i=0; i<PyTuple_GET_SIZE(lz->ittuple); i++) {
+        PyObject *elem = PyTuple_GET_ITEM(lz->ittuple, i);
+        if (elem == NULL) {
+            elem = PyTuple_New(0);
+            if (elem == NULL) {
+                Py_DECREF(args);
+                return NULL;
+            }
+        } else
+            Py_INCREF(elem);
+        PyTuple_SET_ITEM(args, i, elem);
+    }
+    return Py_BuildValue("ONO", Py_TYPE(lz), args, lz->fillvalue);
+}
+
+static PyObject *
+zip_longest_setstate(ziplongestobject *lz, PyObject *state)
+{
+    Py_CLEAR(lz->fillvalue);
+    lz->fillvalue = state;
+    Py_INCREF(lz->fillvalue);
+    Py_RETURN_NONE;
+}
+
+static PyMethodDef zip_longest_methods[] = {
+    {"__reduce__",      (PyCFunction)zip_longest_reduce,      METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)zip_longest_setstate,    METH_O,
+     setstate_doc},
+    {NULL,              NULL}   /* sentinel */
+};
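
zip_longest replaces already-exhausted inner iterators with empty tuples in
the reduce arguments and ships the fillvalue separately through __setstate__.
Expected round-trip, roughly:

    >>> import pickle
    >>> from itertools import zip_longest
    >>> it = zip_longest('ab', 'abc', fillvalue='-')
    >>> next(it)
    ('a', 'a')
    >>> list(pickle.loads(pickle.dumps(it)))
    [('b', 'b'), ('-', 'c')]
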
+
 PyDoc_STRVAR(zip_longest_doc,
 "zip_longest(iter1 [,iter2 [...]], [fillvalue=None]) --> zip_longest object\n\
 \n\
@@ -3620,7 +4407,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)zip_longest_next,     /* tp_iternext */
-    0,                                  /* tp_methods */
+    zip_longest_methods,                /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -3708,6 +4495,9 @@
         &product_type,
         &repeat_type,
         &groupby_type,
+        &_grouper_type,
+        &tee_type,
+        &teedataobject_type,
         NULL
     };
 
@@ -3725,11 +4515,5 @@
         PyModule_AddObject(m, name+1, (PyObject *)typelist[i]);
     }
 
-    if (PyType_Ready(&teedataobject_type) < 0)
-        return NULL;
-    if (PyType_Ready(&tee_type) < 0)
-        return NULL;
-    if (PyType_Ready(&_grouper_type) < 0)
-        return NULL;
     return m;
 }
diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c
--- a/Objects/bytearrayobject.c
+++ b/Objects/bytearrayobject.c
@@ -3003,7 +3003,7 @@
 }
 
 static PyObject *
-bytesarrayiter_length_hint(bytesiterobject *it)
+bytearrayiter_length_hint(bytesiterobject *it)
 {
     Py_ssize_t len = 0;
     if (it->it_seq)
@@ -3014,9 +3014,41 @@
 PyDoc_STRVAR(length_hint_doc,
     "Private method returning an estimate of len(list(it)).");
 
+static PyObject *
+bytearrayiter_reduce(bytesiterobject *it)
+{
+    if (it->it_seq != NULL) {
+        return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"),
+                             it->it_seq, it->it_index);
+    } else {
+        PyObject *u = PyUnicode_FromUnicode(NULL, 0);
+        if (u == NULL)
+            return NULL;
+        return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), u);
+    }
+}
+
+static PyObject *
+bytearrayiter_setstate(bytesiterobject *it, PyObject *state)
+{
+    Py_ssize_t index = PyLong_AsSsize_t(state);
+    if (index == -1 && PyErr_Occurred())
+        return NULL;
+    if (index < 0)
+        index = 0;
+    it->it_index = index;
+    Py_RETURN_NONE;
+}
+
+PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
+
 static PyMethodDef bytearrayiter_methods[] = {
-    {"__length_hint__", (PyCFunction)bytesarrayiter_length_hint, METH_NOARGS,
+    {"__length_hint__", (PyCFunction)bytearrayiter_length_hint, METH_NOARGS,
      length_hint_doc},
+     {"__reduce__",      (PyCFunction)bytearrayiter_reduce, METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)bytearrayiter_setstate, METH_O,
+     setstate_doc},
     {NULL, NULL} /* sentinel */
 };
 
diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c
--- a/Objects/bytesobject.c
+++ b/Objects/bytesobject.c
@@ -3074,9 +3074,43 @@
 PyDoc_STRVAR(length_hint_doc,
              "Private method returning an estimate of len(list(it)).");
 
+static PyObject *
+striter_reduce(striterobject *it)
+{
+    if (it->it_seq != NULL) {
+        return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"),
+                             it->it_seq, it->it_index);
+    } else {
+        PyObject *u = PyUnicode_FromUnicode(NULL, 0);
+        if (u == NULL)
+            return NULL;
+        return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), u);
+    }
+}
+
+PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
+
+static PyObject *
+striter_setstate(striterobject *it, PyObject *state)
+{
+    Py_ssize_t index = PyLong_AsSsize_t(state);
+    if (index == -1 && PyErr_Occurred())
+        return NULL;
+    if (index < 0)
+        index = 0;
+    it->it_index = index;
+    Py_RETURN_NONE;
+}
+
+PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
+
 static PyMethodDef striter_methods[] = {
     {"__length_hint__", (PyCFunction)striter_len, METH_NOARGS,
      length_hint_doc},
+    {"__reduce__",      (PyCFunction)striter_reduce, METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)striter_setstate, METH_O,
+     setstate_doc},
     {NULL,              NULL}           /* sentinel */
 };
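
Both the bytes and bytearray iterators reduce to iter(sequence) plus an index
that __setstate__ clamps to a sane value; roughly:

    >>> import pickle
    >>> it = iter(b'spam')
    >>> next(it)
    115
    >>> bytes(pickle.loads(pickle.dumps(it)))
    b'pam'
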
 
diff --git a/Objects/dictobject.c b/Objects/dictobject.c
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -2323,9 +2323,16 @@
 PyDoc_STRVAR(length_hint_doc,
              "Private method returning an estimate of len(list(it)).");
 
+static PyObject *
+dictiter_reduce(dictiterobject *di);
+
+PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
+
 static PyMethodDef dictiter_methods[] = {
     {"__length_hint__", (PyCFunction)dictiter_len, METH_NOARGS,
      length_hint_doc},
+     {"__reduce__", (PyCFunction)dictiter_reduce, METH_NOARGS,
+     reduce_doc},
     {NULL,              NULL}           /* sentinel */
 };
 
@@ -2560,6 +2567,52 @@
 };
 
 
+static PyObject *
+dictiter_reduce(dictiterobject *di)
+{
+    PyObject *list;
+    dictiterobject tmp;
+
+    list = PyList_New(0);
+    if (!list)
+        return NULL;
+
+    /* copy the iterator state */
+    tmp = *di;
+    Py_XINCREF(tmp.di_dict);
+
+    /* iterate the temporary into a list */
+    for(;;) {
+        PyObject *element = 0;
+        if (Py_TYPE(di) == &PyDictIterItem_Type)
+            element = dictiter_iternextitem(&tmp);
+        else if (Py_TYPE(di) == &PyDictIterKey_Type)
+            element = dictiter_iternextkey(&tmp);
+        else if (Py_TYPE(di) == &PyDictIterValue_Type)
+            element = dictiter_iternextvalue(&tmp);
+        else
+            assert(0);
+        if (element) {
+            if (PyList_Append(list, element)) {
+                Py_DECREF(element);
+                Py_DECREF(list);
+                Py_XDECREF(tmp.di_dict);
+                return NULL;
+            }
+            Py_DECREF(element);
+        } else
+            break;
+    }
+    Py_XDECREF(tmp.di_dict);
+    /* check for error */
+    if (tmp.di_dict != NULL) {
+        /* we have an error */
+        Py_DECREF(list);
+        return NULL;
+    }
+    return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), list);
+}
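
Because a dict iterator cannot be repositioned by index, __reduce__
materializes the remaining items into a list and reduces to a plain list
iterator; the pickled copy is therefore a snapshot and does not follow later
changes to the dict.  Sketch (dict ordering is unspecified, so only the
contents are compared):

    >>> import pickle
    >>> d = {'a': 1, 'b': 2, 'c': 3}
    >>> it = iter(d)
    >>> first = next(it)
    >>> rest = pickle.loads(pickle.dumps(it))
    >>> sorted([first] + list(rest)) == sorted(d)
    True
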
+
 /***********************************************/
 /* View objects for keys(), items(), values(). */
 /***********************************************/
diff --git a/Objects/enumobject.c b/Objects/enumobject.c
--- a/Objects/enumobject.c
+++ b/Objects/enumobject.c
@@ -158,6 +158,22 @@
     return result;
 }
 
+static PyObject *
+enum_reduce(enumobject *en)
+{
+    if (en->en_longindex != NULL)
+        return Py_BuildValue("O(OO)", Py_TYPE(en), en->en_sit, en->en_longindex);
+    else
+        return Py_BuildValue("O(On)", Py_TYPE(en), en->en_sit, en->en_index);
+}
+
+PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
+
+static PyMethodDef enum_methods[] = {
+    {"__reduce__", (PyCFunction)enum_reduce, METH_NOARGS, reduce_doc},
+    {NULL,              NULL}           /* sentinel */
+};
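
enumerate round-trips through its own constructor, passing the wrapped
iterator and the next index (or the arbitrary-precision index once it no
longer fits a Py_ssize_t).  Sketch:

    >>> import pickle
    >>> e = enumerate('abc')
    >>> next(e)
    (0, 'a')
    >>> list(pickle.loads(pickle.dumps(e)))
    [(1, 'b'), (2, 'c')]
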
+
 PyDoc_STRVAR(enum_doc,
 "enumerate(iterable[, start]) -> iterator for index, value of iterable\n"
 "\n"
@@ -197,7 +213,7 @@
     0,                              /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)enum_next,        /* tp_iternext */
-    0,                              /* tp_methods */
+    enum_methods,                   /* tp_methods */
     0,                              /* tp_members */
     0,                              /* tp_getset */
     0,                              /* tp_base */
@@ -319,8 +335,40 @@
 
 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
 
+static PyObject *
+reversed_reduce(reversedobject *ro)
+{
+    if (ro->seq)
+        return Py_BuildValue("O(O)n", Py_TYPE(ro), ro->seq, ro->index);
+    else
+        return Py_BuildValue("O(())", Py_TYPE(ro));
+}
+
+static PyObject *
+reversed_setstate(reversedobject *ro, PyObject *state)
+{
+    Py_ssize_t index = PyLong_AsSsize_t(state);
+    if (index == -1 && PyErr_Occurred())
+        return NULL;
+    if (ro->seq != 0) {
+        Py_ssize_t n = PySequence_Size(ro->seq);
+        if (n < 0)
+            return NULL;
+        if (index < -1)
+            index = -1;
+        else if (index > n-1)
+            index = n-1;
+        ro->index = index;
+    }
+    Py_RETURN_NONE;
+}
+
+PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
+
 static PyMethodDef reversediter_methods[] = {
     {"__length_hint__", (PyCFunction)reversed_len, METH_NOARGS, length_hint_doc},
+    {"__reduce__", (PyCFunction)reversed_reduce, METH_NOARGS, reduce_doc},
+    {"__setstate__", (PyCFunction)reversed_setstate, METH_O, setstate_doc},
     {NULL,              NULL}           /* sentinel */
 };
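
The generic reversed object (used for sequences without __reversed__, e.g.
tuples) pickles its sequence and current index, with __setstate__ clamping the
index into [-1, len-1].  Sketch:

    >>> import pickle
    >>> r = reversed((1, 2, 3))
    >>> next(r)
    3
    >>> list(pickle.loads(pickle.dumps(r)))
    [2, 1]
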
 
diff --git a/Objects/iterobject.c b/Objects/iterobject.c
--- a/Objects/iterobject.c
+++ b/Objects/iterobject.c
@@ -88,8 +88,38 @@
 
 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
 
+static PyObject *
+iter_reduce(seqiterobject *it)
+{
+    if (it->it_seq != NULL)
+        return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"),
+                             it->it_seq, it->it_index);
+    else
+        return Py_BuildValue("N(())", _PyObject_GetBuiltin("iter"));
+}
+
+PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
+
+static PyObject *
+iter_setstate(seqiterobject *it, PyObject *state)
+{
+    Py_ssize_t index = PyLong_AsSsize_t(state);
+    if (index == -1 && PyErr_Occurred())
+        return NULL;
+    if (it->it_seq != NULL) {
+        if (index < 0)
+            index = 0;
+        it->it_index = index;
+    }
+    Py_RETURN_NONE;
+}
+
+PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
+
 static PyMethodDef seqiter_methods[] = {
     {"__length_hint__", (PyCFunction)iter_len, METH_NOARGS, length_hint_doc},
+    {"__reduce__", (PyCFunction)iter_reduce, METH_NOARGS, reduce_doc},
+    {"__setstate__", (PyCFunction)iter_setstate, METH_O, setstate_doc},
     {NULL,              NULL}           /* sentinel */
 };
 
@@ -195,6 +225,21 @@
     return NULL;
 }
 
+static PyObject *
+calliter_reduce(calliterobject *it)
+{
+    if (it->it_callable != NULL && it->it_sentinel != NULL)
+        return Py_BuildValue("N(OO)", _PyObject_GetBuiltin("iter"),
+                             it->it_callable, it->it_sentinel);
+    else
+        return Py_BuildValue("N(())", _PyObject_GetBuiltin("iter"));
+}
+
+static PyMethodDef calliter_methods[] = {
+    {"__reduce__", (PyCFunction)calliter_reduce, METH_NOARGS, reduce_doc},
+    {NULL,              NULL}           /* sentinel */
+};
+
 PyTypeObject PyCallIter_Type = {
     PyVarObject_HEAD_INIT(&PyType_Type, 0)
     "callable_iterator",                        /* tp_name */
@@ -224,7 +269,7 @@
     0,                                          /* tp_weaklistoffset */
     PyObject_SelfIter,                          /* tp_iter */
     (iternextfunc)calliter_iternext,            /* tp_iternext */
-    0,                                          /* tp_methods */
+    calliter_methods,                           /* tp_methods */
 };
 
 
diff --git a/Objects/listobject.c b/Objects/listobject.c
--- a/Objects/listobject.c
+++ b/Objects/listobject.c
@@ -2660,11 +2660,18 @@
 static int listiter_traverse(listiterobject *, visitproc, void *);
 static PyObject *listiter_next(listiterobject *);
 static PyObject *listiter_len(listiterobject *);
+static PyObject *listiter_reduce_general(void *_it, int forward);
+static PyObject *listiter_reduce(listiterobject *);
+static PyObject *listiter_setstate(listiterobject *, PyObject *state);
 
 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
+PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
+PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
 
 static PyMethodDef listiter_methods[] = {
     {"__length_hint__", (PyCFunction)listiter_len, METH_NOARGS, length_hint_doc},
+    {"__reduce__", (PyCFunction)listiter_reduce, METH_NOARGS, reduce_doc},
+    {"__setstate__", (PyCFunction)listiter_setstate, METH_O, setstate_doc},
     {NULL,              NULL}           /* sentinel */
 };
 
@@ -2771,6 +2778,27 @@
     }
     return PyLong_FromLong(0);
 }
+
+static PyObject *
+listiter_reduce(listiterobject *it)
+{
+    return listiter_reduce_general(it, 1);
+}
+
+static PyObject *
+listiter_setstate(listiterobject *it, PyObject *state)
+{
+    long index = PyLong_AsLong(state);
+    if (index == -1 && PyErr_Occurred())
+        return NULL;
+    if (it->it_seq != NULL) {
+        if (index < 0)
+            index = 0;
+        it->it_index = index;
+    }
+    Py_RETURN_NONE;
+}
+
 /*********************** List Reverse Iterator **************************/
 
 typedef struct {
@@ -2784,9 +2812,13 @@
 static int listreviter_traverse(listreviterobject *, visitproc, void *);
 static PyObject *listreviter_next(listreviterobject *);
 static PyObject *listreviter_len(listreviterobject *);
+static PyObject *listreviter_reduce(listreviterobject *);
+static PyObject *listreviter_setstate(listreviterobject *, PyObject *);
 
 static PyMethodDef listreviter_methods[] = {
     {"__length_hint__", (PyCFunction)listreviter_len, METH_NOARGS, length_hint_doc},
+    {"__reduce__", (PyCFunction)listreviter_reduce, METH_NOARGS, reduce_doc},
+    {"__setstate__", (PyCFunction)listreviter_setstate, METH_O, setstate_doc},
     {NULL,              NULL}           /* sentinel */
 };
 
@@ -2883,3 +2915,51 @@
         len = 0;
     return PyLong_FromSsize_t(len);
 }
+
+static PyObject *
+listreviter_reduce(listreviterobject *it)
+{
+    return listiter_reduce_general(it, 0);
+}
+
+static PyObject *
+listreviter_setstate(listreviterobject *it, PyObject *state)
+{
+    Py_ssize_t index = PyLong_AsSsize_t(state);
+    if (index == -1 && PyErr_Occurred())
+        return NULL;
+    if (it->it_seq != NULL) {
+        if (index < -1)
+            index = -1;
+        else if (index > PyList_GET_SIZE(it->it_seq) - 1)
+            index = PyList_GET_SIZE(it->it_seq) - 1;
+        it->it_index = index;
+    }
+    Py_RETURN_NONE;
+}
+
+/* common pickling support */
+
+static PyObject *
+listiter_reduce_general(void *_it, int forward)
+{
+    PyObject *list;
+
+    /* the forward and reverse iterator structs differ; it_index has a different C type in each */
+    if (forward) {
+        listiterobject *it = (listiterobject *)_it;
+        if (it->it_seq)
+            return Py_BuildValue("N(O)l", _PyObject_GetBuiltin("iter"),
+                                 it->it_seq, it->it_index);
+    } else {
+        listreviterobject *it = (listreviterobject *)_it;
+        if (it->it_seq)
+            return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("reversed"),
+                                 it->it_seq, it->it_index);
+    }
+    /* empty iterator, create an empty list */
+    list = PyList_New(0);
+    if (list == NULL)
+        return NULL;
+    return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), list);
+}
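
listiter_reduce_general lets both directions reduce to a builtin call --
iter(seq) for the forward iterator, reversed(seq) for the reverse one -- with
the position restored by __setstate__.  Sketch of both:

    >>> import pickle
    >>> it = iter([10, 20, 30])
    >>> next(it)
    10
    >>> list(pickle.loads(pickle.dumps(it)))
    [20, 30]
    >>> rit = reversed([10, 20, 30])
    >>> next(rit)
    30
    >>> list(pickle.loads(pickle.dumps(rit)))
    [20, 10]
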
diff --git a/Objects/object.c b/Objects/object.c
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -1026,6 +1026,19 @@
     return obj;
 }
 
+/* Convenience function to get a builtin from its name */
+PyObject *
+_PyObject_GetBuiltin(const char *name)
+{
+    PyObject *mod, *attr;
+    mod = PyImport_ImportModule("builtins");
+    if (mod == NULL)
+        return NULL;
+    attr = PyObject_GetAttrString(mod, name);
+    Py_DECREF(mod);
+    return attr;
+}
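
The helper just looks a name up on the builtins module so that reduce tuples
can reference iter() and reversed() as objects; in Python terms it is roughly
(illustrative equivalent only):

    import builtins

    def get_builtin(name):
        # rough Python counterpart of _PyObject_GetBuiltin
        return getattr(builtins, name)
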
+
 /* Helper used when the __next__ method is removed from a type:
    tp_iternext is never NULL and can be safely called without checking
    on every iteration.
diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c
--- a/Objects/rangeobject.c
+++ b/Objects/rangeobject.c
@@ -964,9 +964,59 @@
 PyDoc_STRVAR(length_hint_doc,
              "Private method returning an estimate of len(list(it)).");
 
+static PyObject *
+rangeiter_reduce(rangeiterobject *r)
+{
+    PyObject *start=NULL, *stop=NULL, *step=NULL;
+    PyObject *range;
+    
+    /* create a range object for pickling */
+    start = PyLong_FromLong(r->start);
+    if (start == NULL)
+        goto err;
+    stop = PyLong_FromLong(r->start + r->len * r->step);
+    if (stop == NULL)
+        goto err;
+    step = PyLong_FromLong(r->step);
+    if (step == NULL)
+        goto err;
+    range = (PyObject*)make_range_object(&PyRange_Type,
+                               start, stop, step);
+    if (range == NULL)
+        goto err;
+    /* return the result */
+    return Py_BuildValue("N(N)i", _PyObject_GetBuiltin("iter"), range, r->index);
+err:
+    Py_XDECREF(start);
+    Py_XDECREF(stop);
+    Py_XDECREF(step);
+    return NULL;
+}
+
+static PyObject *
+rangeiter_setstate(rangeiterobject *r, PyObject *state)
+{
+    long index = PyLong_AsLong(state);
+    if (index == -1 && PyErr_Occurred())
+        return NULL;
+    if (index < 0 || index >= r->len) {
+        PyErr_SetString(PyExc_ValueError, "index out of range");
+        return NULL;
+    }
+    r->index = index;
+    Py_RETURN_NONE;
+}
+
+PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
+PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
+
 static PyMethodDef rangeiter_methods[] = {
     {"__length_hint__", (PyCFunction)rangeiter_len, METH_NOARGS,
         length_hint_doc},
+    {"__reduce__", (PyCFunction)rangeiter_reduce, METH_NOARGS,
+        reduce_doc},
+    {"__setstate__", (PyCFunction)rangeiter_setstate, METH_O,
+        setstate_doc},
     {NULL,              NULL}           /* sentinel */
 };
 
@@ -1095,9 +1145,51 @@
     return PyNumber_Subtract(r->len, r->index);
 }
 
+static PyObject *
+longrangeiter_reduce(longrangeiterobject *r)
+{
+    PyObject *product, *stop=NULL;
+    PyObject *range;
+
+    /* create a range object for pickling.  Must calculate the "stop" value */
+    product = PyNumber_Multiply(r->len, r->step);
+    if (product == NULL)
+        return NULL;
+    stop = PyNumber_Add(r->start, product);
+    Py_DECREF(product);
+    if (stop == NULL)
+        return NULL;
+    Py_INCREF(r->start);
+    Py_INCREF(r->step);
+    range = (PyObject*)make_range_object(&PyRange_Type,
+                               r->start, stop, r->step);
+    if (range == NULL) {
+        Py_DECREF(r->start);
+        Py_DECREF(stop);
+        Py_DECREF(r->step);
+        return NULL;
+    }
+
+    /* return the result */
+    return Py_BuildValue("N(N)O", _PyObject_GetBuiltin("iter"), range, r->index);
+}
+
+static PyObject *
+longrangeiter_setstate(longrangeiterobject *r, PyObject *state)
+{
+    Py_CLEAR(r->index);
+    r->index = state;
+    Py_INCREF(r->index);
+    Py_RETURN_NONE;
+}
+
 static PyMethodDef longrangeiter_methods[] = {
     {"__length_hint__", (PyCFunction)longrangeiter_len, METH_NOARGS,
         length_hint_doc},
+    {"__reduce__", (PyCFunction)longrangeiter_reduce, METH_NOARGS,
+        reduce_doc},
+    {"__setstate__", (PyCFunction)longrangeiter_setstate, METH_O,
+        setstate_doc},
     {NULL,              NULL}           /* sentinel */
 };
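
Both range-iterator flavours rebuild an equivalent range object (computing the
stop value from start, len and step) and reduce to iter(range(...)) plus the
current index.  Sketch:

    >>> import pickle
    >>> it = iter(range(2, 10, 2))
    >>> next(it)
    2
    >>> list(pickle.loads(pickle.dumps(it)))
    [4, 6, 8]
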
 
diff --git a/Objects/setobject.c b/Objects/setobject.c
--- a/Objects/setobject.c
+++ b/Objects/setobject.c
@@ -819,8 +819,51 @@
 
 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
 
+static PyObject *setiter_iternext(setiterobject *si);
+
+static PyObject *
+setiter_reduce(setiterobject *si)
+{
+    PyObject *list;
+    setiterobject tmp;
+
+    list = PyList_New(0);
+    if (!list)
+        return NULL;
+
+    /* copy the iterator state */
+    tmp = *si;
+    Py_XINCREF(tmp.si_set);
+
+    /* iterate the temporary into a list */
+    for(;;) {
+        PyObject *element = setiter_iternext(&tmp);
+        if (element) {
+            if (PyList_Append(list, element)) {
+                Py_DECREF(element);
+                Py_DECREF(list);
+                Py_XDECREF(tmp.si_set);
+                return NULL;
+            }
+            Py_DECREF(element);
+        } else
+            break;
+    }
+    Py_XDECREF(tmp.si_set);
+    /* check for error */
+    if (tmp.si_set != NULL) {
+        /* we have an error */
+        Py_DECREF(list);
+        return NULL;
+    }
+    return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), list);
+}
+
+PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
+
 static PyMethodDef setiter_methods[] = {
     {"__length_hint__", (PyCFunction)setiter_len, METH_NOARGS, length_hint_doc},
+    {"__reduce__", (PyCFunction)setiter_reduce, METH_NOARGS, reduce_doc},
     {NULL,              NULL}           /* sentinel */
 };
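
Like the dict iterators, a set iterator reduces to a snapshot list of the
remaining elements, so the unpickled copy is independent of the original set.
Sketch:

    >>> import pickle
    >>> s = {1, 2, 3}
    >>> it = iter(s)
    >>> first = next(it)
    >>> rest = pickle.loads(pickle.dumps(it))
    >>> sorted([first] + list(rest)) == sorted(s)
    True
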
 
@@ -1964,8 +2007,6 @@
     return result;
 }
 
-PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
-
 static PyObject *
 set_sizeof(PySetObject *so)
 {
diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c
--- a/Objects/tupleobject.c
+++ b/Objects/tupleobject.c
@@ -967,8 +967,39 @@
 
 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
 
+static PyObject *
+tupleiter_reduce(tupleiterobject *it)
+{
+    if (it->it_seq)
+        return Py_BuildValue("N(O)l", _PyObject_GetBuiltin("iter"),
+                             it->it_seq, it->it_index);
+    else
+        return Py_BuildValue("N(())", _PyObject_GetBuiltin("iter"));
+}
+
+static PyObject *
+tupleiter_setstate(tupleiterobject *it, PyObject *state)
+{
+    long index = PyLong_AsLong(state);
+    if (index == -1 && PyErr_Occurred())
+        return NULL;
+    if (it->it_seq != NULL) {
+        if (index < 0)
+            index = 0;
+        else if (index > PyTuple_GET_SIZE(it->it_seq))
+            index = PyTuple_GET_SIZE(it->it_seq);
+        it->it_index = index;
+    }
+    Py_RETURN_NONE;
+}
+
+PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
+PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
+
 static PyMethodDef tupleiter_methods[] = {
     {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
+    {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
+    {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
     {NULL,              NULL}           /* sentinel */
 };
 
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -5393,9 +5393,11 @@
 #if (SIZEOF_LONG == 8)
 # define FAST_CHAR_MASK         0x8000800080008000L
 # define SWAPPED_FAST_CHAR_MASK 0x0080008000800080L
+# define STRIPPED_MASK          0x00FF00FF00FF00FFL
 #elif (SIZEOF_LONG == 4)
 # define FAST_CHAR_MASK         0x80008000L
 # define SWAPPED_FAST_CHAR_MASK 0x00800080L
+# define STRIPPED_MASK          0x00FF00FFL
 #else
 # error C 'long' size should be either 4 or 8!
 #endif
@@ -5497,7 +5499,6 @@
             void *data = PyUnicode_DATA(unicode);
             while (_q < aligned_end) {
                 unsigned long block = * (unsigned long *) _q;
-                unsigned short *pblock = (unsigned short*)&block;
                 Py_UCS4 maxch;
                 if (native_ordering) {
                     /* Can use buffer directly */
@@ -5506,23 +5507,22 @@
                 }
                 else {
                     /* Need to byte-swap */
-                    unsigned char *_p = (unsigned char*)pblock;
                     if (block & SWAPPED_FAST_CHAR_MASK)
                         break;
-                    _p[0] = _q[1];
-                    _p[1] = _q[0];
-                    _p[2] = _q[3];
-                    _p[3] = _q[2];
-#if (SIZEOF_LONG == 8)
-                    _p[4] = _q[5];
-                    _p[5] = _q[4];
-                    _p[6] = _q[7];
-                    _p[7] = _q[6];
-#endif
-                }
-                maxch = Py_MAX(pblock[0], pblock[1]);
+                    block = ((block >> 8) & STRIPPED_MASK) |
+                            ((block & STRIPPED_MASK) << 8);
+                }
+                maxch = (Py_UCS2)(block & 0xFFFF);
 #if SIZEOF_LONG == 8
-                maxch = Py_MAX(maxch, Py_MAX(pblock[2], pblock[3]));
+                ch = (Py_UCS2)((block >> 16) & 0xFFFF);
+                maxch = Py_MAX(maxch, ch);
+                ch = (Py_UCS2)((block >> 32) & 0xFFFF);
+                maxch = Py_MAX(maxch, ch);
+                ch = (Py_UCS2)(block >> 48);
+                maxch = Py_MAX(maxch, ch);
+#else
+                ch = (Py_UCS2)(block >> 16);
+                maxch = Py_MAX(maxch, ch);
 #endif
                 if (maxch > PyUnicode_MAX_CHAR_VALUE(unicode)) {
                     if (unicode_widen(&unicode, maxch) < 0)
@@ -5530,11 +5530,24 @@
                     kind = PyUnicode_KIND(unicode);
                     data = PyUnicode_DATA(unicode);
                 }
-                PyUnicode_WRITE(kind, data, outpos++, pblock[0]);
-                PyUnicode_WRITE(kind, data, outpos++, pblock[1]);
+#ifdef BYTEORDER_IS_LITTLE_ENDIAN
+                PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)(block & 0xFFFF));
 #if SIZEOF_LONG == 8
-                PyUnicode_WRITE(kind, data, outpos++, pblock[2]);
-                PyUnicode_WRITE(kind, data, outpos++, pblock[3]);
+                PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 16) & 0xFFFF));
+                PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 32) & 0xFFFF));
+                PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 48)));
+#else
+                PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)(block >> 16));
+#endif
+#else
+#if SIZEOF_LONG == 8
+                PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 48)));
+                PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 32) & 0xFFFF));
+                PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 16) & 0xFFFF));
+#else
+                PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)(block >> 16));
+#endif
+                PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)(block & 0xFFFF));
 #endif
                 _q += SIZEOF_LONG;
             }
@@ -14382,9 +14395,43 @@
 
 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
 
+static PyObject *
+unicodeiter_reduce(unicodeiterobject *it)
+{
+    if (it->it_seq != NULL) {
+        return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"),
+                             it->it_seq, it->it_index);
+    } else {
+        PyObject *u = PyUnicode_FromUnicode(NULL, 0);
+        if (u == NULL)
+            return NULL;
+        return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), u);
+    }
+}
+
+PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
+
+static PyObject *
+unicodeiter_setstate(unicodeiterobject *it, PyObject *state)
+{
+    Py_ssize_t index = PyLong_AsSsize_t(state);
+    if (index == -1 && PyErr_Occurred())
+        return NULL;
+    if (index < 0)
+        index = 0;
+    it->it_index = index;
+    Py_RETURN_NONE;
+}
+
+PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
+
 static PyMethodDef unicodeiter_methods[] = {
     {"__length_hint__", (PyCFunction)unicodeiter_len, METH_NOARGS,
      length_hint_doc},
+    {"__reduce__",      (PyCFunction)unicodeiter_reduce, METH_NOARGS,
+     reduce_doc},
+    {"__setstate__",    (PyCFunction)unicodeiter_setstate, METH_O,
+     setstate_doc},
     {NULL,      NULL}       /* sentinel */
 };
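
The str iterator follows the common sequence-iterator scheme (iter(seq) plus a
clamped index); roughly:

    >>> import pickle
    >>> it = iter('hello')
    >>> next(it)
    'h'
    >>> ''.join(pickle.loads(pickle.dumps(it)))
    'ello'
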
 
diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c
--- a/Python/bltinmodule.c
+++ b/Python/bltinmodule.c
@@ -438,6 +438,19 @@
     }
 }
 
+static PyObject *
+filter_reduce(filterobject *lz)
+{
+    return Py_BuildValue("O(OO)", Py_TYPE(lz), lz->func, lz->it);
+}
+
+PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
+
+static PyMethodDef filter_methods[] = {
+    {"__reduce__",   (PyCFunction)filter_reduce,   METH_NOARGS, reduce_doc},
+    {NULL,           NULL}           /* sentinel */
+};
+
 PyDoc_STRVAR(filter_doc,
 "filter(function or None, iterable) --> filter object\n\
 \n\
@@ -474,7 +487,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)filter_next,          /* tp_iternext */
-    0,                                  /* tp_methods */
+    filter_methods,                     /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -1054,6 +1067,31 @@
     return result;
 }
 
+static PyObject *
+map_reduce(mapobject *lz)
+{
+    Py_ssize_t numargs = PyTuple_GET_SIZE(lz->iters);
+    PyObject *args = PyTuple_New(numargs+1);
+    Py_ssize_t i;
+    if (args == NULL)
+        return NULL;
+    Py_INCREF(lz->func);
+    PyTuple_SET_ITEM(args, 0, lz->func);
+    for (i = 0; i<numargs; i++){
+        PyObject *it = PyTuple_GET_ITEM(lz->iters, i);
+        Py_INCREF(it);
+        PyTuple_SET_ITEM(args, i+1, it);
+    }
+
+    return Py_BuildValue("ON", Py_TYPE(lz), args);
+}
+
+static PyMethodDef map_methods[] = {
+    {"__reduce__",   (PyCFunction)map_reduce,   METH_NOARGS, reduce_doc},
+    {NULL,           NULL}           /* sentinel */
+};
+
+
 PyDoc_STRVAR(map_doc,
 "map(func, *iterables) --> map object\n\
 \n\
@@ -1090,7 +1128,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)map_next,     /* tp_iternext */
-    0,                                  /* tp_methods */
+    map_methods,                        /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
@@ -2238,6 +2276,18 @@
     return result;
 }
 
+static PyObject *
+zip_reduce(zipobject *lz)
+{
+    /* Just recreate the zip with the internal iterator tuple */
+    return Py_BuildValue("OO", Py_TYPE(lz), lz->ittuple);
+}
+
+static PyMethodDef zip_methods[] = {
+    {"__reduce__",   (PyCFunction)zip_reduce,   METH_NOARGS, reduce_doc},
+    {NULL,           NULL}           /* sentinel */
+};
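
filter, map and zip carry no state beyond their function and inner iterators,
so a bare __reduce__ that rebuilds the object from those is enough.  Sketch
(abs and the list/str iterators are all picklable):

    >>> import pickle
    >>> m = map(abs, [-1, -2, -3])
    >>> next(m)
    1
    >>> list(pickle.loads(pickle.dumps(m)))
    [2, 3]
    >>> z = zip('ab', [1, 2])
    >>> next(z)
    ('a', 1)
    >>> list(pickle.loads(pickle.dumps(z)))
    [('b', 2)]
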
+
 PyDoc_STRVAR(zip_doc,
 "zip(iter1 [,iter2 [...]]) --> zip object\n\
 \n\
@@ -2276,7 +2326,7 @@
     0,                                  /* tp_weaklistoffset */
     PyObject_SelfIter,                  /* tp_iter */
     (iternextfunc)zip_next,     /* tp_iternext */
-    0,                                  /* tp_methods */
+    zip_methods,                        /* tp_methods */
     0,                                  /* tp_members */
     0,                                  /* tp_getset */
     0,                                  /* tp_base */
diff --git a/Python/pythonrun.c b/Python/pythonrun.c
--- a/Python/pythonrun.c
+++ b/Python/pythonrun.c
@@ -1356,56 +1356,67 @@
     _Py_IDENTIFIER(offset);
     _Py_IDENTIFIER(text);
 
+    *message = NULL;
+
     /* new style errors.  `err' is an instance */
-
-    if (! (v = _PyObject_GetAttrId(err, &PyId_msg)))
-        goto finally;
-    *message = v;
-
-    if (!(v = _PyObject_GetAttrId(err, &PyId_filename)))
-        goto finally;
-    if (v == Py_None)
-        *filename = NULL;
-    else if (! (*filename = _PyUnicode_AsString(v)))
+    *message = _PyObject_GetAttrId(err, &PyId_msg);
+    if (!*message)
         goto finally;
 
-    Py_DECREF(v);
-    if (!(v = _PyObject_GetAttrId(err, &PyId_lineno)))
+    v = _PyObject_GetAttrId(err, &PyId_filename);
+    if (!v)
+        goto finally;
+    if (v == Py_None) {
+        Py_DECREF(v);
+        *filename = NULL;
+    }
+    else {
+        *filename = _PyUnicode_AsString(v);
+        Py_DECREF(v);
+        if (!*filename)
+            goto finally;
+    }
+
+    v = _PyObject_GetAttrId(err, &PyId_lineno);
+    if (!v)
         goto finally;
     hold = PyLong_AsLong(v);
     Py_DECREF(v);
-    v = NULL;
     if (hold < 0 && PyErr_Occurred())
         goto finally;
     *lineno = (int)hold;
 
-    if (!(v = _PyObject_GetAttrId(err, &PyId_offset)))
+    v = _PyObject_GetAttrId(err, &PyId_offset);
+    if (!v)
         goto finally;
     if (v == Py_None) {
         *offset = -1;
         Py_DECREF(v);
-        v = NULL;
     } else {
         hold = PyLong_AsLong(v);
         Py_DECREF(v);
-        v = NULL;
         if (hold < 0 && PyErr_Occurred())
             goto finally;
         *offset = (int)hold;
     }
 
-    if (!(v = _PyObject_GetAttrId(err, &PyId_text)))
+    v = _PyObject_GetAttrId(err, &PyId_text);
+    if (!v)
         goto finally;
-    if (v == Py_None)
+    if (v == Py_None) {
+        Py_DECREF(v);
         *text = NULL;
-    else if (!PyUnicode_Check(v) ||
-             !(*text = _PyUnicode_AsString(v)))
-        goto finally;
-    Py_DECREF(v);
+    }
+    else {
+        *text = _PyUnicode_AsString(v);
+        Py_DECREF(v);
+        if (!*text)
+            goto finally;
+    }
     return 1;
 
 finally:
-    Py_XDECREF(v);
+    Py_XDECREF(*message);
     return 0;
 }
 
diff --git a/Tools/scripts/abitype.py b/Tools/scripts/abitype.py
--- a/Tools/scripts/abitype.py
+++ b/Tools/scripts/abitype.py
@@ -3,34 +3,6 @@
 # Usage: abitype.py < old_code > new_code
 import re, sys
 
-############ Simplistic C scanner ##################################
-tokenizer = re.compile(
-    r"(?P<preproc>#.*\n)"
-    r"|(?P<comment>/\*.*?\*/)"
-    r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
-    r"|(?P<ws>[ \t\n]+)"
-    r"|(?P<other>.)",
-    re.MULTILINE)
-
-tokens = []
-source = sys.stdin.read()
-pos = 0
-while pos != len(source):
-    m = tokenizer.match(source, pos)
-    tokens.append([m.lastgroup, m.group()])
-    pos += len(tokens[-1][1])
-    if tokens[-1][0] == 'preproc':
-        # continuation lines are considered
-        # only in preprocess statements
-        while tokens[-1][1].endswith('\\\n'):
-            nl = source.find('\n', pos)
-            if nl == -1:
-                line = source[pos:]
-            else:
-                line = source[pos:nl+1]
-            tokens[-1][1] += line
-            pos += len(line)
-
 ###### Replacement of PyTypeObject static instances ##############
 
 # classify each token, giving it a one-letter code:
@@ -79,7 +51,7 @@
     while tokens[pos][0] in ('ws', 'comment'):
         pos += 1
     if tokens[pos][1] != 'PyVarObject_HEAD_INIT':
-        raise Exception, '%s has no PyVarObject_HEAD_INIT' % name
+        raise Exception('%s has no PyVarObject_HEAD_INIT' % name)
     while tokens[pos][1] != ')':
         pos += 1
     pos += 1
@@ -183,18 +155,48 @@
     return '\n'.join(res)
 
 
-# Main loop: replace all static PyTypeObjects until
-# there are none left.
-while 1:
-    c = classify()
-    m = re.search('(SW)?TWIW?=W?{.*?};', c)
-    if not m:
-        break
-    start = m.start()
-    end = m.end()
-    name, fields = get_fields(start, m)
-    tokens[start:end] = [('',make_slots(name, fields))]
+if __name__ == '__main__':
 
-# Output result to stdout
-for t, v in tokens:
-    sys.stdout.write(v)
+    ############ Simplistic C scanner ##################################
+    tokenizer = re.compile(
+        r"(?P<preproc>#.*\n)"
+        r"|(?P<comment>/\*.*?\*/)"
+        r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
+        r"|(?P<ws>[ \t\n]+)"
+        r"|(?P<other>.)",
+        re.MULTILINE)
+
+    tokens = []
+    source = sys.stdin.read()
+    pos = 0
+    while pos != len(source):
+        m = tokenizer.match(source, pos)
+        tokens.append([m.lastgroup, m.group()])
+        pos += len(tokens[-1][1])
+        if tokens[-1][0] == 'preproc':
+            # continuation lines are considered
+            # only in preprocess statements
+            while tokens[-1][1].endswith('\\\n'):
+                nl = source.find('\n', pos)
+                if nl == -1:
+                    line = source[pos:]
+                else:
+                    line = source[pos:nl+1]
+                tokens[-1][1] += line
+                pos += len(line)
+
+    # Main loop: replace all static PyTypeObjects until
+    # there are none left.
+    while 1:
+        c = classify()
+        m = re.search('(SW)?TWIW?=W?{.*?};', c)
+        if not m:
+            break
+        start = m.start()
+        end = m.end()
+        name, fields = get_fields(start, m)
+        tokens[start:end] = [('',make_slots(name, fields))]
+
+    # Output result to stdout
+    for t, v in tokens:
+        sys.stdout.write(v)
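The abitype.py change moves the ad-hoc C scanner and the rewrite loop under an if __name__ == '__main__': guard, presumably so the module can be imported (the changeset also adds Lib/test/test_tools.py) without consuming stdin, and converts the old raise Exception, msg statement to a call. A minimal sketch of the same named-group scanning technique, assuming only the standard re module (tok and scan are illustrative names):

    import re

    # Each alternative is a named group; m.lastgroup reports which one
    # matched, which is how abitype.py classifies its tokens.
    tok = re.compile(r"(?P<ident>[A-Za-z_]\w*)|(?P<ws>\s+)|(?P<other>.)")

    def scan(source):
        pos, out = 0, []
        while pos != len(source):
            m = tok.match(source, pos)
            out.append((m.lastgroup, m.group()))
            pos = m.end()
        return out

    print(scan("static PyTypeObject Foo;"))
    # [('ident', 'static'), ('ws', ' '), ('ident', 'PyTypeObject'), ...]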
diff --git a/Tools/scripts/find_recursionlimit.py b/Tools/scripts/find_recursionlimit.py
--- a/Tools/scripts/find_recursionlimit.py
+++ b/Tools/scripts/find_recursionlimit.py
@@ -106,14 +106,16 @@
     else:
         print("Yikes!")
 
-limit = 1000
-while 1:
-    check_limit(limit, "test_recurse")
-    check_limit(limit, "test_add")
-    check_limit(limit, "test_repr")
-    check_limit(limit, "test_init")
-    check_limit(limit, "test_getattr")
-    check_limit(limit, "test_getitem")
-    check_limit(limit, "test_cpickle")
-    print("Limit of %d is fine" % limit)
-    limit = limit + 100
+if __name__ == '__main__':
+
+    limit = 1000
+    while 1:
+        check_limit(limit, "test_recurse")
+        check_limit(limit, "test_add")
+        check_limit(limit, "test_repr")
+        check_limit(limit, "test_init")
+        check_limit(limit, "test_getattr")
+        check_limit(limit, "test_getitem")
+        check_limit(limit, "test_cpickle")
+        print("Limit of %d is fine" % limit)
+        limit = limit + 100
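find_recursionlimit.py gets the same treatment: the probing loop now runs only when the file is executed directly. A minimal sketch of the guard pattern, with a deliberately simplified check_limit (the real one takes the name of a test function):

    import sys

    def check_limit(n):
        # Simplified stand-in for the script's probe functions.
        sys.setrecursionlimit(n)
        print("recursion limit set to %d" % n)

    if __name__ == '__main__':
        # Runs when executed as a script, but not on plain import,
        # so a test suite can import the module without side effects.
        check_limit(1000)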
diff --git a/Tools/scripts/findnocoding.py b/Tools/scripts/findnocoding.py
--- a/Tools/scripts/findnocoding.py
+++ b/Tools/scripts/findnocoding.py
@@ -76,29 +76,31 @@
     -c: recognize Python source files trying to compile them
     -d: debug output""" % sys.argv[0]
 
-try:
-    opts, args = getopt.getopt(sys.argv[1:], 'cd')
-except getopt.error as msg:
-    print(msg, file=sys.stderr)
-    print(usage, file=sys.stderr)
-    sys.exit(1)
+if __name__ == '__main__':
 
-is_python = pysource.looks_like_python
-debug = False
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], 'cd')
+    except getopt.error as msg:
+        print(msg, file=sys.stderr)
+        print(usage, file=sys.stderr)
+        sys.exit(1)
 
-for o, a in opts:
-    if o == '-c':
-        is_python = pysource.can_be_compiled
-    elif o == '-d':
-        debug = True
+    is_python = pysource.looks_like_python
+    debug = False
 
-if not args:
-    print(usage, file=sys.stderr)
-    sys.exit(1)
+    for o, a in opts:
+        if o == '-c':
+            is_python = pysource.can_be_compiled
+        elif o == '-d':
+            debug = True
 
-for fullpath in pysource.walk_python_files(args, is_python):
-    if debug:
-        print("Testing for coding: %s" % fullpath)
-    result = needs_declaration(fullpath)
-    if result:
-        print(fullpath)
+    if not args:
+        print(usage, file=sys.stderr)
+        sys.exit(1)
+
+    for fullpath in pysource.walk_python_files(args, is_python):
+        if debug:
+            print("Testing for coding: %s" % fullpath)
+        result = needs_declaration(fullpath)
+        if result:
+            print(fullpath)
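findnocoding.py likewise wraps its command-line handling in the main guard. For reference, a small sketch of the getopt pattern used above; the parse helper and its return values are illustrative only:

    import getopt

    def parse(argv):
        opts, args = getopt.getopt(argv, 'cd')   # raises getopt.error on bad options
        compile_mode = ('-c', '') in opts        # -c: try to compile candidates
        debug = ('-d', '') in opts               # -d: debug output
        return compile_mode, debug, args

    print(parse(['-d', 'some/dir']))             # (False, True, ['some/dir'])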
diff --git a/Tools/scripts/fixcid.py b/Tools/scripts/fixcid.py
--- a/Tools/scripts/fixcid.py
+++ b/Tools/scripts/fixcid.py
@@ -292,7 +292,7 @@
         if not words: continue
         if len(words) == 3 and words[0] == 'struct':
             words[:2] = [words[0] + ' ' + words[1]]
-        elif len(words) <> 2:
+        elif len(words) != 2:
             err(substfile + '%s:%r: warning: bad line: %r' % (substfile, lineno, line))
             continue
         if Reverse:
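The only change to fixcid.py is syntactic: Python 3 removed the old <> spelling of "not equal", so the comparison must be written with !=. A one-line illustration:

    words = 'struct stat extra'.split()
    if len(words) != 2:        # "len(words) <> 2" is a SyntaxError in Python 3
        print('warning: bad line: %r' % words)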
diff --git a/Tools/scripts/md5sum.py b/Tools/scripts/md5sum.py
--- a/Tools/scripts/md5sum.py
+++ b/Tools/scripts/md5sum.py
@@ -20,7 +20,7 @@
 import sys
 import os
 import getopt
-import md5
+from hashlib import md5
 
 def sum(*files):
     sts = 0
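md5sum.py switches from the removed md5 module to hashlib's md5() constructor, which hashes bytes and exposes the familiar update()/hexdigest() methods. A self-contained sketch (file_md5 is an illustrative helper, not the script's own code):

    from hashlib import md5

    def file_md5(path, bufsize=8096):
        h = md5()
        with open(path, 'rb') as fp:          # hashing works on bytes
            while True:
                block = fp.read(bufsize)
                if not block:
                    break
                h.update(block)
        return h.hexdigest()

    print(md5(b'hello').hexdigest())          # 5d41402abc4b2a76b9719d911017c592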
diff --git a/Tools/scripts/parseentities.py b/Tools/scripts/parseentities.py
--- a/Tools/scripts/parseentities.py
+++ b/Tools/scripts/parseentities.py
@@ -13,7 +13,6 @@
 
 """
 import re,sys
-import TextTools
 
 entityRE = re.compile('<!ENTITY +(\w+) +CDATA +"([^"]+)" +-- +((?:.|\n)+?) *-->')
 
@@ -45,7 +44,7 @@
                 charcode = repr(charcode)
         else:
             charcode = repr(charcode)
-        comment = TextTools.collapse(comment)
+        comment = ' '.join(comment.split())
         f.write("    '%s':\t%s,  \t# %s\n" % (name,charcode,comment))
     f.write('\n}\n')
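parseentities.py drops its dependency on the external TextTools module; the plain str.split()/str.join() idiom collapses any run of whitespace, including newlines, to a single space:

    comment = "capital AE diphthong\n          (ligature)"
    print(' '.join(comment.split()))   # -> capital AE diphthong (ligature)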
 
diff --git a/Tools/scripts/pdeps.py b/Tools/scripts/pdeps.py
--- a/Tools/scripts/pdeps.py
+++ b/Tools/scripts/pdeps.py
@@ -76,10 +76,9 @@
             nextline = fp.readline()
             if not nextline: break
             line = line[:-1] + nextline
-        if m_import.match(line) >= 0:
-            (a, b), (a1, b1) = m_import.regs[:2]
-        elif m_from.match(line) >= 0:
-            (a, b), (a1, b1) = m_from.regs[:2]
+        m_found = m_import.match(line) or m_from.match(line)
+        if m_found:
+            (a, b), (a1, b1) = m_found.regs[:2]
         else: continue
         words = line[a1:b1].split(',')
         # print '#', line, words
@@ -87,6 +86,7 @@
             word = word.strip()
             if word not in list:
                 list.append(word)
+    fp.close()
 
 
 # Compute closure (this is in fact totally general)
@@ -123,7 +123,7 @@
 def inverse(table):
     inv = {}
     for key in table.keys():
-        if not inv.has_key(key):
+        if key not in inv:
             inv[key] = []
         for item in table[key]:
             store(inv, item, key)
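The pdeps.py cleanup modernizes two idioms and closes the file handle it opens: re match objects are simply tested for truth (the old comparison against >= 0 dates from when matching returned a string offset), and dictionary membership uses the in operator now that has_key() is gone. A small sketch, with an illustrative pattern rather than the script's real one:

    import re

    m_import = re.compile(r'^import\s+(.*)')
    m = m_import.match('import os, sys')
    if m:                          # None means no match; a match object is truthy
        print(m.group(1))          # -> os, sys

    table = {}
    if 'pdeps' not in table:       # replaces table.has_key('pdeps')
        table['pdeps'] = []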

-- 
Repository URL: http://hg.python.org/cpython

