[pypy-commit] pypy vendor/stdlib: update python 2 stdlib to tip (2.7.3+)

bdkearns noreply at buildbot.pypy.org
Tue Jan 29 13:14:31 CET 2013


Author: Brian Kearns <bdkearns at gmail.com>
Branch: vendor/stdlib
Changeset: r60666:8a3b5bfd266e
Date: 2013-01-29 02:32 -0500
http://bitbucket.org/pypy/pypy/changeset/8a3b5bfd266e/

Log:	update python 2 stdlib to tip (2.7.3+)

diff too long, truncating to 2000 out of 17105 lines

diff --git a/lib-python/2.7/BaseHTTPServer.py b/lib-python/2.7/BaseHTTPServer.py
--- a/lib-python/2.7/BaseHTTPServer.py
+++ b/lib-python/2.7/BaseHTTPServer.py
@@ -447,13 +447,13 @@
         specified as subsequent arguments (it's just like
         printf!).
 
-        The client host and current date/time are prefixed to
-        every message.
+        The client ip address and current date/time are prefixed to every
+        message.
 
         """
 
         sys.stderr.write("%s - - [%s] %s\n" %
-                         (self.address_string(),
+                         (self.client_address[0],
                           self.log_date_time_string(),
                           format%args))
 
diff --git a/lib-python/2.7/CGIHTTPServer.py b/lib-python/2.7/CGIHTTPServer.py
--- a/lib-python/2.7/CGIHTTPServer.py
+++ b/lib-python/2.7/CGIHTTPServer.py
@@ -84,9 +84,11 @@
         path begins with one of the strings in self.cgi_directories
         (and the next character is a '/' or the end of the string).
         """
-        splitpath = _url_collapse_path_split(self.path)
-        if splitpath[0] in self.cgi_directories:
-            self.cgi_info = splitpath
+        collapsed_path = _url_collapse_path(self.path)
+        dir_sep = collapsed_path.find('/', 1)
+        head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:]
+        if head in self.cgi_directories:
+            self.cgi_info = head, tail
             return True
         return False
 
@@ -298,44 +300,46 @@
                 self.log_message("CGI script exited OK")
 
 
-# TODO(gregory.p.smith): Move this into an appropriate library.
-def _url_collapse_path_split(path):
+def _url_collapse_path(path):
     """
     Given a URL path, remove extra '/'s and '.' path elements and collapse
-    any '..' references.
+    any '..' references and returns a colllapsed path.
 
     Implements something akin to RFC-2396 5.2 step 6 to parse relative paths.
+    The utility of this function is limited to is_cgi method and helps
+    preventing some security attacks.
 
     Returns: A tuple of (head, tail) where tail is everything after the final /
     and head is everything before it.  Head will always start with a '/' and,
     if it contains anything else, never have a trailing '/'.
 
     Raises: IndexError if too many '..' occur within the path.
+
     """
     # Similar to os.path.split(os.path.normpath(path)) but specific to URL
     # path semantics rather than local operating system semantics.
-    path_parts = []
-    for part in path.split('/'):
-        if part == '.':
-            path_parts.append('')
-        else:
-            path_parts.append(part)
-    # Filter out blank non trailing parts before consuming the '..'.
-    path_parts = [part for part in path_parts[:-1] if part] + path_parts[-1:]
+    path_parts = path.split('/')
+    head_parts = []
+    for part in path_parts[:-1]:
+        if part == '..':
+            head_parts.pop() # IndexError if more '..' than prior parts
+        elif part and part != '.':
+            head_parts.append( part )
     if path_parts:
         tail_part = path_parts.pop()
+        if tail_part:
+            if tail_part == '..':
+                head_parts.pop()
+                tail_part = ''
+            elif tail_part == '.':
+                tail_part = ''
     else:
         tail_part = ''
-    head_parts = []
-    for part in path_parts:
-        if part == '..':
-            head_parts.pop()
-        else:
-            head_parts.append(part)
-    if tail_part and tail_part == '..':
-        head_parts.pop()
-        tail_part = ''
-    return ('/' + '/'.join(head_parts), tail_part)
+
+    splitpath = ('/' + '/'.join(head_parts), tail_part)
+    collapsed_path = "/".join(splitpath)
+
+    return collapsed_path
 
 
 nobody = None
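
A quick sketch of what the new helper returns, as read from the hunk above (these values are my own trace of the code, not taken from the upstream test suite):

    >>> from CGIHTTPServer import _url_collapse_path
    >>> _url_collapse_path('/cgi-bin//nested/../script.py')
    '/cgi-bin/script.py'
    >>> _url_collapse_path('/cgi-bin/./script.py')
    '/cgi-bin/script.py'
    >>> _url_collapse_path('/../..')      # more '..' than path components
    Traceback (most recent call last):
      ...
    IndexError: pop from empty list

is_cgi() then splits the collapsed path at the first '/' after the leading one and stores the resulting (head, tail) pair in cgi_info.
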
diff --git a/lib-python/2.7/Cookie.py b/lib-python/2.7/Cookie.py
--- a/lib-python/2.7/Cookie.py
+++ b/lib-python/2.7/Cookie.py
@@ -390,7 +390,7 @@
     from time import gmtime, time
     now = time()
     year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future)
-    return "%s, %02d-%3s-%4d %02d:%02d:%02d GMT" % \
+    return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % \
            (weekdayname[wd], day, monthname[month], year, hh, mm, ss)
 
 
@@ -539,7 +539,7 @@
     r"(?P<val>"                   # Start of group 'val'
     r'"(?:[^\\"]|\\.)*"'            # Any doublequoted string
     r"|"                            # or
-    r"\w{3},\s[\w\d-]{9,11}\s[\d:]{8}\sGMT" # Special case for "expires" attr
+    r"\w{3},\s[\s\w\d-]{9,11}\s[\d:]{8}\sGMT" # Special case for "expires" attr
     r"|"                            # or
     ""+ _LegalCharsPatt +"*"        # Any word or empty string
     r")"                          # End of group 'val'
diff --git a/lib-python/2.7/HTMLParser.py b/lib-python/2.7/HTMLParser.py
--- a/lib-python/2.7/HTMLParser.py
+++ b/lib-python/2.7/HTMLParser.py
@@ -22,13 +22,13 @@
 starttagopen = re.compile('<[a-zA-Z]')
 piclose = re.compile('>')
 commentclose = re.compile(r'--\s*>')
-tagfind = re.compile('[a-zA-Z][-.a-zA-Z0-9:_]*')
+tagfind = re.compile('([a-zA-Z][-.a-zA-Z0-9:_]*)(?:\s|/(?!>))*')
 # see http://www.w3.org/TR/html5/tokenization.html#tag-open-state
 # and http://www.w3.org/TR/html5/tokenization.html#tag-name-state
 tagfind_tolerant = re.compile('[a-zA-Z][^\t\n\r\f />\x00]*')
 
 attrfind = re.compile(
-    r'[\s/]*((?<=[\'"\s/])[^\s/>][^\s/=>]*)(\s*=+\s*'
+    r'((?<=[\'"\s/])[^\s/>][^\s/=>]*)(\s*=+\s*'
     r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?(?:\s|/(?!>))*')
 
 locatestarttagend = re.compile(r"""
@@ -289,7 +289,7 @@
         match = tagfind.match(rawdata, i+1)
         assert match, 'unexpected call to parse_starttag()'
         k = match.end()
-        self.lasttag = tag = rawdata[i+1:k].lower()
+        self.lasttag = tag = match.group(1).lower()
 
         while k < endpos:
             m = attrfind.match(rawdata, k)
diff --git a/lib-python/2.7/SimpleXMLRPCServer.py b/lib-python/2.7/SimpleXMLRPCServer.py
--- a/lib-python/2.7/SimpleXMLRPCServer.py
+++ b/lib-python/2.7/SimpleXMLRPCServer.py
@@ -1,4 +1,4 @@
-"""Simple XML-RPC Server.
+r"""Simple XML-RPC Server.
 
 This module can be used to create simple XML-RPC servers
 by creating a server and either installing functions, a
diff --git a/lib-python/2.7/SocketServer.py b/lib-python/2.7/SocketServer.py
--- a/lib-python/2.7/SocketServer.py
+++ b/lib-python/2.7/SocketServer.py
@@ -133,6 +133,7 @@
 import select
 import sys
 import os
+import errno
 try:
     import threading
 except ImportError:
@@ -147,6 +148,15 @@
                     "ThreadingUnixStreamServer",
                     "ThreadingUnixDatagramServer"])
 
+def _eintr_retry(func, *args):
+    """restart a system call interrupted by EINTR"""
+    while True:
+        try:
+            return func(*args)
+        except (OSError, select.error) as e:
+            if e.args[0] != errno.EINTR:
+                raise
+
 class BaseServer:
 
     """Base class for server classes.
@@ -222,7 +232,8 @@
                 # connecting to the socket to wake this up instead of
                 # polling. Polling reduces our responsiveness to a
                 # shutdown request and wastes cpu at all other times.
-                r, w, e = select.select([self], [], [], poll_interval)
+                r, w, e = _eintr_retry(select.select, [self], [], [],
+                                       poll_interval)
                 if self in r:
                     self._handle_request_noblock()
         finally:
@@ -262,7 +273,7 @@
             timeout = self.timeout
         elif self.timeout is not None:
             timeout = min(timeout, self.timeout)
-        fd_sets = select.select([self], [], [], timeout)
+        fd_sets = _eintr_retry(select.select, [self], [], [], timeout)
         if not fd_sets[0]:
             self.handle_timeout()
             return
@@ -690,7 +701,12 @@
 
     def finish(self):
         if not self.wfile.closed:
-            self.wfile.flush()
+            try:
+                self.wfile.flush()
+            except socket.error:
+                # An final socket error may have occurred here, such as
+                # the local error ECONNABORTED.
+                pass
         self.wfile.close()
         self.rfile.close()
 
diff --git a/lib-python/2.7/_LWPCookieJar.py b/lib-python/2.7/_LWPCookieJar.py
--- a/lib-python/2.7/_LWPCookieJar.py
+++ b/lib-python/2.7/_LWPCookieJar.py
@@ -48,7 +48,7 @@
 
 class LWPCookieJar(FileCookieJar):
     """
-    The LWPCookieJar saves a sequence of"Set-Cookie3" lines.
+    The LWPCookieJar saves a sequence of "Set-Cookie3" lines.
     "Set-Cookie3" is the format used by the libwww-perl libary, not known
     to be compatible with any browser, but which is easy to read and
     doesn't lose information about RFC 2965 cookies.
@@ -60,7 +60,7 @@
     """
 
     def as_lwp_str(self, ignore_discard=True, ignore_expires=True):
-        """Return cookies as a string of "\n"-separated "Set-Cookie3" headers.
+        """Return cookies as a string of "\\n"-separated "Set-Cookie3" headers.
 
         ignore_discard and ignore_expires: see docstring for FileCookieJar.save
 
diff --git a/lib-python/2.7/__future__.py b/lib-python/2.7/__future__.py
--- a/lib-python/2.7/__future__.py
+++ b/lib-python/2.7/__future__.py
@@ -112,7 +112,7 @@
                     CO_FUTURE_DIVISION)
 
 absolute_import = _Feature((2, 5, 0, "alpha", 1),
-                           (2, 7, 0, "alpha", 0),
+                           (3, 0, 0, "alpha", 0),
                            CO_FUTURE_ABSOLUTE_IMPORT)
 
 with_statement = _Feature((2, 5, 0, "alpha", 1),
diff --git a/lib-python/2.7/_pyio.py b/lib-python/2.7/_pyio.py
--- a/lib-python/2.7/_pyio.py
+++ b/lib-python/2.7/_pyio.py
@@ -340,8 +340,10 @@
         This method has no effect if the file is already closed.
         """
         if not self.__closed:
-            self.flush()
-            self.__closed = True
+            try:
+                self.flush()
+            finally:
+                self.__closed = True
 
     def __del__(self):
         """Destructor.  Calls close()."""
@@ -883,12 +885,18 @@
         return pos
 
     def readable(self):
+        if self.closed:
+            raise ValueError("I/O operation on closed file.")
         return True
 
     def writable(self):
+        if self.closed:
+            raise ValueError("I/O operation on closed file.")
         return True
 
     def seekable(self):
+        if self.closed:
+            raise ValueError("I/O operation on closed file.")
         return True
 
 
@@ -1451,7 +1459,7 @@
     enabled.  With this enabled, on input, the lines endings '\n', '\r',
     or '\r\n' are translated to '\n' before being returned to the
     caller. Conversely, on output, '\n' is translated to the system
-    default line seperator, os.linesep. If newline is any other of its
+    default line separator, os.linesep. If newline is any other of its
     legal values, that newline becomes the newline when the file is read
     and it is returned untranslated. On output, '\n' is converted to the
     newline.
@@ -1546,6 +1554,8 @@
         return self._buffer
 
     def seekable(self):
+        if self.closed:
+            raise ValueError("I/O operation on closed file.")
         return self._seekable
 
     def readable(self):
@@ -1560,8 +1570,10 @@
 
     def close(self):
         if self.buffer is not None and not self.closed:
-            self.flush()
-            self.buffer.close()
+            try:
+                self.flush()
+            finally:
+                self.buffer.close()
 
     @property
     def closed(self):
diff --git a/lib-python/2.7/_strptime.py b/lib-python/2.7/_strptime.py
--- a/lib-python/2.7/_strptime.py
+++ b/lib-python/2.7/_strptime.py
@@ -326,7 +326,8 @@
     if len(data_string) != found.end():
         raise ValueError("unconverted data remains: %s" %
                           data_string[found.end():])
-    year = 1900
+
+    year = None
     month = day = 1
     hour = minute = second = fraction = 0
     tz = -1
@@ -425,6 +426,12 @@
                     else:
                         tz = value
                         break
+    leap_year_fix = False
+    if year is None and month == 2 and day == 29:
+        year = 1904  # 1904 is first leap year of 20th century
+        leap_year_fix = True
+    elif year is None:
+        year = 1900
     # If we know the week of the year and what day of that week, we can figure
     # out the Julian day of the year.
     if julian == -1 and week_of_year != -1 and weekday != -1:
@@ -446,6 +453,12 @@
         day = datetime_result.day
     if weekday == -1:
         weekday = datetime_date(year, month, day).weekday()
+    if leap_year_fix:
+        # the caller didn't supply a year but asked for Feb 29th. We couldn't
+        # use the default of 1900 for computations. We set it back to ensure
+        # that February 29th is smaller than March 1st.
+        year = 1900
+
     return (time.struct_time((year, month, day,
                               hour, minute, second,
                               weekday, julian, tz)), fraction)
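
The practical effect: parsing a February 29th with no year used to fail, because the implicit default year 1900 is not a leap year; the computation now temporarily borrows 1904 and restores 1900 in the result. With this change applied:

    >>> import time
    >>> time.strptime('Feb 29', '%b %d')[:3]   # raised ValueError before this fix
    (1900, 2, 29)
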
diff --git a/lib-python/2.7/_weakrefset.py b/lib-python/2.7/_weakrefset.py
--- a/lib-python/2.7/_weakrefset.py
+++ b/lib-python/2.7/_weakrefset.py
@@ -63,7 +63,7 @@
                     yield item
 
     def __len__(self):
-        return sum(x() is not None for x in self.data)
+        return len(self.data) - len(self._pending_removals)
 
     def __contains__(self, item):
         try:
@@ -116,36 +116,21 @@
     def update(self, other):
         if self._pending_removals:
             self._commit_removals()
-        if isinstance(other, self.__class__):
-            self.data.update(other.data)
-        else:
-            for element in other:
-                self.add(element)
+        for element in other:
+            self.add(element)
 
     def __ior__(self, other):
         self.update(other)
         return self
 
-    # Helper functions for simple delegating methods.
-    def _apply(self, other, method):
-        if not isinstance(other, self.__class__):
-            other = self.__class__(other)
-        newdata = method(other.data)
-        newset = self.__class__()
-        newset.data = newdata
+    def difference(self, other):
+        newset = self.copy()
+        newset.difference_update(other)
         return newset
-
-    def difference(self, other):
-        return self._apply(other, self.data.difference)
     __sub__ = difference
 
     def difference_update(self, other):
-        if self._pending_removals:
-            self._commit_removals()
-        if self is other:
-            self.data.clear()
-        else:
-            self.data.difference_update(ref(item) for item in other)
+        self.__isub__(other)
     def __isub__(self, other):
         if self._pending_removals:
             self._commit_removals()
@@ -156,13 +141,11 @@
         return self
 
     def intersection(self, other):
-        return self._apply(other, self.data.intersection)
+        return self.__class__(item for item in other if item in self)
     __and__ = intersection
 
     def intersection_update(self, other):
-        if self._pending_removals:
-            self._commit_removals()
-        self.data.intersection_update(ref(item) for item in other)
+        self.__iand__(other)
     def __iand__(self, other):
         if self._pending_removals:
             self._commit_removals()
@@ -171,17 +154,17 @@
 
     def issubset(self, other):
         return self.data.issubset(ref(item) for item in other)
-    __lt__ = issubset
+    __le__ = issubset
 
-    def __le__(self, other):
-        return self.data <= set(ref(item) for item in other)
+    def __lt__(self, other):
+        return self.data < set(ref(item) for item in other)
 
     def issuperset(self, other):
         return self.data.issuperset(ref(item) for item in other)
-    __gt__ = issuperset
+    __ge__ = issuperset
 
-    def __ge__(self, other):
-        return self.data >= set(ref(item) for item in other)
+    def __gt__(self, other):
+        return self.data > set(ref(item) for item in other)
 
     def __eq__(self, other):
         if not isinstance(other, self.__class__):
@@ -189,27 +172,24 @@
         return self.data == set(ref(item) for item in other)
 
     def symmetric_difference(self, other):
-        return self._apply(other, self.data.symmetric_difference)
+        newset = self.copy()
+        newset.symmetric_difference_update(other)
+        return newset
     __xor__ = symmetric_difference
 
     def symmetric_difference_update(self, other):
-        if self._pending_removals:
-            self._commit_removals()
-        if self is other:
-            self.data.clear()
-        else:
-            self.data.symmetric_difference_update(ref(item) for item in other)
+        self.__ixor__(other)
     def __ixor__(self, other):
         if self._pending_removals:
             self._commit_removals()
         if self is other:
             self.data.clear()
         else:
-            self.data.symmetric_difference_update(ref(item) for item in other)
+            self.data.symmetric_difference_update(ref(item, self._remove) for item in other)
         return self
 
     def union(self, other):
-        return self._apply(other, self.data.union)
+        return self.__class__(e for s in (self, other) for e in s)
     __or__ = union
 
     def isdisjoint(self, other):
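
Besides dropping the _apply helper, this hunk fixes the comparison operators: issubset/issuperset were previously aliased to __lt__/__gt__, so `s < s` was True for any WeakSet. A small sketch of the corrected semantics (the throwaway Obj class exists only to provide weak-referenceable elements):

    >>> from _weakrefset import WeakSet
    >>> class Obj(object):
    ...     pass
    ...
    >>> a, b = Obj(), Obj()
    >>> small, big = WeakSet([a]), WeakSet([a, b])
    >>> small <= big, small < big, small < small
    (True, True, False)
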
diff --git a/lib-python/2.7/aifc.py b/lib-python/2.7/aifc.py
--- a/lib-python/2.7/aifc.py
+++ b/lib-python/2.7/aifc.py
@@ -732,22 +732,28 @@
             self._patchheader()
 
     def close(self):
-        self._ensure_header_written(0)
-        if self._datawritten & 1:
-            # quick pad to even size
-            self._file.write(chr(0))
-            self._datawritten = self._datawritten + 1
-        self._writemarkers()
-        if self._nframeswritten != self._nframes or \
-              self._datalength != self._datawritten or \
-              self._marklength:
-            self._patchheader()
-        if self._comp:
-            self._comp.CloseCompressor()
-            self._comp = None
-        # Prevent ref cycles
-        self._convert = None
-        self._file.close()
+        if self._file is None:
+            return
+        try:
+            self._ensure_header_written(0)
+            if self._datawritten & 1:
+                # quick pad to even size
+                self._file.write(chr(0))
+                self._datawritten = self._datawritten + 1
+            self._writemarkers()
+            if self._nframeswritten != self._nframes or \
+                  self._datalength != self._datawritten or \
+                  self._marklength:
+                self._patchheader()
+            if self._comp:
+                self._comp.CloseCompressor()
+                self._comp = None
+        finally:
+            # Prevent ref cycles
+            self._convert = None
+            f = self._file
+            self._file = None
+            f.close()
 
     #
     # Internal methods.
diff --git a/lib-python/2.7/argparse.py b/lib-python/2.7/argparse.py
--- a/lib-python/2.7/argparse.py
+++ b/lib-python/2.7/argparse.py
@@ -740,10 +740,10 @@
 
         - default -- The value to be produced if the option is not specified.
 
-        - type -- The type which the command-line arguments should be converted
-            to, should be one of 'string', 'int', 'float', 'complex' or a
-            callable object that accepts a single string argument. If None,
-            'string' is assumed.
+        - type -- A callable that accepts a single string argument, and
+            returns the converted value.  The standard Python types str, int,
+            float, and complex are useful examples of such callables.  If None,
+            str is used.
 
         - choices -- A container of values that should be allowed. If not None,
             after a command-line argument has been converted to the appropriate
@@ -1692,9 +1692,12 @@
         return args
 
     def parse_known_args(self, args=None, namespace=None):
-        # args default to the system args
         if args is None:
+            # args default to the system args
             args = _sys.argv[1:]
+        else:
+            # make sure that args are mutable
+            args = list(args)
 
         # default Namespace built from parser defaults
         if namespace is None:
@@ -1705,10 +1708,7 @@
             if action.dest is not SUPPRESS:
                 if not hasattr(namespace, action.dest):
                     if action.default is not SUPPRESS:
-                        default = action.default
-                        if isinstance(action.default, basestring):
-                            default = self._get_value(action, default)
-                        setattr(namespace, action.dest, default)
+                        setattr(namespace, action.dest, action.default)
 
         # add any parser defaults that aren't present
         for dest in self._defaults:
@@ -1936,12 +1936,23 @@
         if positionals:
             self.error(_('too few arguments'))
 
-        # make sure all required actions were present
+        # make sure all required actions were present, and convert defaults.
         for action in self._actions:
-            if action.required:
-                if action not in seen_actions:
+            if action not in seen_actions:
+                if action.required:
                     name = _get_action_name(action)
                     self.error(_('argument %s is required') % name)
+                else:
+                    # Convert action default now instead of doing it before
+                    # parsing arguments to avoid calling convert functions
+                    # twice (which may fail) if the argument was given, but
+                    # only if it was defined already in the namespace
+                    if (action.default is not None and
+                            isinstance(action.default, basestring) and
+                            hasattr(namespace, action.dest) and
+                            action.default is getattr(namespace, action.dest)):
+                        setattr(namespace, action.dest,
+                                self._get_value(action, action.default))
 
         # make sure all required groups had one option present
         for group in self._mutually_exclusive_groups:
@@ -1967,7 +1978,7 @@
         for arg_string in arg_strings:
 
             # for regular arguments, just add them back into the list
-            if arg_string[0] not in self.fromfile_prefix_chars:
+            if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                 new_arg_strings.append(arg_string)
 
             # replace arguments referencing files with the file content
@@ -2174,9 +2185,12 @@
     # Value conversion methods
     # ========================
     def _get_values(self, action, arg_strings):
-        # for everything but PARSER args, strip out '--'
+        # for everything but PARSER, REMAINDER args, strip out first '--'
         if action.nargs not in [PARSER, REMAINDER]:
-            arg_strings = [s for s in arg_strings if s != '--']
+            try:
+                arg_strings.remove('--')
+            except ValueError:
+                pass
 
         # optional argument produces a default when not present
         if not arg_strings and action.nargs == OPTIONAL:
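
Two of these changes are worth illustrating: `type` is now documented as any callable, and string defaults are converted lazily after parsing, only when the option was not supplied (so supplying the argument no longer triggers a second, possibly failing, conversion of its default). A minimal sketch (the option name `--retries` is made up):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--retries', type=int, default='3')   # string default

    # The default string is run through type() only when --retries is absent.
    print parser.parse_args([]).retries                   # -> 3 (an int)
    print parser.parse_args(['--retries', '5']).retries   # -> 5
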
diff --git a/lib-python/2.7/asyncore.py b/lib-python/2.7/asyncore.py
--- a/lib-python/2.7/asyncore.py
+++ b/lib-python/2.7/asyncore.py
@@ -225,6 +225,7 @@
     debug = False
     connected = False
     accepting = False
+    connecting = False
     closing = False
     addr = None
     ignore_log_types = frozenset(['warning'])
@@ -248,7 +249,7 @@
             try:
                 self.addr = sock.getpeername()
             except socket.error, err:
-                if err.args[0] == ENOTCONN:
+                if err.args[0] in (ENOTCONN, EINVAL):
                     # To handle the case where we got an unconnected
                     # socket.
                     self.connected = False
@@ -342,9 +343,11 @@
 
     def connect(self, address):
         self.connected = False
+        self.connecting = True
         err = self.socket.connect_ex(address)
         if err in (EINPROGRESS, EALREADY, EWOULDBLOCK) \
         or err == EINVAL and os.name in ('nt', 'ce'):
+            self.addr = address
             return
         if err in (0, EISCONN):
             self.addr = address
@@ -390,7 +393,7 @@
             else:
                 return data
         except socket.error, why:
-            # winsock sometimes throws ENOTCONN
+            # winsock sometimes raises ENOTCONN
             if why.args[0] in _DISCONNECTED:
                 self.handle_close()
                 return ''
@@ -400,6 +403,7 @@
     def close(self):
         self.connected = False
         self.accepting = False
+        self.connecting = False
         self.del_channel()
         try:
             self.socket.close()
@@ -438,7 +442,8 @@
             # sockets that are connected
             self.handle_accept()
         elif not self.connected:
-            self.handle_connect_event()
+            if self.connecting:
+                self.handle_connect_event()
             self.handle_read()
         else:
             self.handle_read()
@@ -449,6 +454,7 @@
             raise socket.error(err, _strerror(err))
         self.handle_connect()
         self.connected = True
+        self.connecting = False
 
     def handle_write_event(self):
         if self.accepting:
@@ -457,12 +463,8 @@
             return
 
         if not self.connected:
-            #check for errors
-            err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
-            if err != 0:
-                raise socket.error(err, _strerror(err))
-
-            self.handle_connect_event()
+            if self.connecting:
+                self.handle_connect_event()
         self.handle_write()
 
     def handle_expt_event(self):
diff --git a/lib-python/2.7/bdb.py b/lib-python/2.7/bdb.py
--- a/lib-python/2.7/bdb.py
+++ b/lib-python/2.7/bdb.py
@@ -24,6 +24,7 @@
         self.skip = set(skip) if skip else None
         self.breaks = {}
         self.fncache = {}
+        self.frame_returning = None
 
     def canonic(self, filename):
         if filename == "<" + filename[1:-1] + ">":
@@ -82,7 +83,11 @@
 
     def dispatch_return(self, frame, arg):
         if self.stop_here(frame) or frame == self.returnframe:
-            self.user_return(frame, arg)
+            try:
+                self.frame_returning = frame
+                self.user_return(frame, arg)
+            finally:
+                self.frame_returning = None
             if self.quitting: raise BdbQuit
         return self.trace_dispatch
 
@@ -186,6 +191,14 @@
 
     def set_step(self):
         """Stop after one line of code."""
+        # Issue #13183: pdb skips frames after hitting a breakpoint and running
+        # step commands.
+        # Restore the trace function in the caller (that may not have been set
+        # for performance reasons) when returning from the current frame.
+        if self.frame_returning:
+            caller_frame = self.frame_returning.f_back
+            if caller_frame and not caller_frame.f_trace:
+                caller_frame.f_trace = self.trace_dispatch
         self._set_stopinfo(None, None)
 
     def set_next(self, frame):
diff --git a/lib-python/2.7/calendar.py b/lib-python/2.7/calendar.py
--- a/lib-python/2.7/calendar.py
+++ b/lib-python/2.7/calendar.py
@@ -161,7 +161,11 @@
         oneday = datetime.timedelta(days=1)
         while True:
             yield date
-            date += oneday
+            try:
+                date += oneday
+            except OverflowError:
+                # Adding one day could fail after datetime.MAXYEAR
+                break
             if date.month != month and date.weekday() == self.firstweekday:
                 break
 
diff --git a/lib-python/2.7/cgi.py b/lib-python/2.7/cgi.py
--- a/lib-python/2.7/cgi.py
+++ b/lib-python/2.7/cgi.py
@@ -37,7 +37,6 @@
 from operator import attrgetter
 import sys
 import os
-import urllib
 import UserDict
 import urlparse
 
diff --git a/lib-python/2.7/cgitb.py b/lib-python/2.7/cgitb.py
--- a/lib-python/2.7/cgitb.py
+++ b/lib-python/2.7/cgitb.py
@@ -295,14 +295,19 @@
         if self.logdir is not None:
             suffix = ['.txt', '.html'][self.format=="html"]
             (fd, path) = tempfile.mkstemp(suffix=suffix, dir=self.logdir)
+
             try:
                 file = os.fdopen(fd, 'w')
                 file.write(doc)
                 file.close()
-                msg = '<p> %s contains the description of this error.' % path
+                msg = '%s contains the description of this error.' % path
             except:
-                msg = '<p> Tried to save traceback to %s, but failed.' % path
-            self.file.write(msg + '\n')
+                msg = 'Tried to save traceback to %s, but failed.' % path
+
+            if self.format == 'html':
+                self.file.write('<p>%s</p>\n' % msg)
+            else:
+                self.file.write(msg + '\n')
         try:
             self.file.flush()
         except: pass
diff --git a/lib-python/2.7/cmd.py b/lib-python/2.7/cmd.py
--- a/lib-python/2.7/cmd.py
+++ b/lib-python/2.7/cmd.py
@@ -294,6 +294,7 @@
         return list(commands | topics)
 
     def do_help(self, arg):
+        'List available commands with "help" or detailed help with "help cmd".'
         if arg:
             # XXX check arg syntax
             try:
diff --git a/lib-python/2.7/collections.py b/lib-python/2.7/collections.py
--- a/lib-python/2.7/collections.py
+++ b/lib-python/2.7/collections.py
@@ -6,11 +6,12 @@
 __all__ += _abcoll.__all__
 
 from _collections import deque, defaultdict
-from operator import itemgetter as _itemgetter
+from operator import itemgetter as _itemgetter, eq as _eq
 from keyword import iskeyword as _iskeyword
 import sys as _sys
 import heapq as _heapq
 from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
+from itertools import imap as _imap
 
 try:
     from thread import get_ident as _get_ident
@@ -50,49 +51,45 @@
             self.__map = {}
         self.__update(*args, **kwds)
 
-    def __setitem__(self, key, value, PREV=0, NEXT=1, dict_setitem=dict.__setitem__):
+    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
         'od.__setitem__(i, y) <==> od[i]=y'
         # Setting a new item creates a new link at the end of the linked list,
         # and the inherited dictionary is updated with the new key/value pair.
         if key not in self:
             root = self.__root
-            last = root[PREV]
-            last[NEXT] = root[PREV] = self.__map[key] = [last, root, key]
-        dict_setitem(self, key, value)
+            last = root[0]
+            last[1] = root[0] = self.__map[key] = [last, root, key]
+        return dict_setitem(self, key, value)
 
-    def __delitem__(self, key, PREV=0, NEXT=1, dict_delitem=dict.__delitem__):
+    def __delitem__(self, key, dict_delitem=dict.__delitem__):
         'od.__delitem__(y) <==> del od[y]'
         # Deleting an existing item uses self.__map to find the link which gets
         # removed by updating the links in the predecessor and successor nodes.
         dict_delitem(self, key)
         link_prev, link_next, key = self.__map.pop(key)
-        link_prev[NEXT] = link_next
-        link_next[PREV] = link_prev
+        link_prev[1] = link_next                        # update link_prev[NEXT]
+        link_next[0] = link_prev                        # update link_next[PREV]
 
     def __iter__(self):
         'od.__iter__() <==> iter(od)'
         # Traverse the linked list in order.
-        NEXT, KEY = 1, 2
         root = self.__root
-        curr = root[NEXT]
+        curr = root[1]                                  # start at the first node
         while curr is not root:
-            yield curr[KEY]
-            curr = curr[NEXT]
+            yield curr[2]                               # yield the curr[KEY]
+            curr = curr[1]                              # move to next node
 
     def __reversed__(self):
         'od.__reversed__() <==> reversed(od)'
         # Traverse the linked list in reverse order.
-        PREV, KEY = 0, 2
         root = self.__root
-        curr = root[PREV]
+        curr = root[0]                                  # start at the last node
         while curr is not root:
-            yield curr[KEY]
-            curr = curr[PREV]
+            yield curr[2]                               # yield the curr[KEY]
+            curr = curr[0]                              # move to previous node
 
     def clear(self):
         'od.clear() -> None.  Remove all items from od.'
-        for node in self.__map.itervalues():
-            del node[:]
         root = self.__root
         root[:] = [root, root, None]
         self.__map.clear()
@@ -208,7 +205,7 @@
 
         '''
         if isinstance(other, OrderedDict):
-            return len(self)==len(other) and self.items() == other.items()
+            return dict.__eq__(self, other) and all(_imap(_eq, self, other))
         return dict.__eq__(self, other)
 
     def __ne__(self, other):
@@ -234,10 +231,60 @@
 ### namedtuple
 ################################################################################
 
+_class_template = '''\
+class {typename}(tuple):
+    '{typename}({arg_list})'
+
+    __slots__ = ()
+
+    _fields = {field_names!r}
+
+    def __new__(_cls, {arg_list}):
+        'Create new instance of {typename}({arg_list})'
+        return _tuple.__new__(_cls, ({arg_list}))
+
+    @classmethod
+    def _make(cls, iterable, new=tuple.__new__, len=len):
+        'Make a new {typename} object from a sequence or iterable'
+        result = new(cls, iterable)
+        if len(result) != {num_fields:d}:
+            raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result))
+        return result
+
+    def __repr__(self):
+        'Return a nicely formatted representation string'
+        return '{typename}({repr_fmt})' % self
+
+    def _asdict(self):
+        'Return a new OrderedDict which maps field names to their values'
+        return OrderedDict(zip(self._fields, self))
+
+    __dict__ = property(_asdict)
+
+    def _replace(_self, **kwds):
+        'Return a new {typename} object replacing specified fields with new values'
+        result = _self._make(map(kwds.pop, {field_names!r}, _self))
+        if kwds:
+            raise ValueError('Got unexpected field names: %r' % kwds.keys())
+        return result
+
+    def __getnewargs__(self):
+        'Return self as a plain tuple.  Used by copy and pickle.'
+        return tuple(self)
+
+{field_defs}
+'''
+
+_repr_template = '{name}=%r'
+
+_field_template = '''\
+    {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}')
+'''
+
 def namedtuple(typename, field_names, verbose=False, rename=False):
     """Returns a new subclass of tuple with named fields.
 
-    >>> Point = namedtuple('Point', 'x y')
+    >>> Point = namedtuple('Point', ['x', 'y'])
     >>> Point.__doc__                   # docstring for the new class
     'Point(x, y)'
     >>> p = Point(11, y=22)             # instantiate with positional args or keywords
@@ -258,83 +305,63 @@
 
     """
 
-    # Parse and validate the field names.  Validation serves two purposes,
-    # generating informative error messages and preventing template injection attacks.
+    # Validate the field names.  At the user's option, either generate an error
+    # message or automatically replace the field name with a valid name.
     if isinstance(field_names, basestring):
-        field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas
-    field_names = tuple(map(str, field_names))
+        field_names = field_names.replace(',', ' ').split()
+    field_names = map(str, field_names)
     if rename:
-        names = list(field_names)
         seen = set()
-        for i, name in enumerate(names):
-            if (not all(c.isalnum() or c=='_' for c in name) or _iskeyword(name)
-                or not name or name[0].isdigit() or name.startswith('_')
+        for index, name in enumerate(field_names):
+            if (not all(c.isalnum() or c=='_' for c in name)
+                or _iskeyword(name)
+                or not name
+                or name[0].isdigit()
+                or name.startswith('_')
                 or name in seen):
-                names[i] = '_%d' % i
+                field_names[index] = '_%d' % index
             seen.add(name)
-        field_names = tuple(names)
-    for name in (typename,) + field_names:
+    for name in [typename] + field_names:
         if not all(c.isalnum() or c=='_' for c in name):
-            raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
+            raise ValueError('Type names and field names can only contain '
+                             'alphanumeric characters and underscores: %r' % name)
         if _iskeyword(name):
-            raise ValueError('Type names and field names cannot be a keyword: %r' % name)
+            raise ValueError('Type names and field names cannot be a '
+                             'keyword: %r' % name)
         if name[0].isdigit():
-            raise ValueError('Type names and field names cannot start with a number: %r' % name)
-    seen_names = set()
+            raise ValueError('Type names and field names cannot start with '
+                             'a number: %r' % name)
+    seen = set()
     for name in field_names:
         if name.startswith('_') and not rename:
-            raise ValueError('Field names cannot start with an underscore: %r' % name)
-        if name in seen_names:
+            raise ValueError('Field names cannot start with an underscore: '
+                             '%r' % name)
+        if name in seen:
             raise ValueError('Encountered duplicate field name: %r' % name)
-        seen_names.add(name)
+        seen.add(name)
 
-    # Create and fill-in the class template
-    numfields = len(field_names)
-    argtxt = repr(field_names).replace("'", "")[1:-1]   # tuple repr without parens or quotes
-    reprtxt = ', '.join('%s=%%r' % name for name in field_names)
-    template = '''class %(typename)s(tuple):
-        '%(typename)s(%(argtxt)s)' \n
-        __slots__ = () \n
-        _fields = %(field_names)r \n
-        def __new__(_cls, %(argtxt)s):
-            'Create new instance of %(typename)s(%(argtxt)s)'
-            return _tuple.__new__(_cls, (%(argtxt)s)) \n
-        @classmethod
-        def _make(cls, iterable, new=tuple.__new__, len=len):
-            'Make a new %(typename)s object from a sequence or iterable'
-            result = new(cls, iterable)
-            if len(result) != %(numfields)d:
-                raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
-            return result \n
-        def __repr__(self):
-            'Return a nicely formatted representation string'
-            return '%(typename)s(%(reprtxt)s)' %% self \n
-        def _asdict(self):
-            'Return a new OrderedDict which maps field names to their values'
-            return OrderedDict(zip(self._fields, self)) \n
-        __dict__ = property(_asdict) \n
-        def _replace(_self, **kwds):
-            'Return a new %(typename)s object replacing specified fields with new values'
-            result = _self._make(map(kwds.pop, %(field_names)r, _self))
-            if kwds:
-                raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
-            return result \n
-        def __getnewargs__(self):
-            'Return self as a plain tuple.  Used by copy and pickle.'
-            return tuple(self) \n\n''' % locals()
-    for i, name in enumerate(field_names):
-        template += "        %s = _property(_itemgetter(%d), doc='Alias for field number %d')\n" % (name, i, i)
+    # Fill-in the class template
+    class_definition = _class_template.format(
+        typename = typename,
+        field_names = tuple(field_names),
+        num_fields = len(field_names),
+        arg_list = repr(tuple(field_names)).replace("'", "")[1:-1],
+        repr_fmt = ', '.join(_repr_template.format(name=name)
+                             for name in field_names),
+        field_defs = '\n'.join(_field_template.format(index=index, name=name)
+                               for index, name in enumerate(field_names))
+    )
     if verbose:
-        print template
+        print class_definition
 
-    # Execute the template string in a temporary namespace and
-    # support tracing utilities by setting a value for frame.f_globals['__name__']
+    # Execute the template string in a temporary namespace and support
+    # tracing utilities by setting a value for frame.f_globals['__name__']
     namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
                      OrderedDict=OrderedDict, _property=property, _tuple=tuple)
     try:
-        exec template in namespace
-    except SyntaxError, e:
-        raise SyntaxError(e.message + ':\n' + template)
+        exec class_definition in namespace
+    except SyntaxError as e:
+        raise SyntaxError(e.message + ':\n' + class_definition)
     result = namespace[typename]
 
     # For pickling to work, the __module__ variable needs to be set to the frame
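
The namedtuple changes move the generated source into a module-level _class_template filled in with str.format, without changing the generated API. The OrderedDict __eq__ rewrite is essentially behavior-preserving (order-sensitive against another OrderedDict, order-insensitive against a plain dict); it just avoids building two full item lists. As a reminder of those semantics:

    >>> from collections import OrderedDict
    >>> a = OrderedDict([('x', 1), ('y', 2)])
    >>> b = OrderedDict([('y', 2), ('x', 1)])
    >>> a == b            # order matters between OrderedDicts
    False
    >>> a == dict(b)      # but not against a regular dict
    True
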
diff --git a/lib-python/2.7/compiler/consts.py b/lib-python/2.7/compiler/consts.py
--- a/lib-python/2.7/compiler/consts.py
+++ b/lib-python/2.7/compiler/consts.py
@@ -5,7 +5,7 @@
 
 SC_LOCAL = 1
 SC_GLOBAL_IMPLICIT = 2
-SC_GLOBAL_EXPLICT = 3
+SC_GLOBAL_EXPLICIT = 3
 SC_FREE = 4
 SC_CELL = 5
 SC_UNKNOWN = 6
diff --git a/lib-python/2.7/compiler/pycodegen.py b/lib-python/2.7/compiler/pycodegen.py
--- a/lib-python/2.7/compiler/pycodegen.py
+++ b/lib-python/2.7/compiler/pycodegen.py
@@ -7,7 +7,7 @@
 
 from compiler import ast, parse, walk, syntax
 from compiler import pyassem, misc, future, symbols
-from compiler.consts import SC_LOCAL, SC_GLOBAL_IMPLICIT, SC_GLOBAL_EXPLICT, \
+from compiler.consts import SC_LOCAL, SC_GLOBAL_IMPLICIT, SC_GLOBAL_EXPLICIT, \
      SC_FREE, SC_CELL
 from compiler.consts import (CO_VARARGS, CO_VARKEYWORDS, CO_NEWLOCALS,
      CO_NESTED, CO_GENERATOR, CO_FUTURE_DIVISION,
@@ -283,7 +283,7 @@
                 self.emit(prefix + '_NAME', name)
             else:
                 self.emit(prefix + '_FAST', name)
-        elif scope == SC_GLOBAL_EXPLICT:
+        elif scope == SC_GLOBAL_EXPLICIT:
             self.emit(prefix + '_GLOBAL', name)
         elif scope == SC_GLOBAL_IMPLICIT:
             if not self.optimized:
diff --git a/lib-python/2.7/compiler/symbols.py b/lib-python/2.7/compiler/symbols.py
--- a/lib-python/2.7/compiler/symbols.py
+++ b/lib-python/2.7/compiler/symbols.py
@@ -1,7 +1,7 @@
 """Module symbol-table generator"""
 
 from compiler import ast
-from compiler.consts import SC_LOCAL, SC_GLOBAL_IMPLICIT, SC_GLOBAL_EXPLICT, \
+from compiler.consts import SC_LOCAL, SC_GLOBAL_IMPLICIT, SC_GLOBAL_EXPLICIT, \
     SC_FREE, SC_CELL, SC_UNKNOWN
 from compiler.misc import mangle
 import types
@@ -90,7 +90,7 @@
         The scope of a name could be LOCAL, GLOBAL, FREE, or CELL.
         """
         if name in self.globals:
-            return SC_GLOBAL_EXPLICT
+            return SC_GLOBAL_EXPLICIT
         if name in self.cells:
             return SC_CELL
         if name in self.defs:
diff --git a/lib-python/2.7/cookielib.py b/lib-python/2.7/cookielib.py
--- a/lib-python/2.7/cookielib.py
+++ b/lib-python/2.7/cookielib.py
@@ -1,4 +1,4 @@
-"""HTTP cookie handling for web clients.
+r"""HTTP cookie handling for web clients.
 
 This module has (now fairly distant) origins in Gisle Aas' Perl module
 HTTP::Cookies, from the libwww-perl library.
diff --git a/lib-python/2.7/ctypes/test/test_bitfields.py b/lib-python/2.7/ctypes/test/test_bitfields.py
--- a/lib-python/2.7/ctypes/test/test_bitfields.py
+++ b/lib-python/2.7/ctypes/test/test_bitfields.py
@@ -240,5 +240,25 @@
             _anonymous_ = ["_"]
             _fields_ = [("_", X)]
 
+    @unittest.skipUnless(hasattr(ctypes, "c_uint32"), "c_int32 is required")
+    def test_uint32(self):
+        class X(Structure):
+            _fields_ = [("a", c_uint32, 32)]
+        x = X()
+        x.a = 10
+        self.assertEqual(x.a, 10)
+        x.a = 0xFDCBA987
+        self.assertEqual(x.a, 0xFDCBA987)
+
+    @unittest.skipUnless(hasattr(ctypes, "c_uint64"), "c_int64 is required")
+    def test_uint64(self):
+        class X(Structure):
+            _fields_ = [("a", c_uint64, 64)]
+        x = X()
+        x.a = 10
+        self.assertEqual(x.a, 10)
+        x.a = 0xFEDCBA9876543211
+        self.assertEqual(x.a, 0xFEDCBA9876543211)
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/lib-python/2.7/ctypes/test/test_numbers.py b/lib-python/2.7/ctypes/test/test_numbers.py
--- a/lib-python/2.7/ctypes/test/test_numbers.py
+++ b/lib-python/2.7/ctypes/test/test_numbers.py
@@ -216,6 +216,16 @@
         # probably be changed:
         self.assertRaises(TypeError, c_int, c_long(42))
 
+    def test_float_overflow(self):
+        import sys
+        big_int = int(sys.float_info.max) * 2
+        for t in float_types + [c_longdouble]:
+            self.assertRaises(OverflowError, t, big_int)
+            if (hasattr(t, "__ctype_be__")):
+                self.assertRaises(OverflowError, t.__ctype_be__, big_int)
+            if (hasattr(t, "__ctype_le__")):
+                self.assertRaises(OverflowError, t.__ctype_le__, big_int)
+
 ##    def test_perf(self):
 ##        check_perf()
 
diff --git a/lib-python/2.7/ctypes/test/test_structures.py b/lib-python/2.7/ctypes/test/test_structures.py
--- a/lib-python/2.7/ctypes/test/test_structures.py
+++ b/lib-python/2.7/ctypes/test/test_structures.py
@@ -1,6 +1,7 @@
 import unittest
 from ctypes import *
 from struct import calcsize
+import _testcapi
 
 class SubclassesTest(unittest.TestCase):
     def test_subclass(self):
@@ -199,6 +200,14 @@
              "_pack_": -1}
         self.assertRaises(ValueError, type(Structure), "X", (Structure,), d)
 
+        # Issue 15989
+        d = {"_fields_": [("a", c_byte)],
+             "_pack_": _testcapi.INT_MAX + 1}
+        self.assertRaises(ValueError, type(Structure), "X", (Structure,), d)
+        d = {"_fields_": [("a", c_byte)],
+             "_pack_": _testcapi.UINT_MAX + 2}
+        self.assertRaises(ValueError, type(Structure), "X", (Structure,), d)
+
     def test_initializers(self):
         class Person(Structure):
             _fields_ = [("name", c_char*6),
diff --git a/lib-python/2.7/ctypes/test/test_win32.py b/lib-python/2.7/ctypes/test/test_win32.py
--- a/lib-python/2.7/ctypes/test/test_win32.py
+++ b/lib-python/2.7/ctypes/test/test_win32.py
@@ -3,6 +3,7 @@
 from ctypes import *
 from ctypes.test import is_resource_enabled
 import unittest, sys
+from test import test_support as support
 
 import _ctypes_test
 
@@ -60,7 +61,9 @@
 
         def test_COMError(self):
             from _ctypes import COMError
-            self.assertEqual(COMError.__doc__, "Raised when a COM method call failed.")
+            if support.HAVE_DOCSTRINGS:
+                self.assertEqual(COMError.__doc__,
+                                 "Raised when a COM method call failed.")
 
             ex = COMError(-1, "text", ("details",))
             self.assertEqual(ex.hresult, -1)
diff --git a/lib-python/2.7/curses/__init__.py b/lib-python/2.7/curses/__init__.py
--- a/lib-python/2.7/curses/__init__.py
+++ b/lib-python/2.7/curses/__init__.py
@@ -5,7 +5,7 @@
 
    import curses
    from curses import textpad
-   curses.initwin()
+   curses.initscr()
    ...
 
 """
diff --git a/lib-python/2.7/decimal.py b/lib-python/2.7/decimal.py
--- a/lib-python/2.7/decimal.py
+++ b/lib-python/2.7/decimal.py
@@ -1581,7 +1581,13 @@
 
     def __float__(self):
         """Float representation."""
-        return float(str(self))
+        if self._isnan():
+            if self.is_snan():
+                raise ValueError("Cannot convert signaling NaN to float")
+            s = "-nan" if self._sign else "nan"
+        else:
+            s = str(self)
+        return float(s)
 
     def __int__(self):
         """Converts self to an int, truncating if necessary."""
diff --git a/lib-python/2.7/distutils/command/bdist_rpm.py b/lib-python/2.7/distutils/command/bdist_rpm.py
--- a/lib-python/2.7/distutils/command/bdist_rpm.py
+++ b/lib-python/2.7/distutils/command/bdist_rpm.py
@@ -379,16 +379,28 @@
         self.spawn(rpm_cmd)
 
         if not self.dry_run:
+            if self.distribution.has_ext_modules():
+                pyversion = get_python_version()
+            else:
+                pyversion = 'any'
+
             if not self.binary_only:
                 srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
                 assert(os.path.exists(srpm))
                 self.move_file(srpm, self.dist_dir)
+                filename = os.path.join(self.dist_dir, source_rpm)
+                self.distribution.dist_files.append(
+                    ('bdist_rpm', pyversion, filename))
 
             if not self.source_only:
                 for rpm in binary_rpms:
                     rpm = os.path.join(rpm_dir['RPMS'], rpm)
                     if os.path.exists(rpm):
                         self.move_file(rpm, self.dist_dir)
+                        filename = os.path.join(self.dist_dir,
+                                                os.path.basename(rpm))
+                        self.distribution.dist_files.append(
+                            ('bdist_rpm', pyversion, filename))
     # run()
 
     def _dist_path(self, path):
diff --git a/lib-python/2.7/distutils/command/check.py b/lib-python/2.7/distutils/command/check.py
--- a/lib-python/2.7/distutils/command/check.py
+++ b/lib-python/2.7/distutils/command/check.py
@@ -26,6 +26,9 @@
 
         def system_message(self, level, message, *children, **kwargs):
             self.messages.append((level, message, children, kwargs))
+            return nodes.system_message(message, level=level,
+                                        type=self.levels[level],
+                                        *children, **kwargs)
 
     HAS_DOCUTILS = True
 except ImportError:
diff --git a/lib-python/2.7/distutils/config.py b/lib-python/2.7/distutils/config.py
--- a/lib-python/2.7/distutils/config.py
+++ b/lib-python/2.7/distutils/config.py
@@ -42,16 +42,11 @@
     def _store_pypirc(self, username, password):
         """Creates a default .pypirc file."""
         rc = self._get_rc_file()
-        f = open(rc, 'w')
+        f = os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0600), 'w')
         try:
             f.write(DEFAULT_PYPIRC % (username, password))
         finally:
             f.close()
-        try:
-            os.chmod(rc, 0600)
-        except OSError:
-            # should do something better here
-            pass
 
     def _read_pypirc(self):
         """Reads the .pypirc file."""
diff --git a/lib-python/2.7/distutils/dir_util.py b/lib-python/2.7/distutils/dir_util.py
--- a/lib-python/2.7/distutils/dir_util.py
+++ b/lib-python/2.7/distutils/dir_util.py
@@ -144,6 +144,10 @@
         src_name = os.path.join(src, n)
         dst_name = os.path.join(dst, n)
 
+        if n.startswith('.nfs'):
+            # skip NFS rename files
+            continue
+
         if preserve_symlinks and os.path.islink(src_name):
             link_dest = os.readlink(src_name)
             if verbose >= 1:
diff --git a/lib-python/2.7/distutils/tests/test_bdist_dumb.py b/lib-python/2.7/distutils/tests/test_bdist_dumb.py
--- a/lib-python/2.7/distutils/tests/test_bdist_dumb.py
+++ b/lib-python/2.7/distutils/tests/test_bdist_dumb.py
@@ -1,8 +1,10 @@
 """Tests for distutils.command.bdist_dumb."""
 
+import os
+import sys
+import zipfile
 import unittest
-import sys
-import os
+from test.test_support import run_unittest
 
 # zlib is not used here, but if it's not available
 # test_simple_built will fail
@@ -11,8 +13,6 @@
 except ImportError:
     zlib = None
 
-from test.test_support import run_unittest
-
 from distutils.core import Distribution
 from distutils.command.bdist_dumb import bdist_dumb
 from distutils.tests import support
@@ -73,15 +73,23 @@
 
         # see what we have
         dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
-        base = "%s.%s" % (dist.get_fullname(), cmd.plat_name)
+        base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name)
         if os.name == 'os2':
             base = base.replace(':', '-')
 
-        wanted = ['%s.zip' % base]
-        self.assertEqual(dist_created, wanted)
+        self.assertEqual(dist_created, [base])
 
         # now let's check what we have in the zip file
-        # XXX to be done
+        fp = zipfile.ZipFile(os.path.join('dist', base))
+        try:
+            contents = fp.namelist()
+        finally:
+            fp.close()
+
+        contents = sorted(os.path.basename(fn) for fn in contents)
+        wanted = ['foo-0.1-py%s.%s.egg-info' % sys.version_info[:2],
+                  'foo.py', 'foo.pyc']
+        self.assertEqual(contents, sorted(wanted))
 
     def test_finalize_options(self):
         pkg_dir, dist = self.create_dist()
diff --git a/lib-python/2.7/distutils/tests/test_bdist_msi.py b/lib-python/2.7/distutils/tests/test_bdist_msi.py
--- a/lib-python/2.7/distutils/tests/test_bdist_msi.py
+++ b/lib-python/2.7/distutils/tests/test_bdist_msi.py
@@ -1,12 +1,11 @@
 """Tests for distutils.command.bdist_msi."""
+import sys
 import unittest
-import sys
-
 from test.test_support import run_unittest
-
 from distutils.tests import support
 
-@unittest.skipUnless(sys.platform=="win32", "These tests are only for win32")
+
+@unittest.skipUnless(sys.platform == 'win32', 'these tests require Windows')
 class BDistMSITestCase(support.TempdirManager,
                        support.LoggingSilencer,
                        unittest.TestCase):
@@ -14,10 +13,11 @@
     def test_minimal(self):
         # minimal test XXX need more tests
         from distutils.command.bdist_msi import bdist_msi
-        pkg_pth, dist = self.create_dist()
+        project_dir, dist = self.create_dist()
         cmd = bdist_msi(dist)
         cmd.ensure_finalized()
 
+
 def test_suite():
     return unittest.makeSuite(BDistMSITestCase)
 
diff --git a/lib-python/2.7/distutils/tests/test_bdist_rpm.py b/lib-python/2.7/distutils/tests/test_bdist_rpm.py
--- a/lib-python/2.7/distutils/tests/test_bdist_rpm.py
+++ b/lib-python/2.7/distutils/tests/test_bdist_rpm.py
@@ -79,6 +79,10 @@
         dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
         self.assertTrue('foo-0.1-1.noarch.rpm' in dist_created)
 
+        # bug #2945: upload ignores bdist_rpm files
+        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files)
+        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files)
+
     def test_no_optimize_flag(self):
 
         # XXX I am unable yet to make this test work without
@@ -118,6 +122,11 @@
 
         dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
         self.assertTrue('foo-0.1-1.noarch.rpm' in dist_created)
+
+        # bug #2945: upload ignores bdist_rpm files
+        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files)
+        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files)
+
         os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm'))
 
 def test_suite():
diff --git a/lib-python/2.7/distutils/tests/test_build_ext.py b/lib-python/2.7/distutils/tests/test_build_ext.py
--- a/lib-python/2.7/distutils/tests/test_build_ext.py
+++ b/lib-python/2.7/distutils/tests/test_build_ext.py
@@ -77,8 +77,9 @@
         self.assertEqual(xx.foo(2, 5), 7)
         self.assertEqual(xx.foo(13,15), 28)
         self.assertEqual(xx.new().demo(), None)
-        doc = 'This is a template module just for instruction.'
-        self.assertEqual(xx.__doc__, doc)
+        if test_support.HAVE_DOCSTRINGS:
+            doc = 'This is a template module just for instruction.'
+            self.assertEqual(xx.__doc__, doc)
         self.assertTrue(isinstance(xx.Null(), xx.Null))
         self.assertTrue(isinstance(xx.Str(), xx.Str))
 
diff --git a/lib-python/2.7/distutils/tests/test_dir_util.py b/lib-python/2.7/distutils/tests/test_dir_util.py
--- a/lib-python/2.7/distutils/tests/test_dir_util.py
+++ b/lib-python/2.7/distutils/tests/test_dir_util.py
@@ -101,6 +101,24 @@
         remove_tree(self.root_target, verbose=0)
         remove_tree(self.target2, verbose=0)
 
+    def test_copy_tree_skips_nfs_temp_files(self):
+        mkpath(self.target, verbose=0)
+
+        a_file = os.path.join(self.target, 'ok.txt')
+        nfs_file = os.path.join(self.target, '.nfs123abc')
+        for f in a_file, nfs_file:
+            fh = open(f, 'w')
+            try:
+                fh.write('some content')
+            finally:
+                fh.close()
+
+        copy_tree(self.target, self.target2)
+        self.assertEqual(os.listdir(self.target2), ['ok.txt'])
+
+        remove_tree(self.root_target, verbose=0)
+        remove_tree(self.target2, verbose=0)
+
     def test_ensure_relative(self):
         if os.sep == '/':
             self.assertEqual(ensure_relative('/home/foo'), 'home/foo')
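
A note on the new test (illustration only): '.nfs*' files are the "silly rename"
leftovers NFS creates when a file is removed while still open, and the test expects
copy_tree to leave them behind.  The dir_util change itself is outside this excerpt;
a rough, files-only approximation of the filter it presumably applies:

    import os
    import shutil

    def copy_files_skipping_nfs(src, dst):
        # flat approximation: copies regular files, skipping NFS leftovers
        if not os.path.isdir(dst):
            os.makedirs(dst)
        for name in os.listdir(src):
            if name.startswith('.nfs'):
                continue  # NFS rename leftover, do not copy
            shutil.copy2(os.path.join(src, name), os.path.join(dst, name))
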
diff --git a/lib-python/2.7/distutils/tests/test_install.py b/lib-python/2.7/distutils/tests/test_install.py
--- a/lib-python/2.7/distutils/tests/test_install.py
+++ b/lib-python/2.7/distutils/tests/test_install.py
@@ -86,19 +86,17 @@
         self.old_expand = os.path.expanduser
         os.path.expanduser = _expanduser
 
-        try:
-            # this is the actual test
-            self._test_user_site()
-        finally:
+        def cleanup():
             site.USER_BASE = self.old_user_base
             site.USER_SITE = self.old_user_site
             install_module.USER_BASE = self.old_user_base
             install_module.USER_SITE = self.old_user_site
             os.path.expanduser = self.old_expand
 
-    def _test_user_site(self):
+        self.addCleanup(cleanup)
+
         for key in ('nt_user', 'unix_user', 'os2_home'):
-            self.assertTrue(key in INSTALL_SCHEMES)
+            self.assertIn(key, INSTALL_SCHEMES)
 
         dist = Distribution({'name': 'xx'})
         cmd = install(dist)
@@ -106,14 +104,14 @@
         # making sure the user option is there
         options = [name for name, short, lable in
                    cmd.user_options]
-        self.assertTrue('user' in options)
+        self.assertIn('user', options)
 
         # setting a value
         cmd.user = 1
 
         # user base and site shouldn't be created yet
-        self.assertTrue(not os.path.exists(self.user_base))
-        self.assertTrue(not os.path.exists(self.user_site))
+        self.assertFalse(os.path.exists(self.user_base))
+        self.assertFalse(os.path.exists(self.user_site))
 
         # let's run finalize
         cmd.ensure_finalized()
@@ -122,8 +120,8 @@
         self.assertTrue(os.path.exists(self.user_base))
         self.assertTrue(os.path.exists(self.user_site))
 
-        self.assertTrue('userbase' in cmd.config_vars)
-        self.assertTrue('usersite' in cmd.config_vars)
+        self.assertIn('userbase', cmd.config_vars)
+        self.assertIn('usersite', cmd.config_vars)
 
     def test_handle_extra_path(self):
         dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'})
@@ -176,15 +174,16 @@
 
     def test_record(self):
         install_dir = self.mkdtemp()
-        project_dir, dist = self.create_dist(scripts=['hello'])
-        self.addCleanup(os.chdir, os.getcwd())
+        project_dir, dist = self.create_dist(py_modules=['hello'],
+                                             scripts=['sayhi'])
         os.chdir(project_dir)
-        self.write_file('hello', "print('o hai')")
+        self.write_file('hello.py', "def main(): print 'o hai'")
+        self.write_file('sayhi', 'from hello import main; main()')
 
         cmd = install(dist)
         dist.command_obj['install'] = cmd
         cmd.root = install_dir
-        cmd.record = os.path.join(project_dir, 'RECORD')
+        cmd.record = os.path.join(project_dir, 'filelist')
         cmd.ensure_finalized()
         cmd.run()
 
@@ -195,7 +194,7 @@
             f.close()
 
         found = [os.path.basename(line) for line in content.splitlines()]
-        expected = ['hello',
+        expected = ['hello.py', 'hello.pyc', 'sayhi',
                     'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]]
         self.assertEqual(found, expected)
 
@@ -203,7 +202,6 @@
         install_dir = self.mkdtemp()
         project_dir, dist = self.create_dist(ext_modules=[
             Extension('xx', ['xxmodule.c'])])
-        self.addCleanup(os.chdir, os.getcwd())
         os.chdir(project_dir)
         support.copy_xxmodule_c(project_dir)
 
@@ -215,7 +213,7 @@
         dist.command_obj['install'] = cmd
         dist.command_obj['build_ext'] = buildextcmd
         cmd.root = install_dir
-        cmd.record = os.path.join(project_dir, 'RECORD')
+        cmd.record = os.path.join(project_dir, 'filelist')
         cmd.ensure_finalized()
         cmd.run()
 
@@ -241,6 +239,7 @@
             install_module.DEBUG = False
         self.assertTrue(len(self.logs) > old_logs_len)
 
+
 def test_suite():
     return unittest.makeSuite(InstallTestCase)
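
The test_user_site rewrite above drops the separate _test_user_site helper and its
try/finally in favor of unittest's addCleanup, which runs registered callables after
the test (pass or fail) in reverse registration order.  A small generic illustration
of that pattern, using a hypothetical environment variable rather than the distutils
globals:

    import os
    import unittest

    class CleanupExample(unittest.TestCase):
        def test_patch_env(self):
            old = os.environ.get('EXAMPLE_VAR')   # hypothetical variable
            def restore():
                if old is None:
                    os.environ.pop('EXAMPLE_VAR', None)
                else:
                    os.environ['EXAMPLE_VAR'] = old
            self.addCleanup(restore)              # runs even if the test fails
            os.environ['EXAMPLE_VAR'] = 'patched'
            self.assertEqual(os.environ['EXAMPLE_VAR'], 'patched')
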
 
diff --git a/lib-python/2.7/distutils/tests/test_msvc9compiler.py b/lib-python/2.7/distutils/tests/test_msvc9compiler.py
--- a/lib-python/2.7/distutils/tests/test_msvc9compiler.py
+++ b/lib-python/2.7/distutils/tests/test_msvc9compiler.py
@@ -104,7 +104,7 @@
                             unittest.TestCase):
 
     def test_no_compiler(self):
-        # makes sure query_vcvarsall throws
+        # makes sure query_vcvarsall raises
         # a DistutilsPlatformError if the compiler
         # is not found
         from distutils.msvc9compiler import query_vcvarsall
diff --git a/lib-python/2.7/distutils/tests/test_register.py b/lib-python/2.7/distutils/tests/test_register.py
--- a/lib-python/2.7/distutils/tests/test_register.py
+++ b/lib-python/2.7/distutils/tests/test_register.py
@@ -1,6 +1,5 @@
 # -*- encoding: utf8 -*-
 """Tests for distutils.command.register."""
-import sys
 import os
 import unittest
 import getpass
@@ -11,11 +10,14 @@
 
 from distutils.command import register as register_module
 from distutils.command.register import register
-from distutils.core import Distribution
 from distutils.errors import DistutilsSetupError
 
-from distutils.tests import support
-from distutils.tests.test_config import PYPIRC, PyPIRCCommandTestCase
+from distutils.tests.test_config import PyPIRCCommandTestCase
+
+try:
+    import docutils
+except ImportError:
+    docutils = None
 
 PYPIRC_NOPASSWORD = """\
 [distutils]
@@ -192,6 +194,7 @@
         self.assertEqual(headers['Content-length'], '290')
         self.assertTrue('tarek' in req.data)
 
+    @unittest.skipUnless(docutils is not None, 'needs docutils')
     def test_strict(self):
         # testing the script option
         # when on, the register command stops if
@@ -204,13 +207,6 @@
         cmd.strict = 1
         self.assertRaises(DistutilsSetupError, cmd.run)
 
-        # we don't test the reSt feature if docutils
-        # is not installed
-        try:
-            import docutils
-        except ImportError:
-            return
-
         # metadata are OK but long_description is broken
         metadata = {'url': 'xxx', 'author': 'xxx',
                     'author_email': u'éxéxé',
@@ -264,6 +260,21 @@
         finally:
             del register_module.raw_input
 
+    @unittest.skipUnless(docutils is not None, 'needs docutils')
+    def test_register_invalid_long_description(self):
+        description = ':funkie:`str`'  # mimic Sphinx-specific markup
+        metadata = {'url': 'xxx', 'author': 'xxx',
+                    'author_email': 'xxx',
+                    'name': 'xxx', 'version': 'xxx',
+                    'long_description': description}
+        cmd = self._get_cmd(metadata)
+        cmd.ensure_finalized()
+        cmd.strict = True
+        inputs = RawInputs('2', 'tarek', 'tarek@ziade.org')
+        register_module.raw_input = inputs
+        self.addCleanup(delattr, register_module, 'raw_input')
+        self.assertRaises(DistutilsSetupError, cmd.run)
+
     def test_check_metadata_deprecated(self):
         # makes sure make_metadata is deprecated
         cmd = self._get_cmd()
diff --git a/lib-python/2.7/distutils/tests/test_sdist.py b/lib-python/2.7/distutils/tests/test_sdist.py
--- a/lib-python/2.7/distutils/tests/test_sdist.py
+++ b/lib-python/2.7/distutils/tests/test_sdist.py
@@ -6,6 +6,7 @@
 import zipfile
 from os.path import join
 from textwrap import dedent
+from test.test_support import captured_stdout, check_warnings, run_unittest
 
 # zlib is not used here, but if it's not available
 # the tests that use zipfile may fail
@@ -21,7 +22,6 @@
 except ImportError:
     UID_GID_SUPPORT = False
 
-from test.test_support import captured_stdout, check_warnings, run_unittest
 
 from distutils.command.sdist import sdist, show_formats
 from distutils.core import Distribution
@@ -91,9 +91,8 @@
 
     @unittest.skipUnless(zlib, "requires zlib")
     def test_prune_file_list(self):
-        # this test creates a package with some vcs dirs in it
-        # and launch sdist to make sure they get pruned
-        # on all systems
+        # this test creates a project with some VCS dirs and an NFS rename
+        # file, then launches sdist to check they get pruned on all systems
 
         # creating VCS directories with some files in them
         os.mkdir(join(self.tmp_dir, 'somecode', '.svn'))
@@ -107,6 +106,8 @@
         self.write_file((self.tmp_dir, 'somecode', '.git',
                          'ok'), 'xxx')
 
+        self.write_file((self.tmp_dir, 'somecode', '.nfs0001'), 'xxx')
+
         # now building a sdist
         dist, cmd = self.get_cmd()
 
@@ -375,7 +376,7 @@
     # the following tests make sure there is a nice error message instead
     # of a traceback when parsing an invalid manifest template
 
-    def _test_template(self, content):
+    def _check_template(self, content):
         dist, cmd = self.get_cmd()
         os.chdir(self.tmp_dir)
         self.write_file('MANIFEST.in', content)
@@ -386,17 +387,17 @@
         self.assertEqual(len(warnings), 1)
 
     def test_invalid_template_unknown_command(self):
-        self._test_template('taunt knights *')
+        self._check_template('taunt knights *')
 
     def test_invalid_template_wrong_arguments(self):
         # this manifest command takes one argument
-        self._test_template('prune')
+        self._check_template('prune')
 
     @unittest.skipIf(os.name != 'nt', 'test relevant for Windows only')
     def test_invalid_template_wrong_path(self):
         # on Windows, trailing slashes are not allowed
         # this used to crash instead of raising a warning: #8286
-        self._test_template('include examples/')
+        self._check_template('include examples/')
 
     @unittest.skipUnless(zlib, "requires zlib")
     def test_get_file_list(self):
diff --git a/lib-python/2.7/doctest.py b/lib-python/2.7/doctest.py
--- a/lib-python/2.7/doctest.py
+++ b/lib-python/2.7/doctest.py
@@ -2314,7 +2314,8 @@
         return "Doctest: " + self._dt_test.name
 
 class SkipDocTestCase(DocTestCase):
-    def __init__(self):
+    def __init__(self, module):
+        self.module = module
         DocTestCase.__init__(self, None)
 
     def setUp(self):
@@ -2324,7 +2325,10 @@
         pass
 
     def shortDescription(self):
-        return "Skipping tests from %s" % module.__name__
+        return "Skipping tests from %s" % self.module.__name__
+
+    __str__ = shortDescription
+
 
 def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
                  **options):
@@ -2372,12 +2376,17 @@
     if not tests and sys.flags.optimize >=2:
         # Skip doctests when running with -O2
         suite = unittest.TestSuite()
-        suite.addTest(SkipDocTestCase())
+        suite.addTest(SkipDocTestCase(module))
         return suite
     elif not tests:
         # Why do we want to do this? Because it reveals a bug that might
         # otherwise be hidden.
-        raise ValueError(module, "has no tests")
+        # It is probably a bug that this exception is not also raised if the
+        # number of doctest examples in tests is zero (i.e. if no doctest
+        # examples were found).  However, we should probably not be raising
+        # an exception at all here, though it is too late to make this change
+        # for a maintenance release.  See also issue #14649.
+        raise ValueError(module, "has no docstrings")
 
     tests.sort()
     suite = unittest.TestSuite()
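
Two independent doctest fixes are visible above: SkipDocTestCase now remembers the
module it stands in for (its shortDescription previously referenced a name that is
not defined in that scope), and the error for a module with no docstrings is
reworded.  A hedged sketch of the second path, using a throwaway module created on
the fly:

    import doctest
    import types

    empty = types.ModuleType('empty_example')   # hypothetical, docstring-free
    try:
        doctest.DocTestSuite(empty)
    except ValueError as exc:
        # after this change the args read (module, 'has no docstrings'),
        # unless running under -OO, where a skipped test is produced instead
        print exc
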
diff --git a/lib-python/2.7/email/_parseaddr.py b/lib-python/2.7/email/_parseaddr.py
--- a/lib-python/2.7/email/_parseaddr.py
+++ b/lib-python/2.7/email/_parseaddr.py
@@ -13,7 +13,7 @@
     'quote',
     ]
 
-import time
+import time, calendar
 
 SPACE = ' '
 EMPTYSTRING = ''
@@ -150,13 +150,13 @@
 
 
 def mktime_tz(data):
-    """Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp."""
+    """Turn a 10-tuple as returned by parsedate_tz() into a POSIX timestamp."""
     if data[9] is None:
         # No zone info, so localtime is better assumption than GMT
         return time.mktime(data[:8] + (-1,))
     else:
-        t = time.mktime(data[:8] + (0,))
-        return t - data[9] - time.timezone
+        t = calendar.timegm(data)
+        return t - data[9]
 
 
 def quote(str):
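
The mktime_tz change deserves a note: the old code pushed the parsed tuple through
time.mktime with DST disabled and then corrected by time.timezone, which could be
off by an hour around local DST on some platforms; with calendar.timegm the tuple
is interpreted directly as UTC, and subtracting the parsed offset yields a POSIX
timestamp that no longer depends on the local zone at all.  A small worked example
(illustration only):

    from email.utils import mktime_tz, parsedate_tz

    # Midnight 1 Jan 1970 UTC is exactly the epoch:
    print mktime_tz(parsedate_tz('Thu, 01 Jan 1970 00:00:00 +0000'))   # 0

    # The same wall-clock time expressed one hour east of UTC is still the
    # epoch, while midnight at +0100 is an hour before it:
    print mktime_tz(parsedate_tz('Thu, 01 Jan 1970 01:00:00 +0100'))   # 0
    print mktime_tz(parsedate_tz('Thu, 01 Jan 1970 00:00:00 +0100'))   # -3600
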
diff --git a/lib-python/2.7/email/base64mime.py b/lib-python/2.7/email/base64mime.py
--- a/lib-python/2.7/email/base64mime.py
+++ b/lib-python/2.7/email/base64mime.py
@@ -130,7 +130,7 @@
     verbatim (this is the default).
 
     Each line of encoded text will end with eol, which defaults to "\\n".  Set
-    this to "\r\n" if you will be using the result of this function directly
+    this to "\\r\\n" if you will be using the result of this function directly
     in an email.
     """
     if not s:
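
The docstring fix above just restores the literal backslashes (an unescaped "\r\n"
renders as a real carriage return/line feed in help output).  For reference, the eol
parameter it documents behaves like this (illustration only):

    from email import base64mime

    # default line ending is '\n'; pass eol='\r\n' when the result goes
    # directly into a message sent over the wire
    print repr(base64mime.encode('hello'))                # 'aGVsbG8=\n'
    print repr(base64mime.encode('hello', eol='\r\n'))    # 'aGVsbG8=\r\n'
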
diff --git a/lib-python/2.7/email/feedparser.py b/lib-python/2.7/email/feedparser.py
--- a/lib-python/2.7/email/feedparser.py
+++ b/lib-python/2.7/email/feedparser.py
@@ -13,7 +13,7 @@
 data.  When you have no more data to push into the parser, call .close().
 This completes the parsing and returns the root message object.
 
-The other advantage of this parser is that it will never throw a parsing
+The other advantage of this parser is that it will never raise a parsing
 exception.  Instead, when it finds something unexpected, it adds a 'defect' to
 the current message.  Defects are just instances that live on the message
 object's .defects attribute.
@@ -214,7 +214,7 @@
         # supposed to see in the body of the message.
         self._parse_headers(headers)
         # Headers-only parsing is a backwards compatibility hack, which was
-        # necessary in the older parser, which could throw errors.  All
+        # necessary in the older parser, which could raise errors.  All
         # remaining lines in the input are thrown into the message body.
         if self._headersonly:
             lines = []
diff --git a/lib-python/2.7/email/generator.py b/lib-python/2.7/email/generator.py
--- a/lib-python/2.7/email/generator.py
+++ b/lib-python/2.7/email/generator.py
@@ -212,7 +212,11 @@
             msg.set_boundary(boundary)
         # If there's a preamble, write it out, with a trailing CRLF
         if msg.preamble is not None:
-            print >> self._fp, msg.preamble
+            if self._mangle_from_:
+                preamble = fcre.sub('>From ', msg.preamble)
+            else:
+                preamble = msg.preamble
+            print >> self._fp, preamble
         # dash-boundary transport-padding CRLF
         print >> self._fp, '--' + boundary
         # body-part
@@ -230,7 +234,11 @@
         self._fp.write('\n--' + boundary + '--')
         if msg.epilogue is not None:
             print >> self._fp
-            self._fp.write(msg.epilogue)
+            if self._mangle_from_:
+                epilogue = fcre.sub('>From ', msg.epilogue)
+            else:
+                epilogue = msg.epilogue
+            self._fp.write(epilogue)
 
     def _handle_multipart_signed(self, msg):
         # The contents of signed parts has to stay unmodified in order to keep
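
The two hunks above extend From-mangling to the multipart preamble and epilogue;
previously only body parts had lines starting with "From " rewritten to ">From ".
A small demonstration of the effect (illustration only; it mirrors the test added
to test_email.py further down):

    import email
    from cStringIO import StringIO
    from email.generator import Generator

    msg = email.message_from_string(
        'From: foo@bar.com\n'
        'Content-Type: multipart/mixed; boundary=XXX\n'
        '\n'
        'From somewhere unknown\n'    # preamble line that needs mangling
        '\n'
        '--XXX\n'
        'Content-Type: text/plain\n'
        '\n'
        'body\n'
        '--XXX--\n')
    out = StringIO()
    Generator(out, mangle_from_=True).flatten(msg)
    print [l for l in out.getvalue().splitlines() if l.startswith('>From ')]
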
diff --git a/lib-python/2.7/email/test/test_email.py b/lib-python/2.7/email/test/test_email.py
--- a/lib-python/2.7/email/test/test_email.py
+++ b/lib-python/2.7/email/test/test_email.py
@@ -9,6 +9,7 @@
 import difflib
 import unittest
 import warnings
+import textwrap
 from cStringIO import StringIO
 
 import email
@@ -948,6 +949,28 @@
 Blah blah blah
 """)
 
+    def test_mangle_from_in_preamble_and_epilog(self):
+        s = StringIO()
+        g = Generator(s, mangle_from_=True)
+        msg = email.message_from_string(textwrap.dedent("""\
+            From: foo@bar.com
+            Mime-Version: 1.0
+            Content-Type: multipart/mixed; boundary=XXX
+
+            From somewhere unknown
+
+            --XXX
+            Content-Type: text/plain
+
+            foo
+
+            --XXX--
+
+            From somewhere unknowable
+            """))
+        g.flatten(msg)
+        self.assertEqual(len([1 for x in s.getvalue().split('\n')
+                                  if x.startswith('>From ')]), 2)
 
 
 # Test the basic MIMEAudio class
@@ -2262,6 +2285,12 @@
         eq(time.localtime(t)[:6], timetup[:6])
         eq(int(time.strftime('%Y', timetup[:9])), 2003)
 
+    def test_mktime_tz(self):
+        self.assertEqual(Utils.mktime_tz((1970, 1, 1, 0, 0, 0,
+                                          -1, -1, -1, 0)), 0)
+        self.assertEqual(Utils.mktime_tz((1970, 1, 1, 0, 0, 0,
+                                          -1, -1, -1, 1234)), -1234)
+
     def test_parsedate_y2k(self):
         """Test for parsing a date with a two-digit year.
 
diff --git a/lib-python/2.7/email/utils.py b/lib-python/2.7/email/utils.py
--- a/lib-python/2.7/email/utils.py
+++ b/lib-python/2.7/email/utils.py
@@ -63,7 +63,7 @@
     """Decodes a base64 string.
 
     This function is equivalent to base64.decodestring and it's retained only
-    for backward compatibility. It used to remove the last \n of the decoded
+    for backward compatibility. It used to remove the last \\n of the decoded
     string, if it had any (see issue 7143).
     """
     if not s:
@@ -73,7 +73,7 @@
 
 
 def fix_eols(s):
-    """Replace all line-ending characters with \r\n."""
+    """Replace all line-ending characters with \\r\\n."""
     # Fix newlines with no preceding carriage return
     s = re.sub(r'(?<!\r)\n', CRLF, s)
     # Fix carriage returns with no following newline
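
As with base64mime above, this is purely a docstring escape fix; fix_eols itself is
unchanged.  Its behaviour, for reference (illustration only):

    from email.utils import fix_eols

    # bare LF and bare CR are both normalized to CRLF; existing CRLF pairs
    # are left alone
    print repr(fix_eols('a\nb\rc\r\nd'))   # 'a\r\nb\r\nc\r\nd'
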
diff --git a/lib-python/2.7/ftplib.py b/lib-python/2.7/ftplib.py
--- a/lib-python/2.7/ftplib.py
+++ b/lib-python/2.7/ftplib.py
@@ -273,21 +273,24 @@
 
     def makeport(self):

