# DP: hg updates of the 3.2 release branch (until 2012-08-26).

# hg diff -r v3.2.3 | filterdiff --exclude=.hgignore --exclude=.hgeol --exclude=.hgtags --remove-timestamps

diff -r 3d0686d90f55 Doc/ACKS.txt
--- a/Doc/ACKS.txt
+++ b/Doc/ACKS.txt
@@ -120,6 +120,7 @@
    * Thomas Lamb
    * Detlef Lannert
    * Piers Lauder
+   * Julia Lawall
    * Glyph Lefkowitz
    * Robert Lehmann
    * Marc-André Lemburg
diff -r 3d0686d90f55 Doc/c-api/long.rst
--- a/Doc/c-api/long.rst
+++ b/Doc/c-api/long.rst
@@ -108,37 +108,55 @@
 
 
 .. XXX alias PyLong_AS_LONG (for now)
-.. c:function:: long PyLong_AsLong(PyObject *pylong)
+.. c:function:: long PyLong_AsLong(PyObject *obj)
 
    .. index::
       single: LONG_MAX
       single: OverflowError (built-in exception)
 
-   Return a C :c:type:`long` representation of the contents of *pylong*.  If
-   *pylong* is greater than :const:`LONG_MAX`, raise an :exc:`OverflowError`,
-   and return -1. Convert non-long objects automatically to long first,
-   and return -1 if that raises exceptions.
+   Return a C :c:type:`long` representation of *obj*.  If *obj* is not an
+   instance of :c:type:`PyLongObject`, first call its :meth:`__int__` method
+   (if present) to convert it to a :c:type:`PyLongObject`.
 
-.. c:function:: long PyLong_AsLongAndOverflow(PyObject *pylong, int *overflow)
+   Raise :exc:`OverflowError` if the value of *obj* is out of range for a
+   :c:type:`long`.
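+
+   A minimal error-handling sketch (an illustration, assuming *obj* refers to a
+   suitable Python object and the enclosing function returns
+   :c:type:`PyObject\*`): since ``-1`` is also a legitimate return value,
+   callers must check for a pending exception::
+
+      long value = PyLong_AsLong(obj);
+      if (value == -1 && PyErr_Occurred()) {
+          /* conversion failed (e.g. OverflowError or TypeError) */
+          return NULL;
+      }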
 
-   Return a C :c:type:`long` representation of the contents of
-   *pylong*.  If *pylong* is greater than :const:`LONG_MAX` or less
-   than :const:`LONG_MIN`, set *\*overflow* to ``1`` or ``-1``,
-   respectively, and return ``-1``; otherwise, set *\*overflow* to
-   ``0``.  If any other exception occurs (for example a TypeError or
-   MemoryError), then ``-1`` will be returned and *\*overflow* will
-   be ``0``.
 
+.. c:function:: long PyLong_AsLongAndOverflow(PyObject *obj, int *overflow)
 
-.. c:function:: PY_LONG_LONG PyLong_AsLongLongAndOverflow(PyObject *pylong, int *overflow)
+   Return a C :c:type:`long` representation of *obj*.  If *obj* is not an
+   instance of :c:type:`PyLongObject`, first call its :meth:`__int__` method
+   (if present) to convert it to a :c:type:`PyLongObject`.
 
-   Return a C :c:type:`long long` representation of the contents of
-   *pylong*.  If *pylong* is greater than :const:`PY_LLONG_MAX` or less
-   than :const:`PY_LLONG_MIN`, set *\*overflow* to ``1`` or ``-1``,
-   respectively, and return ``-1``; otherwise, set *\*overflow* to
-   ``0``.  If any other exception occurs (for example a TypeError or
-   MemoryError), then ``-1`` will be returned and *\*overflow* will
-   be ``0``.
+   If the value of *obj* is greater than :const:`LONG_MAX` or less than
+   :const:`LONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively, and
+   return ``-1``; otherwise, set *\*overflow* to ``0``.  If any other exception
+   occurs, set *\*overflow* to ``0`` and return ``-1`` as usual.
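+
+   A minimal usage sketch (illustrative only): the *overflow* flag lets the
+   caller distinguish an out-of-range value from other failures::
+
+      int overflow;
+      long value = PyLong_AsLongAndOverflow(obj, &overflow);
+      if (value == -1 && PyErr_Occurred()) {
+          /* an error other than overflow occurred (e.g. TypeError) */
+      }
+      else if (overflow != 0) {
+          /* obj was below LONG_MIN (overflow == -1) or above LONG_MAX (overflow == 1) */
+      }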
+
+
+.. c:function:: PY_LONG_LONG PyLong_AsLongLong(PyObject *obj)
+
+   .. index::
+      single: OverflowError (built-in exception)
+
+   Return a C :c:type:`long long` representation of *obj*.  If *obj* is not an
+   instance of :c:type:`PyLongObject`, first call its :meth:`__int__` method
+   (if present) to convert it to a :c:type:`PyLongObject`.
+
+   Raise :exc:`OverflowError` if the value of *obj* is out of range for a
+   :c:type:`long long`.
+
+
+.. c:function:: PY_LONG_LONG PyLong_AsLongLongAndOverflow(PyObject *obj, int *overflow)
+
+   Return a C :c:type:`long long` representation of *obj*.  If *obj* is not an
+   instance of :c:type:`PyLongObject`, first call its :meth:`__int__` method
+   (if present) to convert it to a :c:type:`PyLongObject`.
+
+   If the value of *obj* is greater than :const:`PY_LLONG_MAX` or less than
+   :const:`PY_LLONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively,
+   and return ``-1``; otherwise, set *\*overflow* to ``0``.  If any other
+   exception occurs, set *\*overflow* to ``0`` and return ``-1`` as usual.
 
    .. versionadded:: 3.2
 
@@ -149,9 +167,11 @@
       single: PY_SSIZE_T_MAX
       single: OverflowError (built-in exception)
 
-   Return a C :c:type:`Py_ssize_t` representation of the contents of *pylong*.
-   If *pylong* is greater than :const:`PY_SSIZE_T_MAX`, an :exc:`OverflowError`
-   is raised and ``-1`` will be returned.
+   Return a C :c:type:`Py_ssize_t` representation of *pylong*.  *pylong* must
+   be an instance of :c:type:`PyLongObject`.
+
+   Raise :exc:`OverflowError` if the value of *pylong* is out of range for a
+   :c:type:`Py_ssize_t`.
 
 
 .. c:function:: unsigned long PyLong_AsUnsignedLong(PyObject *pylong)
@@ -160,26 +180,20 @@
       single: ULONG_MAX
       single: OverflowError (built-in exception)
 
-   Return a C :c:type:`unsigned long` representation of the contents of *pylong*.
-   If *pylong* is greater than :const:`ULONG_MAX`, an :exc:`OverflowError` is
-   raised.
+   Return a C :c:type:`unsigned long` representation of *pylong*.  *pylong*
+   must be an instance of :c:type:`PyLongObject`.
+
+   Raise :exc:`OverflowError` if the value of *pylong* is out of range for an
+   :c:type:`unsigned long`.
 
 
 .. c:function:: size_t PyLong_AsSize_t(PyObject *pylong)
 
-   Return a :c:type:`size_t` representation of the contents of *pylong*.  If
-   *pylong* is greater than the maximum value for a :c:type:`size_t`, an
-   :exc:`OverflowError` is raised.
+   Return a C :c:type:`size_t` representation of *pylong*.  *pylong* must be
+   an instance of :c:type:`PyLongObject`.
 
-
-.. c:function:: PY_LONG_LONG PyLong_AsLongLong(PyObject *pylong)
-
-   .. index::
-      single: OverflowError (built-in exception)
-
-   Return a C :c:type:`long long` from a Python integer.  If *pylong*
-   cannot be represented as a :c:type:`long long`, an
-   :exc:`OverflowError` is raised and ``-1`` is returned.
+   Raise :exc:`OverflowError` if the value of *pylong* is out of range for a
+   :c:type:`size_t`.
 
 
 .. c:function:: unsigned PY_LONG_LONG PyLong_AsUnsignedLongLong(PyObject *pylong)
@@ -187,32 +201,43 @@
    .. index::
       single: OverflowError (built-in exception)
 
-   Return a C :c:type:`unsigned long long` from a Python integer. If
-   *pylong* cannot be represented as an :c:type:`unsigned long long`,
-   an :exc:`OverflowError` is raised and ``(unsigned long long)-1`` is
-   returned.
+   Return a C :c:type:`unsigned PY_LONG_LONG` representation of *pylong*.
+   *pylong* must be an instance of :c:type:`PyLongObject`.
+
+   Raise :exc:`OverflowError` if the value of *pylong* is out of range for an
+   :c:type:`unsigned PY_LONG_LONG`.
 
    .. versionchanged:: 3.1
       A negative *pylong* now raises :exc:`OverflowError`, not :exc:`TypeError`.
 
 
-.. c:function:: unsigned long PyLong_AsUnsignedLongMask(PyObject *io)
+.. c:function:: unsigned long PyLong_AsUnsignedLongMask(PyObject *obj)
 
-   Return a C :c:type:`unsigned long` from a Python integer, without checking for
-   overflow.
+   Return a C :c:type:`unsigned long` representation of *obj*.  If *obj*
+   is not an instance of :c:type:`PyLongObject`, first call its :meth:`__int__`
+   method (if present) to convert it to a :c:type:`PyLongObject`.
 
+   If the value of *obj* is out of range for an :c:type:`unsigned long`,
+   return the reduction of that value modulo :const:`ULONG_MAX + 1`.
 
-.. c:function:: unsigned PY_LONG_LONG PyLong_AsUnsignedLongLongMask(PyObject *io)
 
-   Return a C :c:type:`unsigned long long` from a Python integer, without
-   checking for overflow.
+.. c:function:: unsigned PY_LONG_LONG PyLong_AsUnsignedLongLongMask(PyObject *obj)
+
+   Return a C :c:type:`unsigned long long` representation of *obj*.  If *obj*
+   is not an instance of :c:type:`PyLongObject`, first call its :meth:`__int__`
+   method (if present) to convert it to a :c:type:`PyLongObject`.
+
+   If the value of *obj* is out of range for an :c:type:`unsigned long long`,
+   return the reduction of that value modulo :const:`PY_ULLONG_MAX + 1`.
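+
+   For example (an illustrative sketch), converting the Python integer ``-1``
+   this way does not raise :exc:`OverflowError` but yields the reduced value
+   instead::
+
+      PyObject *num = PyLong_FromLong(-1);
+      /* masked == PY_ULLONG_MAX, i.e. -1 reduced modulo PY_ULLONG_MAX + 1 */
+      unsigned PY_LONG_LONG masked = PyLong_AsUnsignedLongLongMask(num);
+      Py_DECREF(num);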
 
 
 .. c:function:: double PyLong_AsDouble(PyObject *pylong)
 
-   Return a C :c:type:`double` representation of the contents of *pylong*.  If
-   *pylong* cannot be approximately represented as a :c:type:`double`, an
-   :exc:`OverflowError` exception is raised and ``-1.0`` will be returned.
+   Return a C :c:type:`double` representation of *pylong*.  *pylong* must be
+   an instance of :c:type:`PyLongObject`.
+
+   Raise :exc:`OverflowError` if the value of *pylong* is out of range for a
+   :c:type:`double`.
 
 
 .. c:function:: void* PyLong_AsVoidPtr(PyObject *pylong)
diff -r 3d0686d90f55 Doc/c-api/memory.rst
--- a/Doc/c-api/memory.rst
+++ b/Doc/c-api/memory.rst
@@ -98,7 +98,7 @@
 
    Allocates *n* bytes and returns a pointer of type :c:type:`void\*` to the
    allocated memory, or *NULL* if the request fails. Requesting zero bytes returns
-   a distinct non-*NULL* pointer if possible, as if :c:func:`PyMem_Malloc(1)` had
+   a distinct non-*NULL* pointer if possible, as if ``PyMem_Malloc(1)`` had
    been called instead. The memory will not have been initialized in any way.
 
 
@@ -106,7 +106,7 @@
 
    Resizes the memory block pointed to by *p* to *n* bytes. The contents will be
    unchanged to the minimum of the old and the new sizes. If *p* is *NULL*, the
-   call is equivalent to :c:func:`PyMem_Malloc(n)`; else if *n* is equal to zero,
+   call is equivalent to ``PyMem_Malloc(n)``; else if *n* is equal to zero,
    the memory block is resized but is not freed, and the returned pointer is
    non-*NULL*.  Unless *p* is *NULL*, it must have been returned by a previous call
    to :c:func:`PyMem_Malloc` or :c:func:`PyMem_Realloc`. If the request fails,
@@ -118,7 +118,7 @@
 
    Frees the memory block pointed to by *p*, which must have been returned by a
    previous call to :c:func:`PyMem_Malloc` or :c:func:`PyMem_Realloc`.  Otherwise, or
-   if :c:func:`PyMem_Free(p)` has been called before, undefined behavior occurs. If
+   if ``PyMem_Free(p)`` has been called before, undefined behavior occurs. If
    *p* is *NULL*, no operation is performed.
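+
+   A minimal usage sketch (illustrative; it assumes the enclosing function
+   returns :c:type:`PyObject\*`, so :c:func:`PyErr_NoMemory` can be used to
+   report failure)::
+
+      char *buf = (char *) PyMem_Malloc(100);
+      if (buf == NULL)
+          return PyErr_NoMemory();
+      /* ... use the 100-byte buffer ... */
+      PyMem_Free(buf);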
 
 The following type-oriented macros are provided for convenience.  Note  that
diff -r 3d0686d90f55 Doc/c-api/unicode.rst
--- a/Doc/c-api/unicode.rst
+++ b/Doc/c-api/unicode.rst
@@ -5,7 +5,7 @@
 Unicode Objects and Codecs
 --------------------------
 
-.. sectionauthor:: Marc-Andre Lemburg <mal@lemburg.com>
+.. sectionauthor:: Marc-André Lemburg <mal@lemburg.com>
 
 Unicode Objects
 ^^^^^^^^^^^^^^^
diff -r 3d0686d90f55 Doc/conf.py
--- a/Doc/conf.py
+++ b/Doc/conf.py
@@ -65,9 +65,12 @@
 # Options for HTML output
 # -----------------------
 
-html_theme = 'default'
+html_theme = 'pydoctheme'
+html_theme_path = ['tools/sphinxext']
 html_theme_options = {'collapsiblesidebar': True}
 
+html_short_title = '%s Documentation' % release
+
 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
 html_last_updated_fmt = '%b %d, %Y'
@@ -88,7 +91,7 @@
 }
 
 # Output an OpenSearch description file.
-html_use_opensearch = 'http://docs.python.org/dev/py3k'
+html_use_opensearch = 'http://docs.python.org/3.2'
 
 # Additional static files.
 html_static_path = ['tools/sphinxext/static']
diff -r 3d0686d90f55 Doc/distutils/uploading.rst
--- a/Doc/distutils/uploading.rst
+++ b/Doc/distutils/uploading.rst
@@ -73,4 +73,8 @@
     $ python setup.py --long-description | rst2html.py > output.html
 
 :mod:`docutils` will display a warning if there's something wrong with your
-syntax.
+syntax.  Because PyPI applies additional checks (e.g. by passing ``--no-raw``
+to ``rst2html.py`` in the command above), running that command without
+warnings does not guarantee that PyPI will convert the content successfully.
+
diff -r 3d0686d90f55 Doc/extending/embedding.rst
--- a/Doc/extending/embedding.rst
+++ b/Doc/extending/embedding.rst
@@ -155,13 +155,13 @@
 interesting part with respect to embedding Python starts with ::
 
    Py_Initialize();
-   pName = PyString_FromString(argv[1]);
+   pName = PyUnicode_FromString(argv[1]);
    /* Error checking of pName left out */
    pModule = PyImport_Import(pName);
 
 After initializing the interpreter, the script is loaded using
 :c:func:`PyImport_Import`.  This routine needs a Python string as its argument,
-which is constructed using the :c:func:`PyString_FromString` data conversion
+which is constructed using the :c:func:`PyUnicode_FromString` data conversion
 routine. ::
 
    pFunc = PyObject_GetAttrString(pModule, argv[2]);
diff -r 3d0686d90f55 Doc/extending/extending.rst
--- a/Doc/extending/extending.rst
+++ b/Doc/extending/extending.rst
@@ -735,13 +735,18 @@
        {NULL, NULL, 0, NULL}   /* sentinel */
    };
 
-::
+   static struct PyModuleDef keywdargmodule = {
+       PyModuleDef_HEAD_INIT,
+       "keywdarg",
+       NULL,
+       -1,
+       keywdarg_methods
+   };
 
-   void
-   initkeywdarg(void)
+   PyMODINIT_FUNC
+   PyInit_keywdarg(void)
    {
-     /* Create the module and add the functions */
-     Py_InitModule("keywdarg", keywdarg_methods);
+       return PyModule_Create(&keywdargmodule);
    }
 
 
diff -r 3d0686d90f55 Doc/extending/newtypes.rst
--- a/Doc/extending/newtypes.rst
+++ b/Doc/extending/newtypes.rst
@@ -1459,9 +1459,8 @@
    }
 
 The only further addition is that the destructor needs to call the weak
-reference manager to clear any weak references.  This should be done before any
-other parts of the destruction have occurred, but is only required if the weak
-reference list is non-*NULL*::
+reference manager to clear any weak references.  This is only required if the
+weak reference list is non-*NULL*::
 
    static void
    instance_dealloc(PyInstanceObject *inst)
diff -r 3d0686d90f55 Doc/faq/design.rst
--- a/Doc/faq/design.rst
+++ b/Doc/faq/design.rst
@@ -43,56 +43,45 @@
 See the next question.
 
 
-Why are floating point calculations so inaccurate?
+Why are floating-point calculations so inaccurate?
 --------------------------------------------------
 
-People are often very surprised by results like this::
+Users are often surprised by results like this::
 
-   >>> 1.2 - 1.0
-   0.199999999999999996
+    >>> 1.2 - 1.0
+    0.199999999999999996
 
-and think it is a bug in Python. It's not.  This has nothing to do with Python,
-but with how the underlying C platform handles floating point numbers, and
-ultimately with the inaccuracies introduced when writing down numbers as a
-string of a fixed number of digits.
+and think it is a bug in Python.  It's not.  This has little to do with Python,
+and much more to do with how the underlying platform handles floating-point
+numbers.
 
-The internal representation of floating point numbers uses a fixed number of
-binary digits to represent a decimal number.  Some decimal numbers can't be
-represented exactly in binary, resulting in small roundoff errors.
+The :class:`float` type in CPython uses a C ``double`` for storage.  A
+:class:`float` object's value is stored in binary floating-point with a fixed
+precision (typically 53 bits) and Python uses C operations, which in turn rely
+on the hardware implementation in the processor, to perform floating-point
+operations. This means that as far as floating-point operations are concerned,
+Python behaves like many popular languages including C and Java.
 
-In decimal math, there are many numbers that can't be represented with a fixed
-number of decimal digits, e.g.  1/3 = 0.3333333333.......
+Many numbers that can be written easily in decimal notation cannot be expressed
+exactly in binary floating-point.  For example, after::
 
-In base 2, 1/2 = 0.1, 1/4 = 0.01, 1/8 = 0.001, etc.  .2 equals 2/10 equals 1/5,
-resulting in the binary fractional number 0.001100110011001...
+    >>> x = 1.2
 
-Floating point numbers only have 32 or 64 bits of precision, so the digits are
-cut off at some point, and the resulting number is 0.199999999999999996 in
-decimal, not 0.2.
+the value stored for ``x`` is a (very good) approximation to the decimal value
+``1.2``, but is not exactly equal to it.  On a typical machine, the actual
+stored value is::
 
-A floating point number's ``repr()`` function prints as many digits are
-necessary to make ``eval(repr(f)) == f`` true for any float f.  The ``str()``
-function prints fewer digits and this often results in the more sensible number
-that was probably intended::
+    1.0011001100110011001100110011001100110011001100110011 (binary)
 
-   >>> 1.1 - 0.9
-   0.20000000000000007
-   >>> print(1.1 - 0.9)
-   0.2
+which is exactly::
 
-One of the consequences of this is that it is error-prone to compare the result
-of some computation to a float with ``==``. Tiny inaccuracies may mean that
-``==`` fails.  Instead, you have to check that the difference between the two
-numbers is less than a certain threshold::
+    1.1999999999999999555910790149937383830547332763671875 (decimal)
 
-   epsilon = 0.0000000000001  # Tiny allowed error
-   expected_result = 0.4
+The typical precision of 53 bits provides Python floats with 15-16
+decimal digits of accuracy.
 
-   if expected_result-epsilon <= computation() <= expected_result+epsilon:
-       ...
-
-Please see the chapter on :ref:`floating point arithmetic <tut-fp-issues>` in
-the Python tutorial for more information.
+For a fuller explanation, please see the :ref:`floating point arithmetic
+<tut-fp-issues>` chapter in the Python tutorial.
 
 
 Why are Python strings immutable?
@@ -284,8 +273,9 @@
 How fast are exceptions?
 ------------------------
 
-A try/except block is extremely efficient.  Actually catching an exception is
-expensive.  In versions of Python prior to 2.0 it was common to use this idiom::
+A try/except block is extremely efficient if no exceptions are raised.  Actually
+catching an exception is expensive.  In versions of Python prior to 2.0 it was
+common to use this idiom::
 
    try:
        value = mydict[key]
@@ -296,11 +286,10 @@
 This only made sense when you expected the dict to have the key almost all the
 time.  If that wasn't the case, you coded it like this::
 
-   if mydict.has_key(key):
+   if key in mydict:
        value = mydict[key]
    else:
-       mydict[key] = getvalue(key)
-       value = mydict[key]
+       value = mydict[key] = getvalue(key)
 
 For this specific case, you could also use ``value = dict.setdefault(key,
 getvalue(key))``, but only if the ``getvalue()`` call is cheap enough because it
diff -r 3d0686d90f55 Doc/faq/library.rst
--- a/Doc/faq/library.rst
+++ b/Doc/faq/library.rst
@@ -14,7 +14,7 @@
 
 Check :ref:`the Library Reference <library-index>` to see if there's a relevant
 standard library module.  (Eventually you'll learn what's in the standard
-library and will able to skip this step.)
+library and will be able to skip this step.)
 
 For third-party packages, search the `Python Package Index
 <http://pypi.python.org/pypi>`_ or try `Google <http://www.google.com>`_ or
@@ -28,7 +28,7 @@
 If you can't find a source file for a module it may be a built-in or
 dynamically loaded module implemented in C, C++ or other compiled language.
 In this case you may not have the source file or it may be something like
-mathmodule.c, somewhere in a C source directory (not on the Python Path).
+:file:`mathmodule.c`, somewhere in a C source directory (not on the Python Path).
 
 There are (at least) three kinds of modules in Python:
 
@@ -60,18 +60,18 @@
 interpreter is installed on your platform.
 
 If you would like the script to be independent of where the Python interpreter
-lives, you can use the "env" program.  Almost all Unix variants support the
-following, assuming the Python interpreter is in a directory on the user's
-$PATH::
+lives, you can use the :program:`env` program.  Almost all Unix variants support
+the following, assuming the Python interpreter is in a directory on the user's
+:envvar:`PATH`::
 
   #!/usr/bin/env python
 
-*Don't* do this for CGI scripts.  The $PATH variable for CGI scripts is often
-very minimal, so you need to use the actual absolute pathname of the
+*Don't* do this for CGI scripts.  The :envvar:`PATH` variable for CGI scripts is
+often very minimal, so you need to use the actual absolute pathname of the
 interpreter.
 
-Occasionally, a user's environment is so full that the /usr/bin/env program
-fails; or there's no env program at all.  In that case, you can try the
+Occasionally, a user's environment is so full that the :program:`/usr/bin/env`
+program fails; or there's no env program at all.  In that case, you can try the
 following hack (due to Alex Rezinsky)::
 
    #! /bin/sh
@@ -92,11 +92,11 @@
 .. XXX curses *is* built by default, isn't it?
 
 For Unix variants: The standard Python source distribution comes with a curses
-module in the ``Modules/`` subdirectory, though it's not compiled by default
-(note that this is not available in the Windows distribution -- there is no
-curses module for Windows).
+module in the :source:`Modules` subdirectory, though it's not compiled by default.
+(Note that this is not available in the Windows distribution -- there is no
+curses module for Windows.)
 
-The curses module supports basic curses features as well as many additional
+The :mod:`curses` module supports basic curses features as well as many additional
 functions from ncurses and SYSV curses such as colour, alternative character set
 support, pads, and mouse support. This means the module isn't compatible with
 operating systems that only have BSD curses, but there don't seem to be any
@@ -110,7 +110,7 @@
 -------------------------------------------------
 
 The :mod:`atexit` module provides a register function that is similar to C's
-onexit.
+:c:func:`onexit`.
 
 
 Why don't my signal handlers work?
@@ -140,8 +140,8 @@
 The :mod:`unittest` module is a fancier testing framework modelled on Java and
 Smalltalk testing frameworks.
 
-For testing, it helps to write the program so that it may be easily tested by
-using good modular design.  Your program should have almost all functionality
+To make testing easier, you should use good modular design in your program.
+Your program should have almost all functionality
 encapsulated in either functions or class methods -- and this sometimes has the
 surprising and delightful effect of making the program run faster (because local
 variable accesses are faster than global accesses).  Furthermore the program
@@ -157,7 +157,7 @@
 
 Once your program is organized as a tractable collection of functions and class
 behaviours you should write test functions that exercise the behaviours.  A test
-suite can be associated with each module which automates a sequence of tests.
+suite that automates a sequence of tests can be associated with each module.
 This sounds like a lot of work, but since Python is so terse and flexible it's
 surprisingly easy.  You can make coding much more pleasant and fun by writing
 your test functions in parallel with the "production code", since this makes it
@@ -186,7 +186,7 @@
 How do I get a single keypress at a time?
 -----------------------------------------
 
-For Unix variants: There are several solutions.  It's straightforward to do this
+For Unix variants there are several solutions.  It's straightforward to do this
 using curses, but curses is a fairly large module to learn.
 
 .. XXX this doesn't work out of the box, some IO expert needs to check why
@@ -275,7 +275,7 @@
 
    time.sleep(10)
 
-Instead of trying to guess how long a :func:`time.sleep` delay will be enough,
+Instead of trying to guess a good delay value for :func:`time.sleep`,
 it's better to use some kind of semaphore mechanism.  One idea is to use the
 :mod:`queue` module to create a queue object, let each thread append a token to
 the queue when it finishes, and let the main thread read as many tokens from the
@@ -291,9 +291,9 @@
 Or, if you want fine control over the dispatching algorithm, you can write
 your own logic manually.  Use the :mod:`queue` module to create a queue
 containing a list of jobs.  The :class:`~queue.Queue` class maintains a
-list of objects with ``.put(obj)`` to add an item to the queue and ``.get()``
-to return an item.  The class will take care of the locking necessary to
-ensure that each job is handed out exactly once.
+list of objects and has a ``.put(obj)`` method that adds items to the queue and
+a ``.get()`` method that returns them.  The class will take care of the locking
+necessary to ensure that each job is handed out exactly once.
 
 Here's a trivial example::
 
@@ -302,7 +302,7 @@
    # The worker thread gets jobs off the queue.  When the queue is empty, it
    # assumes there will be no more work and exits.
    # (Realistically workers will run until terminated.)
-   def worker ():
+   def worker():
        print('Running worker')
        time.sleep(0.1)
        while True:
@@ -333,7 +333,9 @@
    print('Main thread sleeping')
    time.sleep(5)
 
-When run, this will produce the following output::
+When run, this will produce the following output:
+
+.. code-block:: none
 
    Running worker
    Running worker
@@ -349,8 +351,8 @@
    Worker <Thread(worker 1, started 130283832797456)> running with argument 5
    ...
 
-Consult the module's documentation for more details; the ``Queue`` class
-provides a featureful interface.
+Consult the module's documentation for more details; the :class:`~queue.Queue`
+class provides a featureful interface.
 
 
 What kinds of global value mutation are thread-safe?
@@ -467,7 +469,7 @@
 To truncate a file, open it using ``f = open(filename, "rb+")``, and use
 ``f.truncate(offset)``; offset defaults to the current seek position.  There's
 also ``os.ftruncate(fd, offset)`` for files opened with :func:`os.open`, where
-``fd`` is the file descriptor (a small integer).
+*fd* is the file descriptor (a small integer).
 
 The :mod:`shutil` module also contains a number of functions to work on files
 including :func:`~shutil.copyfile`, :func:`~shutil.copytree`, and
@@ -501,15 +503,15 @@
 "short integer" (2 bytes), and 'l' reads one "long integer" (4 bytes) from the
 string.
 
-For data that is more regular (e.g. a homogeneous list of ints or thefloats),
+For data that is more regular (e.g. a homogeneous list of ints or floats),
 you can also use the :mod:`array` module.
 
-   .. note::
-      To read and write binary data, it is mandatory to open the file in
-      binary mode (here, passing ``"rb"`` to :func:`open`).  If you use
-      ``"r"`` instead (the default), the file will be open in text mode
-      and ``f.read()`` will return :class:`str` objects rather than
-      :class:`bytes` objects.
+.. note::
+   To read and write binary data, it is mandatory to open the file in
+   binary mode (here, passing ``"rb"`` to :func:`open`).  If you use
+   ``"r"`` instead (the default), the file will be open in text mode
+   and ``f.read()`` will return :class:`str` objects rather than
+   :class:`bytes` objects.
 
 
 I can't seem to use os.read() on a pipe created with os.popen(); why?
@@ -518,7 +520,7 @@
 :func:`os.read` is a low-level function which takes a file descriptor, a small
 integer representing the opened file.  :func:`os.popen` creates a high-level
 file object, the same type returned by the built-in :func:`open` function.
-Thus, to read n bytes from a pipe p created with :func:`os.popen`, you need to
+Thus, to read *n* bytes from a pipe *p* created with :func:`os.popen`, you need to
 use ``p.read(n)``.
 
 
@@ -538,8 +540,8 @@
    Warning: in general it is unwise to do this because you can easily cause a
    deadlock where your process is blocked waiting for output from the child
    while the child is blocked waiting for input from you.  This can be caused
-   because the parent expects the child to output more text than it does, or it
-   can be caused by data being stuck in stdio buffers due to lack of flushing.
+   by the parent expecting the child to output more text than it does or
+   by data being stuck in stdio buffers due to lack of flushing.
    The Python parent can of course explicitly flush the data it sends to the
    child before it reads any output, but if the child is a naive C program it
    may have been written to never explicitly flush its output, even if it is
@@ -561,7 +563,7 @@
    get the result back.  Unless the amount of data is very large, the easiest
    way to do this is to write it to a temporary file and run the command with
    that temporary file as input.  The standard module :mod:`tempfile` exports a
-   ``mktemp()`` function to generate unique temporary file names. ::
+   :func:`~tempfile.mktemp` function to generate unique temporary file names. ::
 
       import tempfile
       import os
@@ -681,8 +683,8 @@
    msg, hdrs = req.read(), req.info()
 
 Note that in general for percent-encoded POST operations, query strings must be
-quoted using :func:`urllib.parse.urlencode`.  For example to send name="Guy Steele,
-Jr."::
+quoted using :func:`urllib.parse.urlencode`.  For example, to send
+``name=Guy Steele, Jr.``::
 
    >>> import urllib.parse
    >>> urllib.parse.urlencode({'name': 'Guy Steele, Jr.'})
@@ -696,19 +698,8 @@
 
 .. XXX add modern template languages
 
-There are many different modules available:
-
-* HTMLgen is a class library of objects corresponding to all the HTML 3.2 markup
-  tags. It's used when you are writing in Python and wish to synthesize HTML
-  pages for generating a web or for CGI forms, etc.
-
-* DocumentTemplate and Zope Page Templates are two different systems that are
-  part of Zope.
-
-* Quixote's PTL uses Python syntax to assemble strings of text.
-
-Consult the `Web Programming wiki pages
-<http://wiki.python.org/moin/WebProgramming>`_ for more links.
+You can find a collection of useful links on the `Web Programming wiki page
+<http://wiki.python.org/moin/WebProgramming>`_.
 
 
 How do I send mail from a Python script?
@@ -737,7 +728,7 @@
    server.quit()
 
 A Unix-only alternative uses sendmail.  The location of the sendmail program
-varies between systems; sometimes it is ``/usr/lib/sendmail``, sometime
+varies between systems; sometimes it is ``/usr/lib/sendmail``, sometimes
 ``/usr/sbin/sendmail``.  The sendmail manual page will help you out.  Here's
 some sample code::
 
@@ -805,14 +796,6 @@
 :mod:`shelve` library module uses pickle and (g)dbm to create persistent
 mappings containing arbitrary Python objects.
 
-A more awkward way of doing things is to use pickle's little sister, marshal.
-The :mod:`marshal` module provides very fast ways to store noncircular basic
-Python types to files and strings, and back again.  Although marshal does not do
-fancy things like store instances or handle shared references properly, it does
-run extremely fast.  For example loading a half megabyte of data may take less
-than a third of a second.  This often beats doing something more complex and
-general such as using gdbm with pickle/shelve.
-
 
 Mathematics and Numerics
 ========================
diff -r 3d0686d90f55 Doc/faq/programming.rst
--- a/Doc/faq/programming.rst
+++ b/Doc/faq/programming.rst
@@ -794,9 +794,9 @@
 That's a tough one, in general.  First, here are a list of things to
 remember before diving further:
 
-* Performance characteristics vary accross Python implementations.  This FAQ
+* Performance characteristics vary across Python implementations.  This FAQ
   focusses on :term:`CPython`.
-* Behaviour can vary accross operating systems, especially when talking about
+* Behaviour can vary across operating systems, especially when talking about
   I/O or multi-threading.
 * You should always find the hot spots in your program *before* attempting to
   optimize any code (see the :mod:`profile` module).
diff -r 3d0686d90f55 Doc/glossary.rst
--- a/Doc/glossary.rst
+++ b/Doc/glossary.rst
@@ -146,9 +146,9 @@
       For more information about descriptors' methods, see :ref:`descriptors`.
 
    dictionary
-      An associative array, where arbitrary keys are mapped to values.  The keys
-      can be any object with :meth:`__hash__` function and :meth:`__eq__`
-      methods. Called a hash in Perl.
+      An associative array, where arbitrary keys are mapped to values.  The
+      keys can be any object with :meth:`__hash__` and :meth:`__eq__` methods.
+      Called a hash in Perl.
 
    docstring
       A string literal which appears as the first expression in a class,
@@ -194,7 +194,7 @@
       An object exposing a file-oriented API (with methods such as
       :meth:`read()` or :meth:`write()`) to an underlying resource.  Depending
       on the way it was created, a file object can mediate access to a real
-      on-disk file or to another other type of storage or communication device
+      on-disk file or to another type of storage or communication device
       (for example standard input/output, in-memory buffers, sockets, pipes,
       etc.).  File objects are also called :dfn:`file-like objects` or
       :dfn:`streams`.
@@ -385,7 +385,7 @@
       :meth:`str.lower` method can serve as a key function for case insensitive
       sorts.  Alternatively, an ad-hoc key function can be built from a
       :keyword:`lambda` expression such as ``lambda r: (r[0], r[2])``.  Also,
-      the :mod:`operator` module provides three key function constuctors:
+      the :mod:`operator` module provides three key function constructors:
       :func:`~operator.attrgetter`, :func:`~operator.itemgetter`, and
       :func:`~operator.methodcaller`.  See the :ref:`Sorting HOW TO
       <sortinghowto>` for examples of how to create and use key functions.
@@ -600,6 +600,13 @@
       object has a type.  An object's type is accessible as its
       :attr:`__class__` attribute or can be retrieved with ``type(obj)``.
 
+   universal newlines
+      A manner of interpreting text streams in which all of the following are
+      recognized as ending a line: the Unix end-of-line convention ``'\n'``,
+      the Windows convention ``'\r\n'``, and the old Macintosh convention
+      ``'\r'``.  See :pep:`278` and :pep:`3116`, as well as
+      :func:`str.splitlines` for an additional use.
+
    view
       The objects returned from :meth:`dict.keys`, :meth:`dict.values`, and
       :meth:`dict.items` are called dictionary views.  They are lazy sequences
diff -r 3d0686d90f55 Doc/howto/advocacy.rst
--- a/Doc/howto/advocacy.rst
+++ b/Doc/howto/advocacy.rst
@@ -264,8 +264,7 @@
 
 **What are the restrictions on Python's use?**
 
-They're practically nonexistent.  Consult the :file:`Misc/COPYRIGHT` file in the
-source distribution, or the section :ref:`history-and-license` for the full
+They're practically nonexistent.  Consult :ref:`history-and-license` for the full
 language, but it boils down to three conditions:
 
 * You have to leave the copyright notice on the software; if you don't include
diff -r 3d0686d90f55 Doc/howto/argparse.rst
--- /dev/null
+++ b/Doc/howto/argparse.rst
@@ -0,0 +1,764 @@
+*****************
+Argparse Tutorial
+*****************
+
+:author: Tshepang Lekhonkhobe <tshepang@gmail.com>
+
+.. _argparse-tutorial:
+
+This tutorial is intended to be a gentle introduction to :mod:`argparse`, the
+recommended command-line parsing module in the Python standard library.
+
+.. note::
+
+   There are two other modules that fulfill the same task, namely
+   :mod:`getopt` (an equivalent for :c:func:`getopt` from the C
+   language) and the deprecated :mod:`optparse`.
+   Note also that :mod:`argparse` is based on :mod:`optparse`,
+   and therefore very similar in terms of usage.
+
+
+Concepts
+========
+
+Let's show the sort of functionality that we are going to explore in this
+introductory tutorial by making use of the :command:`ls` command:
+
+.. code-block:: sh
+
+   $ ls
+   cpython  devguide  prog.py  pypy  rm-unused-function.patch
+   $ ls pypy
+   ctypes_configure  demo  dotviewer  include  lib_pypy  lib-python ...
+   $ ls -l
+   total 20
+   drwxr-xr-x 19 wena wena 4096 Feb 18 18:51 cpython
+   drwxr-xr-x  4 wena wena 4096 Feb  8 12:04 devguide
+   -rwxr-xr-x  1 wena wena  535 Feb 19 00:05 prog.py
+   drwxr-xr-x 14 wena wena 4096 Feb  7 00:59 pypy
+   -rw-r--r--  1 wena wena  741 Feb 18 01:01 rm-unused-function.patch
+   $ ls --help
+   Usage: ls [OPTION]... [FILE]...
+   List information about the FILEs (the current directory by default).
+   Sort entries alphabetically if none of -cftuvSUX nor --sort is specified.
+   ...
+
+A few concepts we can learn from the four commands:
+
+* The :command:`ls` command is useful when run without any options at all. It defaults
+  to displaying the contents of the current directory.
+
+* If we want more than what it provides by default, we have to tell it a bit
+  more. In this case, we want it to display a different directory, ``pypy``.
+  What we did is specify what is known as a positional argument. It's named so
+  because the program should know what to do with the value, solely based on
+  where it appears on the command line. This concept is more relevant
+  to a command like :command:`cp`, whose most basic usage is ``cp SRC DEST``.
+  The first position is *what you want copied,* and the second
+  position is *where you want it copied to*.
+
+* Now, say we want to change the behaviour of the program. In our example,
+  we display more info for each file instead of just showing the file names.
+  The ``-l`` in that case is known as an optional argument.
+
+* That's a snippet of the help text. It's very useful in that you can
+  come across a program you have never used before, and can figure out
+  how it works simply by reading its help text.
+
+
+The basics
+==========
+
+Let us start with a very simple example which does (almost) nothing::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.parse_args()
+
+Following is a result of running the code:
+
+.. code-block:: sh
+
+   $ python3 prog.py
+   $ python3 prog.py --help
+   usage: prog.py [-h]
+
+   optional arguments:
+     -h, --help  show this help message and exit
+   $ python3 prog.py --verbose
+   usage: prog.py [-h]
+   prog.py: error: unrecognized arguments: --verbose
+   $ python3 prog.py foo
+   usage: prog.py [-h]
+   prog.py: error: unrecognized arguments: foo
+
+Here is what is happening:
+
+* Running the script without any options results in nothing displayed to
+  stdout. Not so useful.
+
+* The second one starts to display the usefulness of the :mod:`argparse`
+  module. We have done almost nothing, but already we get a nice help message.
+
+* The ``--help`` option, which can also be shortened to ``-h``, is the only
+  option we get for free (i.e. no need to specify it). Specifying anything
+  else results in an error. But even then, we do get a useful usage message,
+  also for free.
+
+
+Introducing Positional arguments
+================================
+
+An example::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("echo")
+   args = parser.parse_args()
+   print(args.echo)
+
+And running the code:
+
+.. code-block:: sh
+
+   $ python3 prog.py
+   usage: prog.py [-h] echo
+   prog.py: error: the following arguments are required: echo
+   $ python3 prog.py --help
+   usage: prog.py [-h] echo
+
+   positional arguments:
+     echo
+
+   optional arguments:
+     -h, --help  show this help message and exit
+   $ python3 prog.py foo
+   foo
+
+Here is what's happening:
+
+* We've added the :meth:`add_argument` method, which is what we use to specify
+  which command-line options the program is willing to accept. In this case,
+  I've named it ``echo`` so that it's in line with its function.
+
+* Calling our program now requires us to specify an option.
+
+* The :meth:`parse_args` method actually returns some data from the
+  options specified, in this case, ``echo``.
+
+* The variable is some form of 'magic' that :mod:`argparse` performs for free
+  (i.e. no need to specify which variable that value is stored in).
+  You will also notice that its name matches the string argument given
+  to the method, ``echo``.
+
+Note however that, although the help display looks nice and all, it currently
+is not as helpful as it can be. For example we see that we got ``echo`` as a
+positional argument, but we don't know what it does, other than by guessing or
+by reading the source code. So, let's make it a bit more useful::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("echo", help="echo the string you use here")
+   args = parser.parse_args()
+   print(args.echo)
+
+And we get:
+
+.. code-block:: sh
+
+   $ python3 prog.py -h
+   usage: prog.py [-h] echo
+
+   positional arguments:
+     echo        echo the string you use here
+
+   optional arguments:
+     -h, --help  show this help message and exit
+
+Now, how about doing something even more useful::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("square", help="display a square of a given number")
+   args = parser.parse_args()
+   print(args.square**2)
+
+Following is a result of running the code:
+
+.. code-block:: sh
+
+   $ python3 prog.py 4
+   Traceback (most recent call last):
+     File "prog.py", line 5, in <module>
+       print(args.square**2)
+   TypeError: unsupported operand type(s) for ** or pow(): 'str' and 'int'
+
+That didn't go so well. That's because :mod:`argparse` treats the options we
+give it as strings, unless we tell it otherwise. So, let's tell
+:mod:`argparse` to treat that input as an integer::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("square", help="display a square of a given number",
+                       type=int)
+   args = parser.parse_args()
+   print(args.square**2)
+
+Following is a result of running the code:
+
+.. code-block:: sh
+
+   $ python3 prog.py 4
+   16
+   $ python3 prog.py four
+   usage: prog.py [-h] square
+   prog.py: error: argument square: invalid int value: 'four'
+
+That went well. The program now even helpfully quits on illegal input
+before proceeding.
+
+
+Introducing Optional arguments
+==============================
+
+So far, we have been playing with positional arguments. Let us
+have a look at how to add optional ones::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("--verbosity", help="increase output verbosity")
+   args = parser.parse_args()
+   if args.verbosity:
+       print("verbosity turned on")
+
+And the output:
+
+.. code-block:: sh
+
+   $ python3 prog.py --verbosity 1
+   verbosity turned on
+   $ python3 prog.py
+   $ python3 prog.py --help
+   usage: prog.py [-h] [--verbosity VERBOSITY]
+
+   optional arguments:
+     -h, --help            show this help message and exit
+     --verbosity VERBOSITY
+                           increase output verbosity
+   $ python3 prog.py --verbosity
+   usage: prog.py [-h] [--verbosity VERBOSITY]
+   prog.py: error: argument --verbosity: expected one argument
+
+Here is what is happening:
+
+* The program is written so as to display something when ``--verbosity`` is
+  specified and display nothing when not.
+
+* To show that the option is actually optional, there is no error when running
+  the program without it. Note that by default, if an optional argument isn't
+  used, the relevant variable, in this case :attr:`args.verbosity`, is
+  given ``None`` as a value, which is the reason it fails the truth
+  test of the :keyword:`if` statement.
+
+* The help message is a bit different.
+
+* When using the ``--verbosity`` option, one must also specify some value,
+  any value.
+
+The above example accepts arbitrary values for ``--verbosity``, but for
+our simple program, only two values are actually useful, ``True`` or ``False``.
+Let's modify the code accordingly::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("--verbose", help="increase output verbosity",
+                       action="store_true")
+   args = parser.parse_args()
+   if args.verbose:
+       print("verbosity turned on")
+
+And the output:
+
+.. code-block:: sh
+
+   $ python3 prog.py --verbose
+   verbosity turned on
+   $ python3 prog.py --verbose 1
+   usage: prog.py [-h] [--verbose]
+   prog.py: error: unrecognized arguments: 1
+   $ python3 prog.py --help
+   usage: prog.py [-h] [--verbose]
+
+   optional arguments:
+     -h, --help  show this help message and exit
+     --verbose   increase output verbosity
+
+Here is what is happening:
+
+* The option is now more of a flag than something that requires a value.
+  We even changed the name of the option to match that idea.
+  Note that we now specify a new keyword, ``action``, and give it the value
+  ``"store_true"``. This means that, if the option is specified,
+  assign the value ``True`` to :data:`args.verbose`.
+  Not specifying it implies ``False``.
+
+* It complains when you specify a value, in the true spirit of what flags
+  actually are.
+
+* Notice the different help text.
+
+
+Short options
+-------------
+
+If you are familiar with command line usage,
+you will notice that I haven't yet touched on the topic of short
+versions of the options. It's quite simple::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("-v", "--verbose", help="increase output verbosity",
+                       action="store_true")
+   args = parser.parse_args()
+   if args.verbose:
+       print("verbosity turned on")
+
+And here goes:
+
+.. code-block:: sh
+
+   $ python3 prog.py -v
+   verbosity turned on
+   $ python3 prog.py --help
+   usage: prog.py [-h] [-v]
+
+   optional arguments:
+     -h, --help     show this help message and exit
+     -v, --verbose  increase output verbosity
+
+Note that the new ability is also reflected in the help text.
+
+
+Combining Positional and Optional arguments
+===========================================
+
+Our program keeps growing in complexity::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("square", type=int,
+                       help="display a square of a given number")
+   parser.add_argument("-v", "--verbose", action="store_true",
+                       help="increase output verbosity")
+   args = parser.parse_args()
+   answer = args.square**2
+   if args.verbose:
+       print("the square of {} equals {}".format(args.square, answer))
+   else:
+       print(answer)
+
+And now the output:
+
+.. code-block:: sh
+
+   $ python3 prog.py
+   usage: prog.py [-h] [-v] square
+   prog.py: error: the following arguments are required: square
+   $ python3 prog.py 4
+   16
+   $ python3 prog.py 4 --verbose
+   the square of 4 equals 16
+   $ python3 prog.py --verbose 4
+   the square of 4 equals 16
+
+* We've brought back a positional argument, hence the complaint.
+
+* Note that the order does not matter.
+
+How about we give this program of ours back the ability to have
+multiple verbosity values, and actually get to use them::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("square", type=int,
+                       help="display a square of a given number")
+   parser.add_argument("-v", "--verbosity", type=int,
+                       help="increase output verbosity")
+   args = parser.parse_args()
+   answer = args.square**2
+   if args.verbosity == 2:
+       print("the square of {} equals {}".format(args.square, answer))
+   elif args.verbosity == 1:
+       print("{}^2 == {}".format(args.square, answer))
+   else:
+       print(answer)
+
+And the output:
+
+.. code-block:: sh
+
+   $ python3 prog.py 4
+   16
+   $ python3 prog.py 4 -v
+   usage: prog.py [-h] [-v VERBOSITY] square
+   prog.py: error: argument -v/--verbosity: expected one argument
+   $ python3 prog.py 4 -v 1
+   4^2 == 16
+   $ python3 prog.py 4 -v 2
+   the square of 4 equals 16
+   $ python3 prog.py 4 -v 3
+   16
+
+These all look good except the last one, which exposes a bug in our program.
+Let's fix it by restricting the values the ``--verbosity`` option can accept::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("square", type=int,
+                       help="display a square of a given number")
+   parser.add_argument("-v", "--verbosity", type=int, choices=[0, 1, 2],
+                       help="increase output verbosity")
+   args = parser.parse_args()
+   answer = args.square**2
+   if args.verbosity == 2:
+       print("the square of {} equals {}".format(args.square, answer))
+   elif args.verbosity == 1:
+       print("{}^2 == {}".format(args.square, answer))
+   else:
+       print(answer)
+
+And the output:
+
+.. code-block:: sh
+
+   $ python3 prog.py 4 -v 3
+   usage: prog.py [-h] [-v {0,1,2}] square
+   prog.py: error: argument -v/--verbosity: invalid choice: 3 (choose from 0, 1, 2)
+   $ python3 prog.py 4 -h
+   usage: prog.py [-h] [-v {0,1,2}] square
+
+   positional arguments:
+     square                display a square of a given number
+
+   optional arguments:
+     -h, --help            show this help message and exit
+     -v {0,1,2}, --verbosity {0,1,2}
+                           increase output verbosity
+
+Note that the change is reflected both in the error message and in the
+help string.
+
+Now, let's use a different approach of playing with verbosity, which is pretty
+common. It also matches the way the CPython executable handles its own
+verbosity argument (check the output of ``python --help``)::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("square", type=int,
+                       help="display the square of a given number")
+   parser.add_argument("-v", "--verbosity", action="count",
+                       help="increase output verbosity")
+   args = parser.parse_args()
+   answer = args.square**2
+   if args.verbosity == 2:
+       print("the square of {} equals {}".format(args.square, answer))
+   elif args.verbosity == 1:
+       print("{}^2 == {}".format(args.square, answer))
+   else:
+       print(answer)
+
+We have introduced another action, "count",
+to count the number of occurrences of a specific optional argument:
+
+.. code-block:: sh
+
+   $ python3 prog.py 4
+   16
+   $ python3 prog.py 4 -v
+   4^2 == 16
+   $ python3 prog.py 4 -vv
+   the square of 4 equals 16
+   $ python3 prog.py 4 --verbosity --verbosity
+   the square of 4 equals 16
+   $ python3 prog.py 4 -v 1
+   usage: prog.py [-h] [-v] square
+   prog.py: error: unrecognized arguments: 1
+   $ python3 prog.py 4 -h
+   usage: prog.py [-h] [-v] square
+
+   positional arguments:
+     square           display the square of a given number
+
+   optional arguments:
+     -h, --help       show this help message and exit
+     -v, --verbosity  increase output verbosity
+   $ python3 prog.py 4 -vvv
+   16
+
+* Yes, it's now more of a flag (similar to ``action="store_true"`` in the
+  previous version of our script). That should explain the complaint.
+
+* It also behaves similarly to the "store_true" action.
+
+* Now here's a demonstration of what the "count" action gives. You've probably
+  seen this sort of usage before.
+
+* And, just like the "store_true" action, if you don't specify the ``-v`` flag,
+  that flag is considered to have ``None`` value.
+
+* As should be expected, specifying the long form of the flag, we should get
+  the same output.
+
+* Sadly, our help output isn't very informative on the new ability our script
+  has acquired, but that can always be fixed by improving the documentation for
+  our script (e.g. via the ``help`` keyword argument).
+
+* That last output exposes a bug in our program.
+
+
+Let's fix it::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("square", type=int,
+                       help="display a square of a given number")
+   parser.add_argument("-v", "--verbosity", action="count",
+                       help="increase output verbosity")
+   args = parser.parse_args()
+   answer = args.square**2
+
+   # bugfix: replace == with >=
+   if args.verbosity >= 2:
+       print("the square of {} equals {}".format(args.square, answer))
+   elif args.verbosity >= 1:
+       print("{}^2 == {}".format(args.square, answer))
+   else:
+       print(answer)
+
+And this is what it gives:
+
+.. code-block:: sh
+
+   $ python3 prog.py 4 -vvv
+   the square of 4 equals 16
+   $ python3 prog.py 4 -vvvv
+   the square of 4 equals 16
+   $ python3 prog.py 4
+   Traceback (most recent call last):
+     File "prog.py", line 11, in <module>
+       if args.verbosity >= 2:
+   TypeError: unorderable types: NoneType() >= int()
+
+* The first output went well, and fixes the bug we had before.
+  That is, we want any value >= 2 to be as verbose as possible.
+
+* The third output, however, is not so good.
+
+Let's fix that bug::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("square", type=int,
+                       help="display a square of a given number")
+   parser.add_argument("-v", "--verbosity", action="count", default=0,
+                       help="increase output verbosity")
+   args = parser.parse_args()
+   answer = args.square**2
+   if args.verbosity >= 2:
+       print("the square of {} equals {}".format(args.square, answer))
+   elif args.verbosity >= 1:
+       print("{}^2 == {}".format(args.square, answer))
+   else:
+       print(answer)
+
+We've just introduced yet another keyword, ``default``.
+We've set it to ``0`` in order to make it comparable to the other int values.
+Remember that by default,
+if an optional argument isn't specified,
+it gets the ``None`` value, and that cannot be compared to an int value
+(hence the :exc:`TypeError` exception).
+
+And:
+
+.. code-block:: sh
+
+   $ python3 prog.py 4
+   16
+
+You can go quite far just with what we've learned so far,
+and we have only scratched the surface.
+The :mod:`argparse` module is very powerful,
+and we'll explore a bit more of it before we end this tutorial.
+
+
+Getting a little more advanced
+==============================
+
+What if we wanted to expand our tiny program to perform other powers,
+not just squares::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("x", type=int, help="the base")
+   parser.add_argument("y", type=int, help="the exponent")
+   parser.add_argument("-v", "--verbosity", action="count", default=0)
+   args = parser.parse_args()
+   answer = args.x**args.y
+   if args.verbosity >= 2:
+       print("{} to the power {} equals {}".format(args.x, args.y, answer))
+   elif args.verbosity >= 1:
+       print("{}^{} == {}".format(args.x, args.y, answer))
+   else:
+       print(answer)
+
+Output:
+
+.. code-block:: sh
+
+   $ python3 prog.py
+   usage: prog.py [-h] [-v] x y
+   prog.py: error: the following arguments are required: x, y
+   $ python3 prog.py -h
+   usage: prog.py [-h] [-v] x y
+
+   positional arguments:
+     x                the base
+     y                the exponent
+
+   optional arguments:
+     -h, --help       show this help message and exit
+     -v, --verbosity
+   $ python3 prog.py 4 2 -v
+   4^2 == 16
+
+
+Notice that so far we've been using verbosity level to *change* the text
+that gets displayed. The following example instead uses verbosity level
+to display *more* text::
+
+   import argparse
+   parser = argparse.ArgumentParser()
+   parser.add_argument("x", type=int, help="the base")
+   parser.add_argument("y", type=int, help="the exponent")
+   parser.add_argument("-v", "--verbosity", action="count", default=0)
+   args = parser.parse_args()
+   answer = args.x**args.y
+   if args.verbosity >= 2:
+       print("Running '{}'".format(__file__))
+   if args.verbosity >= 1:
+       print("{}^{} == ".format(args.x, args.y), end="")
+   print(answer)
+
+Output:
+
+.. code-block:: sh
+
+   $ python3 prog.py 4 2
+   16
+   $ python3 prog.py 4 2 -v
+   4^2 == 16
+   $ python3 prog.py 4 2 -vv
+   Running 'prog.py'
+   4^2 == 16
+
+
+Conflicting options
+-------------------
+
+So far, we have been working with two methods of an
+:class:`argparse.ArgumentParser` instance. Let's introduce a third one,
+:meth:`add_mutually_exclusive_group`. It allows us to specify options that
+conflict with each other. Let's also change the rest of the program so that the
+new functionality makes more sense:
+we'll introduce the ``--quiet`` option,
+which will be the opposite of the ``--verbose`` one::
+
+   import argparse
+
+   parser = argparse.ArgumentParser()
+   group = parser.add_mutually_exclusive_group()
+   group.add_argument("-v", "--verbose", action="store_true")
+   group.add_argument("-q", "--quiet", action="store_true")
+   parser.add_argument("x", type=int, help="the base")
+   parser.add_argument("y", type=int, help="the exponent")
+   args = parser.parse_args()
+   answer = args.x**args.y
+
+   if args.quiet:
+       print(answer)
+   elif args.verbose:
+       print("{} to the power {} equals {}".format(args.x, args.y, answer))
+   else:
+       print("{}^{} == {}".format(args.x, args.y, answer))
+
+Our program is now simpler, and we've lost some functionality for the sake of
+demonstration. Anyway, here's the output:
+
+.. code-block:: sh
+
+   $ python3 prog.py 4 2
+   4^2 == 16
+   $ python3 prog.py 4 2 -q
+   16
+   $ python3 prog.py 4 2 -v
+   4 to the power 2 equals 16
+   $ python3 prog.py 4 2 -vq
+   usage: prog.py [-h] [-v | -q] x y
+   prog.py: error: argument -q/--quiet: not allowed with argument -v/--verbose
+   $ python3 prog.py 4 2 -v --quiet
+   usage: prog.py [-h] [-v | -q] x y
+   prog.py: error: argument -q/--quiet: not allowed with argument -v/--verbose
+
+That should be easy to follow. I've added that last output so you can see the
+sort of flexibility you get, i.e. mixing long-form options with short-form
+ones.
+
+Before we conclude, you probably want to tell your users the main purpose of
+your program, just in case they don't know::
+
+   import argparse
+
+   parser = argparse.ArgumentParser(description="calculate X to the power of Y")
+   group = parser.add_mutually_exclusive_group()
+   group.add_argument("-v", "--verbose", action="store_true")
+   group.add_argument("-q", "--quiet", action="store_true")
+   parser.add_argument("x", type=int, help="the base")
+   parser.add_argument("y", type=int, help="the exponent")
+   args = parser.parse_args()
+   answer = args.x**args.y
+
+   if args.quiet:
+       print(answer)
+   elif args.verbose:
+       print("{} to the power {} equals {}".format(args.x, args.y, answer))
+   else:
+       print("{}^{} == {}".format(args.x, args.y, answer))
+
+Note the slight difference in the usage text. Note the ``[-v | -q]``,
+which tells us that we can either use ``-v`` or ``-q``,
+but not both at the same time:
+
+.. code-block:: sh
+
+   $ python3 prog.py --help
+   usage: prog.py [-h] [-v | -q] x y
+
+   calculate X to the power of Y
+
+   positional arguments:
+     x              the base
+     y              the exponent
+
+   optional arguments:
+     -h, --help     show this help message and exit
+     -v, --verbose
+     -q, --quiet
+
+
+Conclusion
+==========
+
+The :mod:`argparse` module offers a lot more than shown here.
+Its docs are quite detailed and thorough, and full of examples.
+Having gone through this tutorial, you should easily digest them
+without feeling overwhelmed.
diff -r 3d0686d90f55 Doc/howto/cporting.rst
--- a/Doc/howto/cporting.rst
+++ b/Doc/howto/cporting.rst
@@ -2,27 +2,28 @@
 
 .. _cporting-howto:
 
-********************************
-Porting Extension Modules to 3.0
-********************************
+*************************************
+Porting Extension Modules to Python 3
+*************************************
 
 :author: Benjamin Peterson
 
 
 .. topic:: Abstract
 
-   Although changing the C-API was not one of Python 3.0's objectives, the many
-   Python level changes made leaving 2.x's API intact impossible.  In fact, some
-   changes such as :func:`int` and :func:`long` unification are more obvious on
-   the C level.  This document endeavors to document incompatibilities and how
-   they can be worked around.
+   Although changing the C-API was not one of Python 3's objectives,
+   the many Python-level changes made leaving Python 2's API intact
+   impossible.  In fact, some changes such as :func:`int` and
+   :func:`long` unification are more obvious on the C level.  This
+   document endeavors to document incompatibilities and how they can
+   be worked around.
 
 
 Conditional compilation
 =======================
 
-The easiest way to compile only some code for 3.0 is to check if
-:c:macro:`PY_MAJOR_VERSION` is greater than or equal to 3. ::
+The easiest way to compile only some code for Python 3 is to check
+if :c:macro:`PY_MAJOR_VERSION` is greater than or equal to 3. ::
 
    #if PY_MAJOR_VERSION >= 3
    #define IS_PY3K
@@ -35,7 +36,7 @@
 Changes to Object APIs
 ======================
 
-Python 3.0 merged together some types with similar functions while cleanly
+Python 3 merged together some types with similar functions while cleanly
 separating others.
 
 
@@ -43,14 +44,14 @@
 -----------------------
 
 
-Python 3.0's :func:`str` (``PyString_*`` functions in C) type is equivalent to
-2.x's :func:`unicode` (``PyUnicode_*``).  The old 8-bit string type has become
-:func:`bytes`.  Python 2.6 and later provide a compatibility header,
+Python 3's :func:`str` (``PyString_*`` functions in C) type is equivalent to
+Python 2's :func:`unicode` (``PyUnicode_*``).  The old 8-bit string type has
+become :func:`bytes`.  Python 2.6 and later provide a compatibility header,
 :file:`bytesobject.h`, mapping ``PyBytes`` names to ``PyString`` ones.  For best
-compatibility with 3.0, :c:type:`PyUnicode` should be used for textual data and
+compatibility with Python 3, :c:type:`PyUnicode` should be used for textual data and
 :c:type:`PyBytes` for binary data.  It's also important to remember that
-:c:type:`PyBytes` and :c:type:`PyUnicode` in 3.0 are not interchangeable like
-:c:type:`PyString` and :c:type:`PyUnicode` are in 2.x.  The following example
+:c:type:`PyBytes` and :c:type:`PyUnicode` in Python 3 are not interchangeable like
+:c:type:`PyString` and :c:type:`PyUnicode` are in Python 2.  The following example
 shows best practices with regards to :c:type:`PyUnicode`, :c:type:`PyString`,
 and :c:type:`PyBytes`. ::
 
@@ -94,10 +95,12 @@
 long/int Unification
 --------------------
 
-In Python 3.0, there is only one integer type.  It is called :func:`int` on the
-Python level, but actually corresponds to 2.x's :func:`long` type.  In the
-C-API, ``PyInt_*`` functions are replaced by their ``PyLong_*`` neighbors.  The
-best course of action here is using the ``PyInt_*`` functions aliased to
+Python 3 has only one integer type, :func:`int`.  But it actually
+corresponds to Python 2's :func:`long` type--the :func:`int` type
+used in Python 2 was removed.  In the C-API, ``PyInt_*`` functions
+are replaced by their ``PyLong_*`` equivalents.
+
+The best course of action here is using the ``PyInt_*`` functions aliased to
 ``PyLong_*`` found in :file:`intobject.h`.  The abstract ``PyNumber_*`` APIs
 can also be used in some cases. ::
 
@@ -120,10 +123,11 @@
 Module initialization and state
 ===============================
 
-Python 3.0 has a revamped extension module initialization system.  (See
-:pep:`3121`.)  Instead of storing module state in globals, they should be stored
-in an interpreter specific structure.  Creating modules that act correctly in
-both 2.x and 3.0 is tricky.  The following simple example demonstrates how. ::
+Python 3 has a revamped extension module initialization system.  (See
+:pep:`3121`.)  Instead of storing module state in globals, they should
+be stored in an interpreter specific structure.  Creating modules that
+act correctly in both Python 2 and Python 3 is tricky.  The following
+simple example demonstrates how. ::
 
    #include "Python.h"
 
@@ -209,10 +213,65 @@
    }
 
 
+CObject replaced with Capsule
+=============================
+
+The :c:type:`Capsule` object was introduced in Python 3.1 and 2.7 to replace
+:c:type:`CObject`.  CObjects were useful,
+but the :c:type:`CObject` API was problematic: it didn't permit distinguishing
+between valid CObjects, which allowed mismatched CObjects to crash the
+interpreter, and some of its APIs relied on undefined behavior in C.
+(For further reading on the rationale behind Capsules, please see :issue:`5630`.)
+
+If you're currently using CObjects, and you want to migrate to 3.1 or newer,
+you'll need to switch to Capsules.
+:c:type:`CObject` was deprecated in 3.1 and 2.7 and completely removed in
+Python 3.2.  If you only support 2.7, or 3.1 and above, you
+can simply switch to :c:type:`Capsule`.  If you need to support Python 3.0,
+or versions of Python earlier than 2.7,
+you'll have to support both CObjects and Capsules.
+(Note that Python 3.0 is no longer supported, and it is not recommended
+for production use.)
+
+The following example header file :file:`capsulethunk.h` may
+solve the problem for you.  Simply write your code against the
+:c:type:`Capsule` API and include this header file after
+:file:`Python.h`.  Your code will automatically use Capsules
+in versions of Python with Capsules, and switch to CObjects
+when Capsules are unavailable.
+
+:file:`capsulethunk.h` simulates Capsules using CObjects.  However,
+:c:type:`CObject` provides no place to store the capsule's "name".  As a
+result the simulated :c:type:`Capsule` objects created by :file:`capsulethunk.h`
+behave slightly differently from real Capsules.  Specifically:
+
+  * The name parameter passed in to :c:func:`PyCapsule_New` is ignored.
+
+  * The name parameter passed in to :c:func:`PyCapsule_IsValid` and
+    :c:func:`PyCapsule_GetPointer` is ignored, and no error checking
+    of the name is performed.
+
+  * :c:func:`PyCapsule_GetName` always returns NULL.
+
+  * :c:func:`PyCapsule_SetName` always raises an exception and
+    returns failure.  (Since there's no way to store a name
+    in a CObject, noisy failure of :c:func:`PyCapsule_SetName`
+    was deemed preferable to silent failure here.  If this is
+    inconvenient, feel free to modify your local
+    copy as you see fit.)
+
+You can find :file:`capsulethunk.h` in the Python source distribution
+as :source:`Doc/includes/capsulethunk.h`.  We also include it here for
+your convenience:
+
+.. literalinclude:: ../includes/capsulethunk.h
+
+
+
 Other options
 =============
 
 If you are writing a new extension module, you might consider `Cython
 <http://www.cython.org>`_.  It translates a Python-like language to C.  The
-extension modules it creates are compatible with Python 3.x and 2.x.
+extension modules it creates are compatible with Python 3 and Python 2.
 
diff -r 3d0686d90f55 Doc/howto/curses.rst
--- a/Doc/howto/curses.rst
+++ b/Doc/howto/curses.rst
@@ -118,7 +118,7 @@
 A common problem when debugging a curses application is to get your terminal
 messed up when the application dies without restoring the terminal to its
 previous state.  In Python this commonly happens when your code is buggy and
-raises an uncaught exception.  Keys are no longer be echoed to the screen when
+raises an uncaught exception.  Keys are no longer echoed to the screen when
 you type them, for example, which makes using the shell difficult.
 
 In Python you can avoid these complications and make debugging much easier by
@@ -271,7 +271,7 @@
 highlight certain words.  curses supports this by allowing you to specify an
 attribute for each cell on the screen.
 
-An attribute is a integer, each bit representing a different attribute.  You can
+An attribute is an integer, each bit representing a different attribute.  You can
 try to display text with multiple attribute bits set, but curses doesn't
 guarantee that all the possible combinations are available, or that they're all
 visually distinct.  That depends on the ability of the terminal being used, so
@@ -300,7 +300,7 @@
                  curses.A_REVERSE)
    stdscr.refresh()
 
-The curses library also supports color on those terminals that provide it, The
+The curses library also supports color on those terminals that provide it. The
 most common such terminal is probably the Linux console, followed by color
 xterms.
 
diff -r 3d0686d90f55 Doc/howto/functional.rst
--- a/Doc/howto/functional.rst
+++ b/Doc/howto/functional.rst
@@ -246,9 +246,9 @@
 iterator argument and will return the largest or smallest element.  The ``"in"``
 and ``"not in"`` operators also support iterators: ``X in iterator`` is true if
 X is found in the stream returned by the iterator.  You'll run into obvious
-problems if the iterator is infinite; ``max()``, ``min()``, and ``"not in"``
+problems if the iterator is infinite; ``max()`` and ``min()``
 will never return, and if the element X never appears in the stream, the
-``"in"`` operator won't return either.
+``"in"`` and ``"not in"`` operators won't return either.
 
 Note that you can only go forward in an iterator; there's no way to get the
 previous element, reset the iterator, or make a copy of it.  Iterator objects
diff -r 3d0686d90f55 Doc/howto/index.rst
--- a/Doc/howto/index.rst
+++ b/Doc/howto/index.rst
@@ -27,4 +27,5 @@
    unicode.rst
    urllib2.rst
    webservers.rst
+   argparse.rst
 
diff -r 3d0686d90f55 Doc/howto/logging-cookbook.rst
--- a/Doc/howto/logging-cookbook.rst
+++ b/Doc/howto/logging-cookbook.rst
@@ -268,12 +268,12 @@
 .. currentmodule:: logging.handlers
 
 Sometimes you have to get your logging handlers to do their work without
-blocking the thread you’re logging from. This is common in Web applications,
+blocking the thread you're logging from. This is common in Web applications,
 though of course it also occurs in other scenarios.
 
 A common culprit which demonstrates sluggish behaviour is the
 :class:`SMTPHandler`: sending emails can take a long time, for a
-number of reasons outside the developer’s control (for example, a poorly
+number of reasons outside the developer's control (for example, a poorly
 performing mail or network infrastructure). But almost any network-based
 handler can block: Even a :class:`SocketHandler` operation may do a
 DNS query under the hood which is too slow (and this query can be deep in the
@@ -292,7 +292,7 @@
 
 The second part of the solution is :class:`QueueListener`, which has been
 designed as the counterpart to :class:`QueueHandler`.  A
-:class:`QueueListener` is very simple: it’s passed a queue and some handlers,
+:class:`QueueListener` is very simple: it's passed a queue and some handlers,
 and it fires up an internal thread which listens to its queue for LogRecords
 sent from ``QueueHandlers`` (or any other source of ``LogRecords``, for that
 matter). The ``LogRecords`` are removed from the queue and passed to the
@@ -745,7 +745,7 @@
                 raise
             except:
                 import sys, traceback
-                print >> sys.stderr, 'Whoops! Problem:'
+                print('Whoops! Problem:', file=sys.stderr)
                 traceback.print_exc(file=sys.stderr)
 
     # Arrays used for random selections in this demo
@@ -964,6 +964,219 @@
 Obviously this example sets the log length much too small as an extreme
 example.  You would want to set *maxBytes* to an appropriate value.
 
+.. _format-styles:
+
+Use of alternative formatting styles
+------------------------------------
+
+When logging was added to the Python standard library, the only way of
+formatting messages with variable content was to use the %-formatting
+method. Since then, Python has gained two new formatting approaches:
+:class:`string.Template` (added in Python 2.4) and :meth:`str.format`
+(added in Python 2.6).
+
+Logging (as of 3.2) provides improved support for these two additional
+formatting styles. The :class:`Formatter` class has been enhanced to take an
+additional, optional keyword parameter named ``style``. This defaults to
+``'%'``, but other possible values are ``'{'`` and ``'$'``, which correspond
+to the other two formatting styles. Backwards compatibility is maintained by
+default (as you would expect), but by explicitly specifying a style parameter,
+you get the ability to specify format strings which work with
+:meth:`str.format` or :class:`string.Template`. Here's an example console
+session to show the possibilities:
+
+.. code-block:: pycon
+
+    >>> import logging
+    >>> root = logging.getLogger()
+    >>> root.setLevel(logging.DEBUG)
+    >>> handler = logging.StreamHandler()
+    >>> bf = logging.Formatter('{asctime} {name} {levelname:8s} {message}',
+    ...                        style='{')
+    >>> handler.setFormatter(bf)
+    >>> root.addHandler(handler)
+    >>> logger = logging.getLogger('foo.bar')
+    >>> logger.debug('This is a DEBUG message')
+    2010-10-28 15:11:55,341 foo.bar DEBUG    This is a DEBUG message
+    >>> logger.critical('This is a CRITICAL message')
+    2010-10-28 15:12:11,526 foo.bar CRITICAL This is a CRITICAL message
+    >>> df = logging.Formatter('$asctime $name ${levelname} $message',
+    ...                        style='$')
+    >>> handler.setFormatter(df)
+    >>> logger.debug('This is a DEBUG message')
+    2010-10-28 15:13:06,924 foo.bar DEBUG This is a DEBUG message
+    >>> logger.critical('This is a CRITICAL message')
+    2010-10-28 15:13:11,494 foo.bar CRITICAL This is a CRITICAL message
+    >>>
+
+Note that the formatting of logging messages for final output to logs is
+completely independent of how an individual logging message is constructed.
+That can still use %-formatting, as shown here::
+
+    >>> logger.error('This is an%s %s %s', 'other,', 'ERROR,', 'message')
+    2010-10-28 15:19:29,833 foo.bar ERROR This is another, ERROR, message
+    >>>
+
+Logging calls (``logger.debug()``, ``logger.info()`` etc.) only take
+positional parameters for the actual logging message itself, with keyword
+parameters used only for determining options for how to handle the actual
+logging call (e.g. the ``exc_info`` keyword parameter to indicate that
+traceback information should be logged, or the ``extra`` keyword parameter
+to indicate additional contextual information to be added to the log). So
+you cannot directly make logging calls using :meth:`str.format` or
+:class:`string.Template` syntax, because internally the logging package
+uses %-formatting to merge the format string and the variable arguments.
+This cannot be changed while preserving backward compatibility, since
+all logging calls which are out there in existing code will be using %-format
+strings.
+
+There is, however, a way that you can use {}- and $- formatting to construct
+your individual log messages. Recall that for a message you can use an
+arbitrary object as a message format string, and that the logging package will
+call ``str()`` on that object to get the actual format string. Consider the
+following two classes::
+
+    class BraceMessage(object):
+        def __init__(self, fmt, *args, **kwargs):
+            self.fmt = fmt
+            self.args = args
+            self.kwargs = kwargs
+
+        def __str__(self):
+            return self.fmt.format(*self.args, **self.kwargs)
+
+    class DollarMessage(object):
+        def __init__(self, fmt, **kwargs):
+            self.fmt = fmt
+            self.kwargs = kwargs
+
+        def __str__(self):
+            from string import Template
+            return Template(self.fmt).substitute(**self.kwargs)
+
+Either of these can be used in place of a format string, to allow {}- or
+$-formatting to be used to build the actual "message" part which appears in the
+formatted log output in place of "%(message)s" or "{message}" or "$message".
+It's a little unwieldy to use the class names whenever you want to log
+something, but it's quite palatable if you use an alias such as __ (double
+underscore – not to be confused with _, the single underscore used as a
+synonym/alias for :func:`gettext.gettext` or its brethren).
+
+The above classes are not included in Python, though they're easy enough to
+copy and paste into your own code. They can be used as follows (assuming that
+they're declared in a module called ``wherever``):
+
+.. code-block:: pycon
+
+    >>> from wherever import BraceMessage as __
+    >>> print(__('Message with {0} {name}', 2, name='placeholders'))
+    Message with 2 placeholders
+    >>> class Point: pass
+    ...
+    >>> p = Point()
+    >>> p.x = 0.5
+    >>> p.y = 0.5
+    >>> print(__('Message with coordinates: ({point.x:.2f}, {point.y:.2f})',
+    ...       point=p))
+    Message with coordinates: (0.50, 0.50)
+    >>> from wherever import DollarMessage as __
+    >>> print(__('Message with $num $what', num=2, what='placeholders'))
+    Message with 2 placeholders
+    >>>
+
+While the above examples use ``print()`` to show how the formatting works, you
+would of course use ``logger.debug()`` or similar to actually log using this
+approach.
+
+One thing to note is that you pay no significant performance penalty with this
+approach: the actual formatting happens not when you make the logging call, but
+when (and if) the logged message is actually about to be output to a log by a
+handler. So the only slightly unusual thing which might trip you up is that the
+parentheses go around the format string and the arguments, not just the format
+string. That's because the __ notation is just syntax sugar for a constructor
+call to one of the XXXMessage classes.
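+
+For example, an actual logging call using the ``BraceMessage`` alias (still
+assuming the illustrative ``wherever`` module from above) might look like::
+
+    from wherever import BraceMessage as __
+
+    logger.debug(__('Processed {count} records in {elapsed:.2f} seconds',
+                    count=42, elapsed=1.5))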
+
+
+.. currentmodule:: logging
+
+.. _custom-logrecord:
+
+Customising ``LogRecord``
+-------------------------
+
+Every logging event is represented by a :class:`LogRecord` instance.
+When an event is logged and not filtered out by a logger's level, a
+:class:`LogRecord` is created, populated with information about the event and
+then passed to the handlers for that logger (and its ancestors, up to and
+including the logger where further propagation up the hierarchy is disabled).
+Before Python 3.2, there were only two places where this creation was done:
+
+* :meth:`Logger.makeRecord`, which is called in the normal process of
+  logging an event. This invoked :class:`LogRecord` directly to create an
+  instance.
+* :func:`makeLogRecord`, which is called with a dictionary containing
+  attributes to be added to the LogRecord. This is typically invoked when a
+  suitable dictionary has been received over the network (e.g. in pickle form
+  via a :class:`~handlers.SocketHandler`, or in JSON form via an
+  :class:`~handlers.HTTPHandler`).
+
+This has usually meant that if you need to do anything special with a
+:class:`LogRecord`, you've had to do one of the following:
+
+* Create your own :class:`Logger` subclass, which overrides
+  :meth:`Logger.makeRecord`, and set it using :func:`~logging.setLoggerClass`
+  before any loggers that you care about are instantiated.
+* Add a :class:`Filter` to a logger or handler, which does the
+  necessary special manipulation you need when its
+  :meth:`~Filter.filter` method is called.
+
+The first approach would be a little unwieldy in the scenario where (say)
+several different libraries wanted to do different things. Each would attempt
+to set its own :class:`Logger` subclass, and the one which did this last would
+win.
+
+The second approach works reasonably well for many cases, but does not allow
+you to e.g. use a specialized subclass of :class:`LogRecord`. Library
+developers can set a suitable filter on their loggers, but they would have to
+remember to do this every time they introduced a new logger (which they would
+do simply by adding new packages or modules and doing ::
+
+   logger = logging.getLogger(__name__)
+
+at module level). It's probably one too many things to think about. Developers
+could also add the filter to a :class:`~logging.NullHandler` attached to their
+top-level logger, but this would not be invoked if an application developer
+attached a handler to a lower-level library logger – so output from that
+handler would not reflect the intentions of the library developer.
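+
+For reference, a minimal sketch of the second, filter-based approach (the
+attribute name and value are purely illustrative)::
+
+    import logging
+
+    class ContextFilter(logging.Filter):
+        def filter(self, record):
+            # Annotate every record passing through with an extra attribute.
+            record.custom_attribute = 0xdecafbad
+            return True   # keep the record
+
+    logger = logging.getLogger(__name__)
+    logger.addFilter(ContextFilter())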
+
+In Python 3.2 and later, :class:`~logging.LogRecord` creation is done through a
+factory, which you can specify. The factory is just a callable you can set with
+:func:`~logging.setLogRecordFactory`, and interrogate with
+:func:`~logging.getLogRecordFactory`. The factory is invoked with the same
+signature as the :class:`~logging.LogRecord` constructor, as :class:`LogRecord`
+is the default setting for the factory.
+
+This approach allows a custom factory to control all aspects of LogRecord
+creation. For example, you could return a subclass, or just add some additional
+attributes to the record once created, using a pattern similar to this::
+
+    old_factory = logging.getLogRecordFactory()
+
+    def record_factory(*args, **kwargs):
+        record = old_factory(*args, **kwargs)
+        record.custom_attribute = 0xdecafbad
+        return record
+
+    logging.setLogRecordFactory(record_factory)
+
+This pattern allows different libraries to chain factories together, and as
+long as they don't overwrite each other's attributes or unintentionally
+overwrite the attributes provided as standard, there should be no surprises.
+However, it should be borne in mind that each link in the chain adds run-time
+overhead to all logging operations, and the technique should only be used when
+the use of a :class:`Filter` does not provide the desired result.
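+
+For instance, two libraries could each wrap whatever factory is current at the
+time they set themselves up (the attribute names below are purely
+illustrative)::
+
+    import logging
+
+    def install_factory(attr_name, attr_value):
+        wrapped = logging.getLogRecordFactory()
+        def factory(*args, **kwargs):
+            record = wrapped(*args, **kwargs)
+            setattr(record, attr_name, attr_value)
+            return record
+        logging.setLogRecordFactory(factory)
+
+    install_factory('lib_one_attribute', 1)   # done by the first library
+    install_factory('lib_two_attribute', 2)   # done by the second library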
+
+
 .. _zeromq-handlers:
 
 Subclassing QueueHandler - a ZeroMQ example
@@ -1102,3 +1315,276 @@
 For more information about this configuration, you can see the `relevant
 section <https://docs.djangoproject.com/en/1.3/topics/logging/#configuring-logging>`_
 of the Django documentation.
+
+A more elaborate multiprocessing example
+----------------------------------------
+
+The following working example shows how logging can be used with multiprocessing
+using configuration files. The configurations are fairly simple, but serve to
+illustrate how more complex ones could be implemented in a real multiprocessing
+scenario.
+
+In the example, the main process spawns a listener process and some worker
+processes. The main process, the listener and the workers use three separate
+configurations (the workers all share the same configuration). We can
+see logging in the main process, how the workers log to a QueueHandler and how
+the listener implements a QueueListener and a more complex logging
+configuration, and arranges to dispatch events received via the queue to the
+handlers specified in the configuration. Note that these configurations are
+purely illustrative, but you should be able to adapt this example to your own
+scenario.
+
+Here's the script - the docstrings and the comments hopefully explain how it
+works::
+
+    import logging
+    import logging.config
+    import logging.handlers
+    from multiprocessing import Process, Queue, Event, current_process
+    import os
+    import random
+    import time
+
+    class MyHandler(object):
+        """
+        A simple handler for logging events. It runs in the listener process and
+        dispatches events to loggers based on the name in the received record,
+        which then get dispatched, by the logging system, to the handlers
+        configured for those loggers.
+        """
+        def handle(self, record):
+            logger = logging.getLogger(record.name)
+            # The process name is transformed just to show that it's the listener
+            # doing the logging to files and console
+            record.processName = '%s (for %s)' % (current_process().name, record.processName)
+            logger.handle(record)
+
+    def listener_process(q, stop_event, config):
+        """
+        This could be done in the main process, but is just done in a separate
+        process for illustrative purposes.
+
+        This initialises logging according to the specified configuration,
+        starts the listener and waits for the main process to signal completion
+        via the event. The listener is then stopped, and the process exits.
+        """
+        logging.config.dictConfig(config)
+        listener = logging.handlers.QueueListener(q, MyHandler())
+        listener.start()
+        if os.name == 'posix':
+            # On POSIX, the setup logger will have been configured in the
+            # parent process, but should have been disabled following the
+            # dictConfig call.
+            # On Windows, since fork isn't used, the setup logger won't
+            # exist in the child, so it would be created and the message
+            # would appear - hence the "if posix" clause.
+            logger = logging.getLogger('setup')
+            logger.critical('Should not appear, because of disabled logger ...')
+        stop_event.wait()
+        listener.stop()
+
+    def worker_process(config):
+        """
+        A number of these are spawned for the purpose of illustration. In
+        practice, they could be a heterogeneous bunch of processes rather than
+        ones which are identical to each other.
+
+        This initialises logging according to the specified configuration,
+        and logs a hundred messages with random levels to randomly selected
+        loggers.
+
+        A small sleep is added to allow other processes a chance to run. This
+        is not strictly needed, but it mixes the output from the different
+        processes a bit more than if it's left out.
+        """
+        logging.config.dictConfig(config)
+        levels = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR,
+                  logging.CRITICAL]
+        loggers = ['foo', 'foo.bar', 'foo.bar.baz',
+                   'spam', 'spam.ham', 'spam.ham.eggs']
+        if os.name == 'posix':
+            # On POSIX, the setup logger will have been configured in the
+            # parent process, but should have been disabled following the
+            # dictConfig call.
+            # On Windows, since fork isn't used, the setup logger won't
+            # exist in the child, so it would be created and the message
+            # would appear - hence the "if posix" clause.
+            logger = logging.getLogger('setup')
+            logger.critical('Should not appear, because of disabled logger ...')
+        for i in range(100):
+            lvl = random.choice(levels)
+            logger = logging.getLogger(random.choice(loggers))
+            logger.log(lvl, 'Message no. %d', i)
+            time.sleep(0.01)
+
+    def main():
+        q = Queue()
+        # The main process gets a simple configuration which prints to the console.
+        config_initial = {
+            'version': 1,
+            'formatters': {
+                'detailed': {
+                    'class': 'logging.Formatter',
+                    'format': '%(asctime)s %(name)-15s %(levelname)-8s %(processName)-10s %(message)s'
+                }
+            },
+            'handlers': {
+                'console': {
+                    'class': 'logging.StreamHandler',
+                    'level': 'INFO',
+                },
+            },
+            'root': {
+                'level': 'DEBUG',
+                'handlers': ['console']
+            },
+        }
+        # The worker process configuration is just a QueueHandler attached to the
+        # root logger, which allows all messages to be sent to the queue.
+        # We disable existing loggers to disable the "setup" logger used in the
+        # parent process. This is needed on POSIX because the logger will
+        # be there in the child following a fork().
+        config_worker = {
+            'version': 1,
+            'disable_existing_loggers': True,
+            'handlers': {
+                'queue': {
+                    'class': 'logging.handlers.QueueHandler',
+                    'queue': q,
+                },
+            },
+            'root': {
+                'level': 'DEBUG',
+                'handlers': ['queue']
+            },
+        }
+        # The listener process configuration shows that the full flexibility of
+        # logging configuration is available to dispatch events to handlers however
+        # you want.
+        # We disable existing loggers to disable the "setup" logger used in the
+        # parent process. This is needed on POSIX because the logger will
+        # be there in the child following a fork().
+        config_listener = {
+            'version': 1,
+            'disable_existing_loggers': True,
+            'formatters': {
+                'detailed': {
+                    'class': 'logging.Formatter',
+                    'format': '%(asctime)s %(name)-15s %(levelname)-8s %(processName)-10s %(message)s'
+                },
+                'simple': {
+                    'class': 'logging.Formatter',
+                    'format': '%(name)-15s %(levelname)-8s %(processName)-10s %(message)s'
+                }
+            },
+            'handlers': {
+                'console': {
+                    'class': 'logging.StreamHandler',
+                    'level': 'INFO',
+                    'formatter': 'simple',
+                },
+                'file': {
+                    'class': 'logging.FileHandler',
+                    'filename': 'mplog.log',
+                    'mode': 'w',
+                    'formatter': 'detailed',
+                },
+                'foofile': {
+                    'class': 'logging.FileHandler',
+                    'filename': 'mplog-foo.log',
+                    'mode': 'w',
+                    'formatter': 'detailed',
+                },
+                'errors': {
+                    'class': 'logging.FileHandler',
+                    'filename': 'mplog-errors.log',
+                    'mode': 'w',
+                    'level': 'ERROR',
+                    'formatter': 'detailed',
+                },
+            },
+            'loggers': {
+                'foo': {
+                    'handlers' : ['foofile']
+                }
+            },
+            'root': {
+                'level': 'DEBUG',
+                'handlers': ['console', 'file', 'errors']
+            },
+        }
+        # Log some initial events, just to show that logging in the parent works
+        # normally.
+        logging.config.dictConfig(config_initial)
+        logger = logging.getLogger('setup')
+        logger.info('About to create workers ...')
+        workers = []
+        for i in range(5):
+            wp = Process(target=worker_process, name='worker %d' % (i + 1),
+                         args=(config_worker,))
+            workers.append(wp)
+            wp.start()
+            logger.info('Started worker: %s', wp.name)
+        logger.info('About to create listener ...')
+        stop_event = Event()
+        lp = Process(target=listener_process, name='listener',
+                     args=(q, stop_event, config_listener))
+        lp.start()
+        logger.info('Started listener')
+        # We now hang around for the workers to finish their work.
+        for wp in workers:
+            wp.join()
+        # Workers all done, the listener can now stop.
+        # Logging in the parent still works normally.
+        logger.info('Telling listener to stop ...')
+        stop_event.set()
+        lp.join()
+        logger.info('All done.')
+
+    if __name__ == '__main__':
+        main()
+
+
+Inserting a BOM into messages sent to a SysLogHandler
+-----------------------------------------------------
+
+`RFC 5424 <http://tools.ietf.org/html/rfc5424>`_ requires that a
+Unicode message be sent to a syslog daemon as a set of bytes which have the
+following structure: an optional pure-ASCII component, followed by a UTF-8 Byte
+Order Mark (BOM), followed by Unicode encoded using UTF-8. (See the `relevant
+section of the specification <http://tools.ietf.org/html/rfc5424#section-6>`_.)
+
+In Python 3.1, code was added to
+:class:`~logging.handlers.SysLogHandler` to insert a BOM into the message, but
+unfortunately, it was implemented incorrectly, with the BOM appearing at the
+beginning of the message and hence not allowing any pure-ASCII component to
+appear before it.
+
+As this behaviour is broken, the incorrect BOM insertion code is being removed
+from Python 3.2.4 and later. However, it is not being replaced, and if you
+want to produce RFC 5424-compliant messages which include a BOM, an optional
+pure-ASCII sequence before it and arbitrary Unicode after it, encoded using
+UTF-8, then you need to do the following:
+
+#. Attach a :class:`~logging.Formatter` instance to your
+   :class:`~logging.handlers.SysLogHandler` instance, with a format string
+   such as::
+
+      'ASCII section\ufeffUnicode section'
+
+   The Unicode code point ``'\ufeff'``, when encoded using UTF-8, will be
+   encoded as a UTF-8 BOM -- the byte-string ``b'\xef\xbb\xbf'``.
+
+#. Replace the ASCII section with whatever placeholders you like, but make sure
+   that the data that appears in there after substitution is always ASCII (that
+   way, it will remain unchanged after UTF-8 encoding).
+
+#. Replace the Unicode section with whatever placeholders you like; if the data
+   which appears there after substitution contains characters outside the ASCII
+   range, that's fine -- it will be encoded using UTF-8.
+
+The formatted message *will* be encoded using UTF-8 encoding by
+``SysLogHandler``. If you follow the above rules, you should be able to produce
+RFC 5424-compliant messages. If you don't, logging may not complain, but your
+messages will not be RFC 5424-compliant, and your syslog daemon may complain.
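+
+Putting this together, a minimal sketch (the handler address and the
+placeholder layout are illustrative assumptions)::
+
+    import logging
+    import logging.handlers
+
+    handler = logging.handlers.SysLogHandler(address=('localhost', 514))
+    # Everything before the BOM must remain ASCII after substitution;
+    # everything after it may be arbitrary Unicode, encoded as UTF-8.
+    handler.setFormatter(logging.Formatter('%(name)s\ufeffUnicode: %(message)s'))
+    logger = logging.getLogger('app')
+    logger.addHandler(handler)
+    logger.error('résumé received')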
+
diff -r 3d0686d90f55 Doc/howto/logging.rst
--- a/Doc/howto/logging.rst
+++ b/Doc/howto/logging.rst
@@ -651,6 +651,22 @@
 code approach, mainly separation of configuration and code and the ability of
 noncoders to easily modify the logging properties.
 
+.. warning:: The :func:`fileConfig` function takes an optional parameter,
+   ``disable_existing_loggers``, which defaults to ``True`` for reasons of
+   backward compatibility. This may or may not be what you want, since it
+   will cause any loggers existing before the :func:`fileConfig` call to
+   be disabled unless they (or an ancestor) are explicitly named in the
+   configuration.  Please refer to the reference documentation for more
+   information, and specify ``False`` for this parameter if you wish.
+
+   The dictionary passed to :func:`dictConfig` can also specify a Boolean
+   value with key ``disable_existing_loggers``, which if not specified
+   explicitly in the dictionary also defaults to being interpreted as
+   ``True``.  This leads to the logger-disabling behaviour described above,
+   which may not be what you want - in which case, provide the key
+   explicitly with a value of ``False``.
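+
+   For example, a minimal sketch of both spellings (``logging.conf`` is just a
+   placeholder file name)::
+
+      import logging.config
+
+      # File-based configuration, keeping pre-existing loggers enabled:
+      logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
+
+      # Dictionary-based configuration, with the same effect:
+      logging.config.dictConfig({'version': 1,
+                                 'disable_existing_loggers': False})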
+
+
 .. currentmodule:: logging
 
 Note that the class names referenced in config files need to be either relative
diff -r 3d0686d90f55 Doc/howto/pyporting.rst
--- a/Doc/howto/pyporting.rst
+++ b/Doc/howto/pyporting.rst
@@ -39,7 +39,7 @@
 Finally, you do have the option of :ref:`using 2to3 <use_2to3>` to translate
 Python 2 code into Python 3 code (with some manual help). This can take the
 form of branching your code and using 2to3 to start a Python 3 branch. You can
-also have users perform the translation as installation time automatically so
+also have users perform the translation at installation time automatically so
 that you only have to maintain a Python 2 codebase.
 
 Regardless of which approach you choose, porting is not as hard or
@@ -234,7 +234,7 @@
 ``b'py'[1:2]`` is ``'y'`` in Python 2 and ``b'y'`` in Python 3 (i.e., close
 enough).
 
-You cannot concatenate bytes and strings in Python 3. But since in Python
+You cannot concatenate bytes and strings in Python 3. But since Python
 2 has bytes aliased to ``str``, it will succeed: ``b'a' + u'b'`` works in
 Python 2, but ``b'a' + 'b'`` in Python 3 is a :exc:`TypeError`. A similar issue
 also comes about when doing comparisons between bytes and strings.
@@ -328,7 +328,7 @@
 textual data, people have over the years been rather loose in their delineation
 of what ``str`` instances held text compared to bytes. In Python 3 you cannot
 be so care-free anymore and need to properly handle the difference. The key
-handling this issue to make sure that **every** string literal in your
+handling this issue is to make sure that **every** string literal in your
 Python 2 code is either syntactically of functionally marked as either bytes or
 text data. After this is done you then need to make sure your APIs are designed
 to either handle a specific type or made to be properly polymorphic.
@@ -343,7 +343,7 @@
 and then designating textual data with a ``u`` prefix or using the
 ``unicode_literals`` future statement.
 
-If your project supports versions of Python pre-dating 2.6, then you should use
+If your project supports versions of Python predating 2.6, then you should use
 the six_ project and its ``b()`` function to denote bytes literals. For text
 literals you can either use six's ``u()`` function or use a ``u`` prefix.
 
@@ -439,7 +439,7 @@
 There are two ways to solve this issue. One is to use a custom 2to3 fixer. The
 blog post at http://lucumr.pocoo.org/2011/1/22/forwards-compatible-python/
 specifies how to do this. That will allow 2to3 to change all instances of ``def
-__unicode(self): ...`` to ``def __str__(self): ...``. This does require you
+__unicode(self): ...`` to ``def __str__(self): ...``. This does require that you
 define your ``__str__()`` method in Python 2 before your ``__unicode__()``
 method.
 
diff -r 3d0686d90f55 Doc/howto/regex.rst
--- a/Doc/howto/regex.rst
+++ b/Doc/howto/regex.rst
@@ -360,7 +360,7 @@
 
 You can learn about this by interactively experimenting with the :mod:`re`
 module.  If you have :mod:`tkinter` available, you may also want to look at
-:file:`Tools/demo/redemo.py`, a demonstration program included with the
+:source:`Tools/demo/redemo.py`, a demonstration program included with the
 Python distribution.  It allows you to enter REs and strings, and displays
 whether the RE matches or fails. :file:`redemo.py` can be quite useful when
 trying to debug a complicated RE.  Phil Schwartz's `Kodos
@@ -495,7 +495,7 @@
 the same ones in several locations, then it might be worthwhile to collect all
 the definitions in one place, in a section of code that compiles all the REs
 ahead of time.  To take an example from the standard library, here's an extract
-from the now deprecated :file:`xmllib.py`::
+from the now-defunct Python 2 standard :mod:`xmllib` module::
 
    ref = re.compile( ... )
    entityref = re.compile( ... )
diff -r 3d0686d90f55 Doc/howto/sockets.rst
--- a/Doc/howto/sockets.rst
+++ b/Doc/howto/sockets.rst
@@ -153,7 +153,7 @@
 there, you may wait forever for the reply, because the request may still be in
 your output buffer.
 
-Now we come the major stumbling block of sockets - ``send`` and ``recv`` operate
+Now we come to the major stumbling block of sockets - ``send`` and ``recv`` operate
 on the network buffers. They do not necessarily handle all the bytes you hand
 them (or expect from them), because their major focus is handling the network
 buffers. In general, they return when the associated network buffers have been
@@ -164,7 +164,7 @@
 When a ``recv`` returns 0 bytes, it means the other side has closed (or is in
 the process of closing) the connection.  You will not receive any more data on
 this connection. Ever.  You may be able to send data successfully; I'll talk
-about that some on the next page.
+more about this later.
 
 A protocol like HTTP uses a socket for only one transfer. The client sends a
 request, then reads a reply.  That's it. The socket is discarded. This means that
diff -r 3d0686d90f55 Doc/howto/sorting.rst
--- a/Doc/howto/sorting.rst
+++ b/Doc/howto/sorting.rst
@@ -42,7 +42,7 @@
 Key Functions
 =============
 
-Both :meth:`list.sort` and :func:`sorted` have *key* parameter to specify a
+Both :meth:`list.sort` and :func:`sorted` have a *key* parameter to specify a
 function to be called on each list element prior to making comparisons.
 
 For example, here's a case-insensitive string comparison:
@@ -89,7 +89,7 @@
 The key-function patterns shown above are very common, so Python provides
 convenience functions to make accessor functions easier and faster. The
 :mod:`operator` module has :func:`~operator.itemgetter`,
-:func:`~operator.attrgetter`, and an :func:`~operator.methodcaller` function.
+:func:`~operator.attrgetter`, and a :func:`~operator.methodcaller` function.
 
 Using those functions, the above examples become simpler and faster:
 
@@ -114,7 +114,7 @@
 ========================
 
 Both :meth:`list.sort` and :func:`sorted` accept a *reverse* parameter with a
-boolean value. This is using to flag descending sorts. For example, to get the
+boolean value. This is used to flag descending sorts. For example, to get the
 student data in reverse *age* order:
 
     >>> sorted(student_tuples, key=itemgetter(2), reverse=True)
diff -r 3d0686d90f55 Doc/howto/urllib2.rst
--- a/Doc/howto/urllib2.rst
+++ b/Doc/howto/urllib2.rst
@@ -108,6 +108,7 @@
               'language' : 'Python' }
 
     data = urllib.parse.urlencode(values)
+    data = data.encode('utf-8') # data should be bytes
     req = urllib.request.Request(url, data)
     response = urllib.request.urlopen(req)
     the_page = response.read()
@@ -172,7 +173,8 @@
               'language' : 'Python' }
     headers = { 'User-Agent' : user_agent }
 
-    data = urllib.parse.urlencode(values)
+    data = urllib.parse.urlencode(values)
+    data = data.encode('utf-8')
     req = urllib.request.Request(url, data, headers)
     response = urllib.request.urlopen(req)
     the_page = response.read()
@@ -446,12 +448,12 @@
 
 When authentication is required, the server sends a header (as well as the 401
 error code) requesting authentication.  This specifies the authentication scheme
-and a 'realm'. The header looks like : ``Www-authenticate: SCHEME
+and a 'realm'. The header looks like: ``WWW-Authenticate: SCHEME
 realm="REALM"``.
 
 e.g. ::
 
-    Www-authenticate: Basic realm="cPanel Users"
+    WWW-Authenticate: Basic realm="cPanel Users"
 
 
 The client should then retry the request with the appropriate name and password
diff -r 3d0686d90f55 Doc/includes/capsulethunk.h
--- /dev/null
+++ b/Doc/includes/capsulethunk.h
@@ -0,0 +1,134 @@
+#ifndef __CAPSULETHUNK_H
+#define __CAPSULETHUNK_H
+
+#if (    (PY_VERSION_HEX <  0x02070000) \
+     || ((PY_VERSION_HEX >= 0x03000000) \
+      && (PY_VERSION_HEX <  0x03010000)) )
+
+#define __PyCapsule_GetField(capsule, field, default_value) \
+    ( PyCapsule_CheckExact(capsule) \
+        ? (((PyCObject *)capsule)->field) \
+        : (default_value) \
+    ) \
+
+#define __PyCapsule_SetField(capsule, field, value) \
+    ( PyCapsule_CheckExact(capsule) \
+        ? (((PyCObject *)capsule)->field = value), 1 \
+        : 0 \
+    ) \
+
+
+#define PyCapsule_Type PyCObject_Type
+
+#define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule))
+#define PyCapsule_IsValid(capsule, name) (PyCObject_Check(capsule))
+
+
+#define PyCapsule_New(pointer, name, destructor) \
+    (PyCObject_FromVoidPtr(pointer, destructor))
+
+
+#define PyCapsule_GetPointer(capsule, name) \
+    (PyCObject_AsVoidPtr(capsule))
+
+/* Don't call PyCObject_SetPointer here, it fails if there's a destructor */
+#define PyCapsule_SetPointer(capsule, pointer) \
+    __PyCapsule_SetField(capsule, cobject, pointer)
+
+
+#define PyCapsule_GetDestructor(capsule) \
+    __PyCapsule_GetField(capsule, destructor)
+
+#define PyCapsule_SetDestructor(capsule, dtor) \
+    __PyCapsule_SetField(capsule, destructor, dtor)
+
+
+/*
+ * Sorry, there's simply no place
+ * to store a Capsule "name" in a CObject.
+ */
+#define PyCapsule_GetName(capsule) NULL
+
+static int
+PyCapsule_SetName(PyObject *capsule, const char *unused)
+{
+    unused = unused;
+    PyErr_SetString(PyExc_NotImplementedError,
+        "can't use PyCapsule_SetName with CObjects");
+    return 1;
+}
+
+
+
+#define PyCapsule_GetContext(capsule) \
+    __PyCapsule_GetField(capsule, descr)
+
+#define PyCapsule_SetContext(capsule, context) \
+    __PyCapsule_SetField(capsule, descr, context)
+
+
+static void *
+PyCapsule_Import(const char *name, int no_block)
+{
+    PyObject *object = NULL;
+    void *return_value = NULL;
+    char *trace;
+    size_t name_length = (strlen(name) + 1) * sizeof(char);
+    char *name_dup = (char *)PyMem_MALLOC(name_length);
+
+    if (!name_dup) {
+        return NULL;
+    }
+
+    memcpy(name_dup, name, name_length);
+
+    trace = name_dup;
+    while (trace) {
+        char *dot = strchr(trace, '.');
+        if (dot) {
+            *dot++ = '\0';
+        }
+
+        if (object == NULL) {
+            if (no_block) {
+                object = PyImport_ImportModuleNoBlock(trace);
+            } else {
+                object = PyImport_ImportModule(trace);
+                if (!object) {
+                    PyErr_Format(PyExc_ImportError,
+                        "PyCapsule_Import could not "
+                        "import module \"%s\"", trace);
+                }
+            }
+        } else {
+            PyObject *object2 = PyObject_GetAttrString(object, trace);
+            Py_DECREF(object);
+            object = object2;
+        }
+        if (!object) {
+            goto EXIT;
+        }
+
+        trace = dot;
+    }
+
+    if (PyCObject_Check(object)) {
+        PyCObject *cobject = (PyCObject *)object;
+        return_value = cobject->cobject;
+    } else {
+        PyErr_Format(PyExc_AttributeError,
+            "PyCapsule_Import \"%s\" is not valid",
+            name);
+    }
+
+EXIT:
+    Py_XDECREF(object);
+    if (name_dup) {
+        PyMem_FREE(name_dup);
+    }
+    return return_value;
+}
+
+#endif /* #if PY_VERSION_HEX < 0x02070000 */
+
+#endif /* __CAPSULETHUNK_H */
diff -r 3d0686d90f55 Doc/library/__future__.rst
--- a/Doc/library/__future__.rst
+++ b/Doc/library/__future__.rst
@@ -75,7 +75,7 @@
 | division         | 2.2.0a2     | 3.0          | :pep:`238`:                                 |
 |                  |             |              | *Changing the Division Operator*            |
 +------------------+-------------+--------------+---------------------------------------------+
-| absolute_import  | 2.5.0a1     | 2.7          | :pep:`328`:                                 |
+| absolute_import  | 2.5.0a1     | 3.0          | :pep:`328`:                                 |
 |                  |             |              | *Imports: Multi-Line and Absolute/Relative* |
 +------------------+-------------+--------------+---------------------------------------------+
 | with_statement   | 2.5.0a1     | 2.6          | :pep:`343`:                                 |
diff -r 3d0686d90f55 Doc/library/argparse.rst
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -12,6 +12,12 @@
 
 --------------
 
+.. sidebar:: Tutorial
+
+   This page contains the API reference information. For a more gentle
+   introduction to Python command-line parsing, have a look at the
+   :ref:`argparse tutorial <argparse-tutorial>`.
+
 The :mod:`argparse` module makes it easy to write user-friendly command-line
 interfaces. The program defines what arguments it requires, and :mod:`argparse`
 will figure out how to parse those out of :data:`sys.argv`.  The :mod:`argparse`
@@ -741,7 +747,7 @@
 
 * ``values`` - The associated command-line arguments, with any type conversions
   applied.  (Type conversions are specified with the type_ keyword argument to
-  :meth:`~ArgumentParser.add_argument`.
+  :meth:`~ArgumentParser.add_argument`.)
 
 * ``option_string`` - The option string that was used to invoke this action.
   The ``option_string`` argument is optional, and will be absent if the action
@@ -1076,6 +1082,9 @@
    optional arguments:
     -h, --help  show this help message and exit
 
+As the help string supports %-formatting, if you want a literal ``%`` to appear
+in the help string, you must escape it as ``%%``.
+
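+For example (the option name is purely illustrative)::
+
+   parser = argparse.ArgumentParser(prog='PROG')
+   parser.add_argument('--ratio', type=float,
+                       help='discount expressed in %% of the list price')
+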
 :mod:`argparse` supports silencing the help entry for certain options, by
 setting the ``help`` value to ``argparse.SUPPRESS``::
 
@@ -1642,8 +1651,8 @@
 
        --bar BAR  bar help
 
-   Note that any arguments not your user defined groups will end up back in the
-   usual "positional arguments" and "optional arguments" sections.
+   Note that any arguments not in your user-defined groups will end up back
+   in the usual "positional arguments" and "optional arguments" sections.
 
 
 Mutual exclusion
@@ -1833,9 +1842,10 @@
 * Replace all :meth:`optparse.OptionParser.add_option` calls with
   :meth:`ArgumentParser.add_argument` calls.
 
-* Replace ``options, args = parser.parse_args()`` with ``args =
+* Replace ``(options, args) = parser.parse_args()`` with ``args =
   parser.parse_args()`` and add additional :meth:`ArgumentParser.add_argument`
-  calls for the positional arguments.
+  calls for the positional arguments. Keep in mind that what was previously
+  called ``options`` is now called ``args`` in the :mod:`argparse` context.
 
 * Replace callback actions and the ``callback_*`` keyword arguments with
   ``type`` or ``action`` arguments.
diff -r 3d0686d90f55 Doc/library/base64.rst
--- a/Doc/library/base64.rst
+++ b/Doc/library/base64.rst
@@ -140,6 +140,8 @@
    encoded data, and return a byte string containing the resulting binary data.
    ``decodestring`` is a deprecated alias.
 
+   .. versionadded:: 3.1
+
 
 .. function:: encode(input, output)
 
diff -r 3d0686d90f55 Doc/library/bz2.rst
--- a/Doc/library/bz2.rst
+++ b/Doc/library/bz2.rst
@@ -40,6 +40,9 @@
 Handling of compressed files is offered by the :class:`BZ2File` class.
 
 
+.. index::
+   single: universal newlines; bz2.BZ2File class
+
 .. class:: BZ2File(filename, mode='r', buffering=0, compresslevel=9)
 
    Open a bz2 file. Mode can be either ``'r'`` or ``'w'``, for reading (default)
@@ -48,7 +51,7 @@
    unbuffered, and larger numbers specify the buffer size; the default is
    ``0``. If *compresslevel* is given, it must be a number between ``1`` and
    ``9``; the default is ``9``. Add a ``'U'`` to mode to open the file for input
-   with universal newline support. Any line ending in the input file will be
+   in :term:`universal newlines` mode.  Any line ending in the input file will be
    seen as a ``'\n'`` in Python.  Also, a file so opened gains the attribute
    :attr:`newlines`; the value for this attribute is one of ``None`` (no newline
    read yet), ``'\r'``, ``'\n'``, ``'\r\n'`` or a tuple containing all the
diff -r 3d0686d90f55 Doc/library/cgi.rst
--- a/Doc/library/cgi.rst
+++ b/Doc/library/cgi.rst
@@ -86,11 +86,14 @@
 tracking down bugs.  You can always remove the ``cgitb`` line later when you
 have tested your script and are confident that it works correctly.
 
-To get at submitted form data, use the :class:`FieldStorage` class.  Instantiate
-it exactly once, without arguments.  This reads the form contents from standard
-input or the environment (depending on the value of various environment
-variables set according to the CGI standard).  Since it may consume standard
-input, it should be instantiated only once.
+To get at submitted form data, use the :class:`FieldStorage` class. If the form
+contains non-ASCII characters, set the *encoding* keyword parameter to the
+encoding defined for the document; it is usually given in the META tag in the
+HEAD section of the HTML document or by the :mailheader:`Content-Type` header.
+Instantiating :class:`FieldStorage` reads the form contents from the standard
+input or the environment (depending on the value of various environment
+variables set according to the CGI standard).  Since it may consume standard
+input, it should be instantiated only once.
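+
+For example, a minimal sketch (assuming the document declares UTF-8)::
+
+   import cgi
+   form = cgi.FieldStorage(encoding="utf-8")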
 
 The :class:`FieldStorage` instance can be indexed like a Python dictionary.
 It allows membership testing with the :keyword:`in` operator, and also supports
@@ -136,10 +139,10 @@
 
 If a field represents an uploaded file, accessing the value via the
 :attr:`value` attribute or the :func:`getvalue` method reads the entire file in
-memory as a string.  This may not be what you want. You can test for an uploaded
+memory as bytes.  This may not be what you want. You can test for an uploaded
 file by testing either the :attr:`filename` attribute or the :attr:`!file`
 attribute.  You can then read the data at leisure from the :attr:`!file`
-attribute::
+attribute (the :func:`read` and :func:`readline` methods will return bytes)::
 
    fileitem = form["userfile"]
    if fileitem.file:
diff -r 3d0686d90f55 Doc/library/cmd.rst
--- a/Doc/library/cmd.rst
+++ b/Doc/library/cmd.rst
@@ -276,7 +276,7 @@
             print('Thank you for using Turtle')
             self.close()
             bye()
-            sys.exit(0)
+            return True
 
         # ----- record and playback -----
         def do_record(self, arg):
diff -r 3d0686d90f55 Doc/library/codecs.rst
--- a/Doc/library/codecs.rst
+++ b/Doc/library/codecs.rst
@@ -3,8 +3,8 @@
 
 .. module:: codecs
    :synopsis: Encode and decode data and streams.
-.. moduleauthor:: Marc-Andre Lemburg <mal@lemburg.com>
-.. sectionauthor:: Marc-Andre Lemburg <mal@lemburg.com>
+.. moduleauthor:: Marc-André Lemburg <mal@lemburg.com>
+.. sectionauthor:: Marc-André Lemburg <mal@lemburg.com>
 .. sectionauthor:: Martin v. Löwis <martin@v.loewis.de>
 
 
diff -r 3d0686d90f55 Doc/library/constants.rst
--- a/Doc/library/constants.rst
+++ b/Doc/library/constants.rst
@@ -19,7 +19,7 @@
 
 .. data:: None
 
-   The sole value of :attr:`types.NoneType`.  ``None`` is frequently used to
+   The sole value of the type ``NoneType``.  ``None`` is frequently used to
    represent the absence of a value, as when default arguments are not passed to a
    function. Assignments to ``None`` are illegal and raise a :exc:`SyntaxError`.
 
diff -r 3d0686d90f55 Doc/library/csv.rst
--- a/Doc/library/csv.rst
+++ b/Doc/library/csv.rst
@@ -46,6 +46,9 @@
 The :mod:`csv` module defines the following functions:
 
 
+.. index::
+   single: universal newlines; csv.reader function
+
 .. function:: reader(csvfile, dialect='excel', **fmtparams)
 
    Return a reader object which will iterate over lines in the given *csvfile*.
@@ -486,4 +489,5 @@
 .. [1] If ``newline=''`` is not specified, newlines embedded inside quoted fields
    will not be interpreted correctly, and on platforms that use ``\r\n`` line
    endings on write, an extra ``\r`` will be added.  It should always be safe to specify
-   ``newline=''``, since the csv module does its own (universal) newline handling.
+   ``newline=''``, since the csv module does its own
+   (:term:`universal <universal newlines>`) newline handling.
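+
+A minimal sketch of that recommendation (``'some.csv'`` is a placeholder
+filename)::
+
+   import csv
+   with open('some.csv', newline='') as f:
+       for row in csv.reader(f):
+           print(row)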
diff -r 3d0686d90f55 Doc/library/ctypes.rst
--- a/Doc/library/ctypes.rst
+++ b/Doc/library/ctypes.rst
@@ -243,6 +243,11 @@
 | :class:`c_ulonglong` | :c:type:`unsigned __int64` or            | int                        |
 |                      | :c:type:`unsigned long long`             |                            |
 +----------------------+------------------------------------------+----------------------------+
+| :class:`c_size_t`    | :c:type:`size_t`                         | int                        |
++----------------------+------------------------------------------+----------------------------+
+| :class:`c_ssize_t`   | :c:type:`ssize_t` or                     | int                        |
+|                      | :c:type:`Py_ssize_t`                     |                            |
++----------------------+------------------------------------------+----------------------------+
 | :class:`c_float`     | :c:type:`float`                          | float                      |
 +----------------------+------------------------------------------+----------------------------+
 | :class:`c_double`    | :c:type:`double`                         | float                      |
diff -r 3d0686d90f55 Doc/library/datetime.rst
--- a/Doc/library/datetime.rst
+++ b/Doc/library/datetime.rst
@@ -12,28 +12,34 @@
 The :mod:`datetime` module supplies classes for manipulating dates and times in
 both simple and complex ways.  While date and time arithmetic is supported, the
 focus of the implementation is on efficient attribute extraction for output
-formatting and manipulation. For related
-functionality, see also the :mod:`time` and :mod:`calendar` modules.
+formatting and manipulation. For related functionality, see also the
+:mod:`time` and :mod:`calendar` modules.
 
-There are two kinds of date and time objects: "naive" and "aware". This
-distinction refers to whether the object has any notion of time zone, daylight
-saving time, or other kind of algorithmic or political time adjustment.  Whether
-a naive :class:`.datetime` object represents Coordinated Universal Time (UTC),
-local time, or time in some other timezone is purely up to the program, just
-like it's up to the program whether a particular number represents metres,
-miles, or mass.  Naive :class:`.datetime` objects are easy to understand and to
-work with, at the cost of ignoring some aspects of reality.
+There are two kinds of date and time objects: "naive" and "aware".
 
-For applications requiring more, :class:`.datetime` and :class:`.time` objects
-have an optional time zone information attribute, :attr:`tzinfo`, that can be
-set to an instance of a subclass of the abstract :class:`tzinfo` class.  These
-:class:`tzinfo` objects capture information about the offset from UTC time, the
-time zone name, and whether Daylight Saving Time is in effect.  Note that only
-one concrete :class:`tzinfo` class, the :class:`timezone` class, is supplied by the
-:mod:`datetime` module.  The :class:`timezone` class can represent simple
-timezones with fixed offset from UTC such as UTC itself or North American EST and
-EDT timezones.  Supporting timezones at whatever level of detail is
-required is up to the application.  The rules for time adjustment across the
+An aware object has sufficient knowledge of applicable algorithmic and
+political time adjustments, such as time zone and daylight saving time
+information, to locate itself relative to other aware objects.  An aware object
+is used to represent a specific moment in time that is not open to
+interpretation [#]_.
+
+A naive object does not contain enough information to unambiguously locate
+itself relative to other date/time objects.  Whether a naive object represents
+Coordinated Universal Time (UTC), local time, or time in some other timezone is
+purely up to the program, just like it is up to the program whether a
+particular number represents metres, miles, or mass.  Naive objects are easy to
+understand and to work with, at the cost of ignoring some aspects of reality.
+
+For applications requiring aware objects, :class:`.datetime` and :class:`.time`
+objects have an optional time zone information attribute, :attr:`tzinfo`, that
+can be set to an instance of a subclass of the abstract :class:`tzinfo` class.
+These :class:`tzinfo` objects capture information about the offset from UTC
+time, the time zone name, and whether Daylight Saving Time is in effect.  Note
+that only one concrete :class:`tzinfo` class, the :class:`timezone` class, is
+supplied by the :mod:`datetime` module.  The :class:`timezone` class can
+represent simple timezones with fixed offset from UTC, such as UTC itself or
+North American EST and EDT timezones.  Supporting timezones at deeper levels of
+detail is up to the application.  The rules for time adjustment across the
 world are more political than rational, change frequently, and there is no
 standard suitable for every application aside from UTC.
 
@@ -114,10 +120,13 @@
 
 Objects of the :class:`date` type are always naive.
 
-An object *d* of type :class:`.time` or :class:`.datetime` may be naive or aware.
-*d* is aware if ``d.tzinfo`` is not ``None`` and ``d.tzinfo.utcoffset(d)`` does
-not return ``None``.  If ``d.tzinfo`` is ``None``, or if ``d.tzinfo`` is not
-``None`` but ``d.tzinfo.utcoffset(d)`` returns ``None``, *d* is naive.
+An object of type :class:`.time` or :class:`.datetime` may be naive or aware.
+A :class:`.datetime` object *d* is aware if ``d.tzinfo`` is not ``None`` and
+``d.tzinfo.utcoffset(d)`` does not return ``None``.  If ``d.tzinfo`` is
+``None``, or if ``d.tzinfo`` is not ``None`` but ``d.tzinfo.utcoffset(d)``
+returns ``None``, *d* is naive.  A :class:`.time` object *t* is aware
+if ``t.tzinfo`` is not ``None`` and ``t.tzinfo.utcoffset(None)`` does not return
+``None``.  Otherwise, *t* is naive.
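+
+For example (the :class:`timezone` class is described later in this page)::
+
+   >>> from datetime import datetime, timezone
+   >>> datetime(2012, 8, 26).tzinfo is None          # naive
+   True
+   >>> d = datetime(2012, 8, 26, tzinfo=timezone.utc)
+   >>> d.tzinfo is not None and d.tzinfo.utcoffset(d) is not None   # aware
+   True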
 
 The distinction between naive and aware doesn't apply to :class:`timedelta`
 objects.
@@ -1118,14 +1127,14 @@
 
     >>> from datetime import timedelta, datetime, tzinfo
     >>> class GMT1(tzinfo):
-    ...     def __init__(self):         # DST starts last Sunday in March
+    ...     def utcoffset(self, dt):
+    ...         return timedelta(hours=1) + self.dst(dt)
+    ...     def dst(self, dt):
+    ...         # DST starts last Sunday in March
     ...         d = datetime(dt.year, 4, 1)   # ends last Sunday in October
     ...         self.dston = d - timedelta(days=d.weekday() + 1)
     ...         d = datetime(dt.year, 11, 1)
     ...         self.dstoff = d - timedelta(days=d.weekday() + 1)
-    ...     def utcoffset(self, dt):
-    ...         return timedelta(hours=1) + self.dst(dt)
-    ...     def dst(self, dt):
     ...         if self.dston <=  dt.replace(tzinfo=None) < self.dstoff:
     ...             return timedelta(hours=1)
     ...         else:
@@ -1134,16 +1143,15 @@
     ...          return "GMT +1"
     ...
     >>> class GMT2(tzinfo):
-    ...     def __init__(self):
+    ...     def utcoffset(self, dt):
+    ...         return timedelta(hours=2) + self.dst(dt)
+    ...     def dst(self, dt):
     ...         d = datetime(dt.year, 4, 1)
     ...         self.dston = d - timedelta(days=d.weekday() + 1)
     ...         d = datetime(dt.year, 11, 1)
     ...         self.dstoff = d - timedelta(days=d.weekday() + 1)
-    ...     def utcoffset(self, dt):
-    ...         return timedelta(hours=1) + self.dst(dt)
-    ...     def dst(self, dt):
     ...         if self.dston <=  dt.replace(tzinfo=None) < self.dstoff:
-    ...             return timedelta(hours=2)
+    ...             return timedelta(hours=1)
     ...         else:
     ...             return timedelta(0)
     ...     def tzname(self,dt):
@@ -1518,7 +1526,6 @@
 
 .. literalinclude:: ../includes/tzinfo-examples.py
 
-
 Note that there are unavoidable subtleties twice per year in a :class:`tzinfo`
 subclass accounting for both standard and daylight time, at the DST transition
 points.  For concreteness, consider US Eastern (UTC -0500), where EDT begins the
@@ -1537,7 +1544,7 @@
 3:00.  A wall time of the form 2:MM doesn't really make sense on that day, so
 ``astimezone(Eastern)`` won't deliver a result with ``hour == 2`` on the day DST
 begins.  In order for :meth:`astimezone` to make this guarantee, the
-:meth:`rzinfo.dst` method must consider times in the "missing hour" (2:MM for
+:meth:`tzinfo.dst` method must consider times in the "missing hour" (2:MM for
 Eastern) to be in daylight time.
 
 When DST ends (the "end" line), there's a potentially worse problem: there's an
@@ -1558,6 +1565,22 @@
 or any other fixed-offset :class:`tzinfo` subclass (such as a class representing
 only EST (fixed offset -5 hours), or only EDT (fixed offset -4 hours)).
 
+.. seealso::
+
+   `pytz <http://pypi.python.org/pypi/pytz/>`_
+      The standard library has no :class:`tzinfo` instances except for UTC, but
+      there exists a third-party library which brings the *IANA timezone
+      database* (also known as the Olson database) to Python: *pytz*.
+
+      *pytz* contains up-to-date information and its usage is recommended.
+
+   `IANA timezone database <http://www.iana.org/time-zones>`_
+      The Time Zone Database (often called tz or zoneinfo) contains code and
+      data that represent the history of local time for many representative
+      locations around the globe. It is updated periodically to reflect changes
+      made by political bodies to time zone boundaries, UTC offsets, and
+      daylight-saving rules.
+
 
 .. _datetime-timezone:
 
@@ -1791,3 +1814,7 @@
    When the ``%z`` directive is provided to the :meth:`strptime` method, an
    aware :class:`.datetime` object will be produced.  The ``tzinfo`` of the
    result will be set to a :class:`timezone` instance.
+
+.. rubric:: Footnotes
+
+.. [#] If, that is, we ignore the effects of Relativity.
diff -r 3d0686d90f55 Doc/library/email.charset.rst
--- a/Doc/library/email.charset.rst
+++ b/Doc/library/email.charset.rst
@@ -1,5 +1,5 @@
-:mod:`email`: Representing character sets
------------------------------------------
+:mod:`email.charset`: Representing character sets
+-------------------------------------------------
 
 .. module:: email.charset
    :synopsis: Character Sets
diff -r 3d0686d90f55 Doc/library/email.encoders.rst
--- a/Doc/library/email.encoders.rst
+++ b/Doc/library/email.encoders.rst
@@ -1,5 +1,5 @@
-:mod:`email`: Encoders
-----------------------
+:mod:`email.encoders`: Encoders
+-------------------------------
 
 .. module:: email.encoders
    :synopsis: Encoders for email message payloads.
@@ -18,6 +18,10 @@
 payload, encode it, and reset the payload to this newly encoded value.  They
 should also set the :mailheader:`Content-Transfer-Encoding` header as appropriate.
 
+Note that these functions are not meaningful for a multipart message.  They
+must be applied to individual subparts instead, and will raise a
+:exc:`TypeError` if passed a message whose type is multipart.
+
 Here are the encoding functions provided:
 
 
diff -r 3d0686d90f55 Doc/library/email.errors.rst
--- a/Doc/library/email.errors.rst
+++ b/Doc/library/email.errors.rst
@@ -1,5 +1,5 @@
-:mod:`email`: Exception and Defect classes
-------------------------------------------
+:mod:`email.errors`: Exception and Defect classes
+-------------------------------------------------
 
 .. module:: email.errors
    :synopsis: The exception classes used by the email package.
diff -r 3d0686d90f55 Doc/library/email.generator.rst
--- a/Doc/library/email.generator.rst
+++ b/Doc/library/email.generator.rst
@@ -1,5 +1,5 @@
-:mod:`email`: Generating MIME documents
----------------------------------------
+:mod:`email.generator`: Generating MIME documents
+-------------------------------------------------
 
 .. module:: email.generator
    :synopsis: Generate flat text email messages from a message structure.
@@ -17,10 +17,10 @@
 standards-compliant way, should handle MIME and non-MIME email messages just
 fine, and is designed so that the transformation from flat text, to a message
 structure via the :class:`~email.parser.Parser` class, and back to flat text,
-is idempotent (the input is identical to the output).  On the other hand, using
-the Generator on a :class:`~email.message.Message` constructed by program may
-result in changes to the :class:`~email.message.Message` object as defaults are
-filled in.
+is idempotent (the input is identical to the output) [#]_.  On the other hand,
+using the Generator on a :class:`~email.message.Message` constructed by a program
+may result in changes to the :class:`~email.message.Message` object as defaults
+are filled in.
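+
+A rough sketch of that round trip (the sample message is purely
+illustrative)::
+
+   import io
+   from email import message_from_string
+   from email.generator import Generator
+
+   text = ("From: sender@example.com\n"
+           "To: recipient@example.com\n"
+           "Subject: Greetings\n"
+           "\n"
+           "Hello, world!\n")
+   msg = message_from_string(text)
+   out = io.StringIO()
+   gen = Generator(out, mangle_from_=False, maxheaderlen=0)
+   gen.flatten(msg)
+   # out.getvalue() now holds the regenerated flat text; subject to the
+   # caveats noted in the footnote, it matches *text*.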
 
 :class:`bytes` output can be generated using the :class:`BytesGenerator` class.
 If the message object structure contains non-ASCII bytes, this generator's
@@ -178,7 +178,7 @@
 representing the part.
 
 
-.. class:: DecodedGenerator(outfp[, mangle_from_=True, maxheaderlen=78, fmt=None)
+.. class:: DecodedGenerator(outfp, mangle_from_=True, maxheaderlen=78, fmt=None)
 
    This class, derived from :class:`Generator` walks through all the subparts of a
    message.  If the subpart is of main type :mimetype:`text`, then it prints the
@@ -204,3 +204,12 @@
    The default value for *fmt* is ``None``, meaning ::
 
       [Non-text (%(type)s) part of message omitted, filename %(filename)s]
+
+
+.. rubric:: Footnotes
+
+.. [#] This statement assumes that you use the appropriate setting for the
+       ``unixfrom`` argument, and that you set ``maxheaderlen=0`` (which will
+       preserve whatever the input line lengths were).  It is also not strictly
+       true, since in many cases runs of whitespace in headers are collapsed
+       into single blanks.  The latter is a bug that will eventually be fixed.
diff -r 3d0686d90f55 Doc/library/email.header.rst
--- a/Doc/library/email.header.rst
+++ b/Doc/library/email.header.rst
@@ -1,5 +1,5 @@
-:mod:`email`: Internationalized headers
----------------------------------------
+:mod:`email.header`: Internationalized headers
+----------------------------------------------
 
 .. module:: email.header
    :synopsis: Representing non-ASCII headers
diff -r 3d0686d90f55 Doc/library/email.iterators.rst
--- a/Doc/library/email.iterators.rst
+++ b/Doc/library/email.iterators.rst
@@ -1,5 +1,5 @@
-:mod:`email`: Iterators
------------------------
+:mod:`email.iterators`: Iterators
+---------------------------------
 
 .. module:: email.iterators
    :synopsis: Iterate over a  message object tree.
diff -r 3d0686d90f55 Doc/library/email.message.rst
--- a/Doc/library/email.message.rst
+++ b/Doc/library/email.message.rst
@@ -1,5 +1,5 @@
-:mod:`email`: Representing an email message
--------------------------------------------
+:mod:`email.message`: Representing an email message
+---------------------------------------------------
 
 .. module:: email.message
    :synopsis: The base class representing email messages.
diff -r 3d0686d90f55 Doc/library/email.mime.rst
--- a/Doc/library/email.mime.rst
+++ b/Doc/library/email.mime.rst
@@ -1,5 +1,5 @@
-:mod:`email`: Creating email and MIME objects from scratch
-----------------------------------------------------------
+:mod:`email.mime`: Creating email and MIME objects from scratch
+---------------------------------------------------------------
 
 .. module:: email.mime
    :synopsis: Build MIME messages.
diff -r 3d0686d90f55 Doc/library/email.parser.rst
--- a/Doc/library/email.parser.rst
+++ b/Doc/library/email.parser.rst
@@ -1,5 +1,5 @@
-:mod:`email`: Parsing email messages
-------------------------------------
+:mod:`email.parser`: Parsing email messages
+-------------------------------------------
 
 .. module:: email.parser
    :synopsis: Parse flat text email messages to produce a message object structure.
diff -r 3d0686d90f55 Doc/library/email.util.rst
--- a/Doc/library/email.util.rst
+++ b/Doc/library/email.util.rst
@@ -1,5 +1,5 @@
-:mod:`email`: Miscellaneous utilities
--------------------------------------
+:mod:`email.utils`: Miscellaneous utilities
+-------------------------------------------
 
 .. module:: email.utils
    :synopsis: Miscellaneous email package utilities.
diff -r 3d0686d90f55 Doc/library/filecmp.rst
--- a/Doc/library/filecmp.rst
+++ b/Doc/library/filecmp.rst
@@ -75,6 +75,9 @@
    'tags']``. *hide* is a list of names to hide, and defaults to ``[os.curdir,
    os.pardir]``.
 
+   The :class:`dircmp` class compares files by doing *shallow* comparisons
+   as described for :func:`filecmp.cmp`.
+
    The :class:`dircmp` class provides the following methods:
 
 
@@ -94,7 +97,7 @@
       Print a comparison between *a* and *b* and common subdirectories
       (recursively).
 
-   The :class:`dircmp` offers a number of interesting attributes that may be
+   The :class:`dircmp` class offers a number of interesting attributes that may be
    used to get various bits of information about the directory trees being
    compared.
 
@@ -103,6 +106,16 @@
    to compute are used.
 
 
+   .. attribute:: left
+
+      The directory *a*.
+
+
+   .. attribute:: right
+
+      The directory *b*.
+
+
    .. attribute:: left_list
 
       Files and subdirectories in *a*, filtered by *hide* and *ignore*.
@@ -146,12 +159,14 @@
 
    .. attribute:: same_files
 
-      Files which are identical in both *a* and *b*.
+      Files which are identical in both *a* and *b*, using the class's
+      file comparison operator.
 
 
    .. attribute:: diff_files
 
-      Files which are in both *a* and *b*, whose contents differ.
+      Files which are in both *a* and *b*, whose contents differ according
+      to the class's file comparison operator.
 
 
    .. attribute:: funny_files
@@ -164,3 +179,18 @@
       A dictionary mapping names in :attr:`common_dirs` to :class:`dircmp`
       objects.
 
+
+Here is a simplified example of using the ``subdirs`` attribute to search
+recursively through two directories to show files that exist in both but
+whose contents differ::
+
+    >>> from filecmp import dircmp
+    >>> def print_diff_files(dcmp):
+    ...     for name in dcmp.diff_files:
+    ...         print("diff_file %s found in %s and %s" % (name, dcmp.left,
+    ...               dcmp.right))
+    ...     for sub_dcmp in dcmp.subdirs.values():
+    ...         print_diff_files(sub_dcmp)
+    ...
+    >>> dcmp = dircmp('dir1', 'dir2')
+    >>> print_diff_files(dcmp)
+
diff -r 3d0686d90f55 Doc/library/functions.rst
--- a/Doc/library/functions.rst
+++ b/Doc/library/functions.rst
@@ -247,6 +247,13 @@
    the function serves as a numeric conversion function like :func:`int`
    and :func:`float`.  If both arguments are omitted, returns ``0j``.
 
+   .. note::
+
+      When converting from a string, the string must not contain whitespace
+      around the central ``+`` or ``-`` operator.  For example,
+      ``complex('1+2j')`` is fine, but ``complex('1 + 2j')`` raises
+      :exc:`ValueError`.
+
    The complex type is described in :ref:`typesnumeric`.
 
 
@@ -410,7 +417,10 @@
    current scope.  If only *globals* is provided, it must be a dictionary, which
    will be used for both the global and the local variables.  If *globals* and
    *locals* are given, they are used for the global and local variables,
-   respectively.  If provided, *locals* can be any mapping object.
+   respectively.  If provided, *locals* can be any mapping object.  Remember
+   that at module level, globals and locals are the same dictionary.  If
+   :func:`exec` gets two separate objects as *globals* and *locals*, the code
+   will be executed as if it were embedded in a class definition.
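+
+   As a minimal illustration of the two-dictionary case::
+
+      >>> g, l = {}, {}
+      >>> exec('x = 1', g, l)
+      >>> l
+      {'x': 1}
+      >>> 'x' in g
+      False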
 
    If the *globals* dictionary does not contain a value for the key
    ``__builtins__``, a reference to the dictionary of the built-in module
@@ -780,10 +790,13 @@
    :meth:`__index__` method that returns an integer.
 
 
+   .. index::
+      single: file object; open() built-in function
+
 .. function:: open(file, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True)
 
-   Open *file* and return a corresponding stream.  If the file cannot be opened,
-   an :exc:`IOError` is raised.
+   Open *file* and return a corresponding :term:`file object`.  If the file
+   cannot be opened, an :exc:`IOError` is raised.
 
    *file* is either a string or bytes object giving the pathname (absolute or
    relative to the current working directory) of the file to be opened or
@@ -809,7 +822,7 @@
    ``'b'``   binary mode
    ``'t'``   text mode (default)
    ``'+'``   open a disk file for updating (reading and writing)
-   ``'U'``   universal newline mode (for backwards compatibility; should
+   ``'U'``   universal newlines mode (for backwards compatibility; should
              not be used in new code)
    ========= ===============================================================
 
@@ -864,32 +877,35 @@
    used.  Any other error handling name that has been registered with
    :func:`codecs.register_error` is also valid.
 
-   *newline* controls how universal newlines works (it only applies to text
-   mode).  It can be ``None``, ``''``, ``'\n'``, ``'\r'``, and ``'\r\n'``.  It
-   works as follows:
+   .. index::
+      single: universal newlines; open() built-in function
 
-   * On input, if *newline* is ``None``, universal newlines mode is enabled.
-     Lines in the input can end in ``'\n'``, ``'\r'``, or ``'\r\n'``, and these
-     are translated into ``'\n'`` before being returned to the caller.  If it is
-     ``''``, universal newline mode is enabled, but line endings are returned to
-     the caller untranslated.  If it has any of the other legal values, input
-     lines are only terminated by the given string, and the line ending is
-     returned to the caller untranslated.
+   *newline* controls how :term:`universal newlines` mode works (it only
+   applies to text mode).  It can be ``None``, ``''``, ``'\n'``, ``'\r'``, and
+   ``'\r\n'``.  It works as follows:
 
-   * On output, if *newline* is ``None``, any ``'\n'`` characters written are
-     translated to the system default line separator, :data:`os.linesep`.  If
-     *newline* is ``''``, no translation takes place.  If *newline* is any of
-     the other legal values, any ``'\n'`` characters written are translated to
-     the given string.
+   * When reading input from the stream, if *newline* is ``None``, universal
+     newlines mode is enabled.  Lines in the input can end in ``'\n'``,
+     ``'\r'``, or ``'\r\n'``, and these are translated into ``'\n'`` before
+     being returned to the caller.  If it is ``''``, universal newlines mode is
+     enabled, but line endings are returned to the caller untranslated.  If it
+     has any of the other legal values, input lines are only terminated by the
+     given string, and the line ending is returned to the caller untranslated.
+
+   * When writing output to the stream, if *newline* is ``None``, any ``'\n'``
+     characters written are translated to the system default line separator,
+     :data:`os.linesep`.  If *newline* is ``''`` or ``'\n'``, no translation
+     takes place.  If *newline* is any of the other legal values, any ``'\n'``
+     characters written are translated to the given string.
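+
+   As an illustration (``'spam.txt'`` is just a placeholder name)::
+
+      with open('spam.txt', 'w', newline='') as f:
+          f.write('one\r\ntwo\r\n')    # written untranslated
+      with open('spam.txt') as f:      # newline=None: universal newlines
+          print(f.readlines())         # prints ['one\n', 'two\n']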
 
    If *closefd* is ``False`` and a file descriptor rather than a filename was
    given, the underlying file descriptor will be kept open when the file is
    closed.  If a filename is given *closefd* has no effect and must be ``True``
    (the default).
 
-   The type of file object returned by the :func:`open` function depends on the
-   mode.  When :func:`open` is used to open a file in a text mode (``'w'``,
-   ``'r'``, ``'wt'``, ``'rt'``, etc.), it returns a subclass of
+   The type of :term:`file object` returned by the :func:`open` function
+   depends on the mode.  When :func:`open` is used to open a file in a text
+   mode (``'w'``, ``'r'``, ``'wt'``, ``'rt'``, etc.), it returns a subclass of
    :class:`io.TextIOBase` (specifically :class:`io.TextIOWrapper`).  When used
    to open a file in a binary mode with buffering, the returned class is a
    subclass of :class:`io.BufferedIOBase`.  The exact class varies: in read
@@ -1400,7 +1416,7 @@
       True
 
 
-.. function:: __import__(name, globals={}, locals={}, fromlist=[], level=0)
+.. function:: __import__(name, globals={}, locals={}, fromlist=[], level=-1)
 
    .. index::
       statement: import
@@ -1425,10 +1441,13 @@
    not use its *locals* argument at all, and uses its *globals* only to
    determine the package context of the :keyword:`import` statement.
 
-   *level* specifies whether to use absolute or relative imports. ``0`` (the
-   default) means only perform absolute imports.  Positive values for
-   *level* indicate the number of parent directories to search relative to the
-   directory of the module calling :func:`__import__`.
+   *level* specifies whether to use absolute or relative imports. ``0``
+   means only perform absolute imports. Positive values for *level* indicate the
+   number of parent directories to search relative to the directory of the
+   module calling :func:`__import__`.  Negative values attempt both an implicit
+   relative import and an absolute import (usage of negative values for *level*
+   is strongly discouraged, as future versions of Python will not support such
+   values). Import statements only use values of 0 or greater.
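+
+   As a small illustration, an absolute import performed through this
+   function::
+
+      >>> mod = __import__('email', globals(), locals(), [], 0)
+      >>> mod.__name__
+      'email'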
 
    When the *name* variable is of the form ``package.module``, normally, the
    top-level package (the name up till the first dot) is returned, *not* the
diff -r 3d0686d90f55 Doc/library/html.rst
--- a/Doc/library/html.rst
+++ b/Doc/library/html.rst
@@ -4,8 +4,6 @@
 .. module:: html
    :synopsis: Helpers for manipulating HTML.
 
-.. versionadded:: 3.2
-
 **Source code:** :source:`Lib/html/__init__.py`
 
 --------------
@@ -19,3 +17,5 @@
    characters in HTML.  If the optional flag *quote* is true, the characters
    (``"``) and (``'``) are also translated; this helps for inclusion in an HTML
    attribute value delimited by quotes, as in ``<a href="...">``.
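+
+   For example::
+
+      >>> import html
+      >>> html.escape('x > 1 & y < 2', quote=False)
+      'x &gt; 1 &amp; y &lt; 2'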
+
+   .. versionadded:: 3.2
diff -r 3d0686d90f55 Doc/library/http.cookiejar.rst
--- a/Doc/library/http.cookiejar.rst
+++ b/Doc/library/http.cookiejar.rst
@@ -700,7 +700,7 @@
 The :class:`Cookie` class also defines the following method:
 
 
-.. method:: Cookie.is_expired([now=None])
+.. method:: Cookie.is_expired(now=None)
 
    True if cookie has passed the time at which the server requested it should
    expire.  If *now* is given (in seconds since the epoch), return whether the
diff -r 3d0686d90f55 Doc/library/http.cookies.rst
--- a/Doc/library/http.cookies.rst
+++ b/Doc/library/http.cookies.rst
@@ -17,8 +17,14 @@
 
 The module formerly strictly applied the parsing rules described in the
 :rfc:`2109` and :rfc:`2068` specifications.  It has since been discovered that
-MSIE 3.0x doesn't follow the character rules outlined in those specs.  As a
-result, the parsing rules used are a bit less strict.
+MSIE 3.0x doesn't follow the character rules outlined in those specs and also
+that many present-day browsers and servers have relaxed parsing rules when it
+comes to cookie handling.  As a result, the parsing rules used are a bit less
+strict.
+
+The characters in :data:`string.ascii_letters`, :data:`string.digits` and
+``!#$%&'*+-.^_`|~`` make up the set of valid characters allowed by this module
+in a cookie name (as :attr:`~Morsel.key`).
+
 
 .. note::
 
diff -r 3d0686d90f55 Doc/library/http.server.rst
--- a/Doc/library/http.server.rst
+++ b/Doc/library/http.server.rst
@@ -229,7 +229,7 @@
       to create custom error logging mechanisms. The *format* argument is a
       standard printf-style format string, where the additional arguments to
       :meth:`log_message` are applied as inputs to the formatting. The client
-      address and current date and time are prefixed to every message logged.
+      IP address and current date and time are prefixed to every message logged.
 
    .. method:: version_string()
 
@@ -333,7 +333,7 @@
    httpd.serve_forever()
 
 :mod:`http.server` can also be invoked directly using the :option:`-m`
-switch of the interpreter a with ``port number`` argument.  Similar to
+switch of the interpreter with a ``port number`` argument.  Similar to
 the previous example, this serves files relative to the current directory. ::
 
         python -m http.server 8000
diff -r 3d0686d90f55 Doc/library/imp.rst
--- a/Doc/library/imp.rst
+++ b/Doc/library/imp.rst
@@ -64,7 +64,7 @@
    path and the last item in the *description* tuple is :const:`PKG_DIRECTORY`.
 
    This function does not handle hierarchical module names (names containing
-   dots).  In order to find *P*.*M*, that is, submodule *M* of package *P*, use
+   dots).  In order to find *P.M*, that is, submodule *M* of package *P*, use
    :func:`find_module` and :func:`load_module` to find and load package *P*, and
    then use :func:`find_module` with the *path* argument set to ``P.__path__``.
    When *P* itself has a dotted name, apply this recipe recursively.
@@ -256,7 +256,7 @@
 
 .. data:: PY_FROZEN
 
-   The module was found as a frozen module (see :func:`init_frozen`).
+   The module was found as a frozen module.
 
 
 .. class:: NullImporter(path_string)
diff -r 3d0686d90f55 Doc/library/importlib.rst
--- a/Doc/library/importlib.rst
+++ b/Doc/library/importlib.rst
@@ -189,12 +189,16 @@
         (e.g. built-in module).  :exc:`ImportError` is raised if loader cannot
         find the requested module.
 
+        .. index::
+           single: universal newlines; importlib.abc.InspectLoader.get_source method
+
     .. method:: get_source(fullname)
 
         An abstract method to return the source of a module. It is returned as
-        a text string with universal newlines. Returns ``None`` if no
-        source is available (e.g. a built-in module). Raises :exc:`ImportError`
-        if the loader cannot find the module specified.
+        a text string using :term:`universal newlines`, translating all
+        recognized line separators into ``'\n'`` characters.  Returns ``None``
+        if no source is available (e.g. a built-in module). Raises
+        :exc:`ImportError` if the loader cannot find the module specified.
 
     .. method:: is_package(fullname)
 
diff -r 3d0686d90f55 Doc/library/io.rst
--- a/Doc/library/io.rst
+++ b/Doc/library/io.rst
@@ -16,11 +16,15 @@
 Overview
 --------
 
-The :mod:`io` module provides Python's main facilities for dealing for various
-types of I/O.  There are three main types of I/O: *text I/O*, *binary I/O*, *raw
-I/O*.  These are generic categories, and various backing stores can be used for
-each of them.  Concrete objects belonging to any of these categories will often
-be called *streams*; another common term is *file-like objects*.
+.. index::
+   single: file object; io module
+
+The :mod:`io` module provides Python's main facilities for dealing with various
+types of I/O.  There are three main types of I/O: *text I/O*, *binary I/O*
+and *raw I/O*.  These are generic categories, and various backing stores can
+be used for each of them.  A concrete object belonging to any of these
+categories is called a :term:`file object`.  Other common terms are *stream*
+and *file-like object*.
 
 Independently of its category, each concrete stream object will also have
 various capabilities: it can be read-only, write-only, or read-write. It can
@@ -699,11 +703,13 @@
       Read and return at most *n* characters from the stream as a single
       :class:`str`.  If *n* is negative or ``None``, reads until EOF.
 
-   .. method:: readline()
+   .. method:: readline(limit=-1)
 
       Read until newline or EOF and return a single ``str``.  If the stream is
       already at EOF, an empty string is returned.
 
+      If *limit* is specified, at most *limit* characters will be read.
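+
+      For example::
+
+         >>> import io
+         >>> buf = io.StringIO('spam and eggs\n')
+         >>> buf.readline(4)
+         'spam'
+         >>> buf.readline()
+         ' and eggs\n'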
+
    .. method:: seek(offset, whence=SEEK_SET)
 
       Change the stream position to the given *offset*.  Behaviour depends
@@ -755,14 +761,26 @@
    sequences) can be used.  Any other error handling name that has been
    registered with :func:`codecs.register_error` is also valid.
 
-   *newline* can be ``None``, ``''``, ``'\n'``, ``'\r'``, or ``'\r\n'``.  It
-   controls the handling of line endings.  If it is ``None``, universal newlines
-   is enabled.  With this enabled, on input, the lines endings ``'\n'``,
-   ``'\r'``, or ``'\r\n'`` are translated to ``'\n'`` before being returned to
-   the caller.  Conversely, on output, ``'\n'`` is translated to the system
-   default line separator, :data:`os.linesep`.  If *newline* is any other of its
-   legal values, that newline becomes the newline when the file is read and it
-   is returned untranslated.  On output, ``'\n'`` is converted to the *newline*.
+   .. index::
+      single: universal newlines; io.TextIOWrapper class
+
+   *newline* controls how line endings are handled.  It can be ``None``,
+   ``''``, ``'\n'``, ``'\r'``, and ``'\r\n'``.  It works as follows:
+
+   * When reading input from the stream, if *newline* is ``None``,
+     :term:`universal newlines` mode is enabled.  Lines in the input can end in
+     ``'\n'``, ``'\r'``, or ``'\r\n'``, and these are translated into ``'\n'``
+     before being returned to the caller.  If it is ``''``, universal newlines
+     mode is enabled, but line endings are returned to the caller untranslated.
+     If it has any of the other legal values, input lines are only terminated
+     by the given string, and the line ending is returned to the caller
+     untranslated.
+
+   * When writing output to the stream, if *newline* is ``None``, any ``'\n'``
+     characters written are translated to the system default line separator,
+     :data:`os.linesep`.  If *newline* is ``''`` or ``'\n'``, no translation
+     takes place.  If *newline* is any of the other legal values, any ``'\n'``
+     characters written are translated to the given string.
 
    If *line_buffering* is ``True``, :meth:`flush` is implied when a call to
    write contains a newline character.
@@ -809,10 +827,13 @@
       output.close()
 
 
+.. index::
+   single: universal newlines; io.IncrementalNewlineDecoder class
+
 .. class:: IncrementalNewlineDecoder
 
-   A helper codec that decodes newlines for universal newlines mode.  It
-   inherits :class:`codecs.IncrementalDecoder`.
+   A helper codec that decodes newlines for :term:`universal newlines` mode.
+   It inherits :class:`codecs.IncrementalDecoder`.
 
 
 Performance
diff -r 3d0686d90f55 Doc/library/json.rst
--- a/Doc/library/json.rst
+++ b/Doc/library/json.rst
@@ -6,8 +6,10 @@
 .. moduleauthor:: Bob Ippolito <bob@redivi.com>
 .. sectionauthor:: Bob Ippolito <bob@redivi.com>
 
-`JSON (JavaScript Object Notation) <http://json.org>`_ is a subset of JavaScript
-syntax (ECMA-262 3rd edition) used as a lightweight data interchange format.
+`JSON (JavaScript Object Notation) <http://json.org>`_, specified by
+:rfc:`4627`, is a lightweight data interchange format based on a subset of
+`JavaScript <http://en.wikipedia.org/wiki/JavaScript>`_ syntax (`ECMA-262 3rd
+edition <http://www.ecma-international.org/publications/files/ECMA-ST-ARCH/ECMA-262,%203rd%20edition,%20December%201999.pdf>`_).
 
 :mod:`json` exposes an API familiar to users of the standard library
 :mod:`marshal` and :mod:`pickle` modules.
@@ -90,7 +92,7 @@
     ['[2.0', ', 1.0', ']']
 
 
-.. highlight:: none
+.. highlight:: bash
 
 Using json.tool from the shell to validate and pretty-print::
 
@@ -98,15 +100,17 @@
     {
         "json": "obj"
     }
-    $ echo '{ 1.2:3.4}' | python -mjson.tool
-    Expecting property name: line 1 column 2 (char 2)
+    $ echo '{1.2:3.4}' | python -mjson.tool
+    Expecting property name enclosed in double quotes: line 1 column 1 (char 1)
 
-.. highlight:: python
+.. highlight:: python3
 
 .. note::
 
-   The JSON produced by this module's default settings is a subset of
-   YAML, so it may be used as a serializer for that as well.
+   JSON is a subset of `YAML <http://yaml.org/>`_ 1.2.  The JSON produced by
+   this module's default settings (in particular, the default *separators*
+   value) is also a subset of YAML 1.0 and 1.1.  This module can thus also be
+   used as a YAML serializer.
 
 
 Basic Usage
@@ -115,7 +119,7 @@
 .. function:: dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, default=None, **kw)
 
    Serialize *obj* as a JSON formatted stream to *fp* (a ``.write()``-supporting
-   file-like object).
+   :term:`file-like object`).
 
    If *skipkeys* is ``True`` (default: ``False``), then dict keys that are not
    of a basic type (:class:`str`, :class:`int`, :class:`float`, :class:`bool`,
@@ -168,16 +172,25 @@
       so trying to serialize multiple objects with repeated calls to
       :func:`dump` using the same *fp* will result in an invalid JSON file.
 
+   .. note::
+
+      Keys in key/value pairs of JSON are always of the type :class:`str`. When
+      a dictionary is converted into JSON, all the keys of the dictionary are
+      coerced to strings. As a result of this, if a dictionary is converted
+      into JSON and then back into a dictionary, the dictionary may not equal
+      the original one. That is, ``loads(dumps(x)) != x`` if ``x`` has non-string
+      keys.
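+
+      For example::
+
+         >>> import json
+         >>> json.loads(json.dumps({1: 'one'}))
+         {'1': 'one'}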
 
 .. function:: load(fp, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw)
 
-   Deserialize *fp* (a ``.read()``-supporting file-like object containing a JSON
-   document) to a Python object.
+   Deserialize *fp* (a ``.read()``-supporting :term:`file-like object`
+   containing a JSON document) to a Python object.
 
    *object_hook* is an optional function that will be called with the result of
    any object literal decoded (a :class:`dict`).  The return value of
    *object_hook* will be used instead of the :class:`dict`.  This feature can be used
-   to implement custom decoders (e.g. JSON-RPC class hinting).
+   to implement custom decoders (e.g. `JSON-RPC <http://www.jsonrpc.org>`_
+   class hinting).
 
    *object_pairs_hook* is an optional function that will be called with the
    result of any object literal decoded with an ordered list of pairs.  The
@@ -201,10 +214,13 @@
    (e.g. :class:`float`).
 
    *parse_constant*, if specified, will be called with one of the following
-   strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``, ``'null'``, ``'true'``,
-   ``'false'``.  This can be used to raise an exception if invalid JSON numbers
+   strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``.
+   This can be used to raise an exception if invalid JSON numbers
    are encountered.
 
+   .. versionchanged:: 3.1
+      *parse_constant* doesn't get called on 'null', 'true', 'false' anymore.
+
    To use a custom :class:`JSONDecoder` subclass, specify it with the ``cls``
    kwarg; otherwise :class:`JSONDecoder` is used.  Additional keyword arguments
    will be passed to the constructor of the class.
@@ -219,7 +235,7 @@
    *encoding* which is ignored and deprecated.
 
 
-Encoders and decoders
+Encoders and Decoders
 ---------------------
 
 .. class:: JSONDecoder(object_hook=None, parse_float=None, parse_int=None, parse_constant=None, strict=True, object_pairs_hook=None)
@@ -404,3 +420,103 @@
 
             for chunk in json.JSONEncoder().iterencode(bigobject):
                 mysocket.write(chunk)
+
+
+Standard Compliance
+-------------------
+
+The JSON format is specified by :rfc:`4627`.  This section details this
+module's level of compliance with the RFC.  For simplicity,
+:class:`JSONEncoder` and :class:`JSONDecoder` subclasses, and parameters other
+than those explicitly mentioned, are not considered.
+
+This module does not comply with the RFC in a strict fashion, implementing some
+extensions that are valid JavaScript but not valid JSON.  In particular:
+
+- Top-level non-object, non-array values are accepted and output;
+- Infinite and NaN number values are accepted and output;
+- Repeated names within an object are accepted, and only the value of the last
+  name-value pair is used.
+
+Since the RFC permits RFC-compliant parsers to accept input texts that are not
+RFC-compliant, this module's deserializer is technically RFC-compliant under
+default settings.
+
+Character Encodings
+^^^^^^^^^^^^^^^^^^^
+
+The RFC recommends that JSON be represented using either UTF-8, UTF-16, or
+UTF-32, with UTF-8 being the default.
+
+As permitted, though not required, by the RFC, this module's serializer sets
+*ensure_ascii=True* by default, thus escaping the output so that the resulting
+strings only contain ASCII characters.
+
+Other than the *ensure_ascii* parameter, this module is defined strictly in
+terms of conversion between Python objects and
+:class:`Unicode strings <str>`, and thus does not otherwise address the issue
+of character encodings.
+
+
+Top-level Non-Object, Non-Array Values
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The RFC specifies that the top-level value of a JSON text must be either a
+JSON object or array (Python :class:`dict` or :class:`list`).  This module's
+deserializer also accepts input texts consisting solely of a
+JSON null, boolean, number, or string value::
+
+   >>> just_a_json_string = '"spam and eggs"'  # Not by itself a valid JSON text
+   >>> json.loads(just_a_json_string)
+   'spam and eggs'
+
+This module itself does not include a way to request that such input texts be
+regarded as illegal.  Likewise, this module's serializer also accepts single
+Python :data:`None`, :class:`bool`, numeric, and :class:`str`
+values as input and will generate output texts consisting solely of a top-level
+JSON null, boolean, number, or string value without raising an exception::
+
+   >>> neither_a_list_nor_a_dict = "spam and eggs"
+   >>> json.dumps(neither_a_list_nor_a_dict)  # The result is not a valid JSON text
+   '"spam and eggs"'
+
+This module's serializer does not itself include a way to enforce the
+aforementioned constraint.
+
+
+Infinite and NaN Number Values
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The RFC does not permit the representation of infinite or NaN number values.
+Despite that, by default, this module accepts and outputs ``Infinity``,
+``-Infinity``, and ``NaN`` as if they were valid JSON number literal values::
+
+   >>> # Neither of these calls raises an exception, but the results are not valid JSON
+   >>> json.dumps(float('-inf'))
+   '-Infinity'
+   >>> json.dumps(float('nan'))
+   'NaN'
+   >>> # Same when deserializing
+   >>> json.loads('-Infinity')
+   -inf
+   >>> json.loads('NaN')
+   nan
+
+In the serializer, the *allow_nan* parameter can be used to alter this
+behavior.  In the deserializer, the *parse_constant* parameter can be used to
+alter this behavior.
+
+
+Repeated Names Within an Object
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The RFC specifies that the names within a JSON object should be unique, but
+does not specify how repeated names in JSON objects should be handled.  By
+default, this module does not raise an exception; instead, it ignores all but
+the last name-value pair for a given name::
+
+   >>> weird_json = '{"x": 1, "x": 2, "x": 3}'
+   >>> json.loads(weird_json)
+   {'x': 3}
+
+The *object_pairs_hook* parameter can be used to alter this behavior.
diff -r 3d0686d90f55 Doc/library/locale.rst
--- a/Doc/library/locale.rst
+++ b/Doc/library/locale.rst
@@ -160,22 +160,22 @@
 
    .. data:: D_T_FMT
 
-      Get a string that can be used as a format string for :func:`strftime` to
+      Get a string that can be used as a format string for :func:`time.strftime` to
       represent date and time in a locale-specific way.
 
    .. data:: D_FMT
 
-      Get a string that can be used as a format string for :func:`strftime` to
+      Get a string that can be used as a format string for :func:`time.strftime` to
       represent a date in a locale-specific way.
 
    .. data:: T_FMT
 
-      Get a string that can be used as a format string for :func:`strftime` to
+      Get a string that can be used as a format string for :func:`time.strftime` to
       represent a time in a locale-specific way.
 
    .. data:: T_FMT_AMPM
 
-      Get a format string for :func:`strftime` to represent time in the am/pm
+      Get a format string for :func:`time.strftime` to represent time in the am/pm
       format.
 
    .. data:: DAY_1 ... DAY_7
@@ -239,24 +239,24 @@
       then-emperor's reign.
 
       Normally it should not be necessary to use this value directly. Specifying
-      the ``E`` modifier in their format strings causes the :func:`strftime`
+      the ``E`` modifier in their format strings causes the :func:`time.strftime`
       function to use this information.  The format of the returned string is not
       specified, and therefore you should not assume knowledge of it on different
       systems.
 
    .. data:: ERA_D_T_FMT
 
-      Get a format string for :func:`strftime` to represent date and time in a
+      Get a format string for :func:`time.strftime` to represent date and time in a
       locale-specific era-based way.
 
    .. data:: ERA_D_FMT
 
-      Get a format string for :func:`strftime` to represent a date in a
+      Get a format string for :func:`time.strftime` to represent a date in a
       locale-specific era-based way.
 
    .. data:: ERA_T_FMT
 
-      Get a format string for :func:`strftime` to represent a time in a
+      Get a format string for :func:`time.strftime` to represent a time in a
       locale-specific era-based way.
 
    .. data:: ALT_DIGITS
diff -r 3d0686d90f55 Doc/library/logging.config.rst
--- a/Doc/library/logging.config.rst
+++ b/Doc/library/logging.config.rst
@@ -109,6 +109,19 @@
    send it to the socket as a string of bytes preceded by a four-byte length
    string packed in binary using ``struct.pack('>L', n)``.
 
+   .. note:: Because portions of the configuration are passed through
+      :func:`eval`, use of this function may open its users to a security risk.
+      While the function only binds to a socket on ``localhost``, and so does
+      not accept connections from remote machines, there are scenarios where
+      untrusted code could be run under the account of the process which calls
+      :func:`listen`. Specifically, if the process calling :func:`listen` runs
+      on a multi-user machine where users cannot trust each other, then a
+      malicious user could arrange to run essentially arbitrary code in a
+      victim user's process, simply by connecting to the victim's
+      :func:`listen` socket and sending a configuration which runs whatever
+      code the attacker wants to have executed in the victim's process. This is
+      especially easy to do if the default port is used, but not hard even if a
+      different port is used.
 
 .. function:: stopListening()
 
@@ -694,6 +707,12 @@
 :class:`Formatter` subclass.  Subclasses of :class:`Formatter` can present
 exception tracebacks in an expanded or condensed format.
 
+.. note:: Due to the use of :func:`eval` as described above, there are
+   potential security risks which result from using :func:`listen` to send
+   and receive configurations via sockets. The risks are limited to situations where
+   multiple users with no mutual trust run code on the same machine; see the
+   :func:`listen` documentation for more information.
+
 .. seealso::
 
    Module :mod:`logging`
diff -r 3d0686d90f55 Doc/library/logging.handlers.rst
--- a/Doc/library/logging.handlers.rst
+++ b/Doc/library/logging.handlers.rst
@@ -654,7 +654,7 @@
 :class:`BufferingHandler`, which is an abstract class. This buffers logging
 records in memory. Whenever each record is added to the buffer, a check is made
 by calling :meth:`shouldFlush` to see if the buffer should be flushed.  If it
-should, then :meth:`flush` is expected to do the needful.
+should, then :meth:`flush` is expected to do the flushing.
 
 
 .. class:: BufferingHandler(capacity)
diff -r 3d0686d90f55 Doc/library/logging.rst
--- a/Doc/library/logging.rst
+++ b/Doc/library/logging.rst
@@ -49,9 +49,22 @@
 Logger Objects
 --------------
 
-Loggers have the following attributes and methods. Note that Loggers are never
+Loggers have the following attributes and methods.  Note that Loggers are never
 instantiated directly, but always through the module-level function
-``logging.getLogger(name)``.
+``logging.getLogger(name)``.  Multiple calls to :func:`getLogger` with the same
+name will always return a reference to the same Logger object.
+
+The ``name`` is potentially a period-separated hierarchical value, like
+``foo.bar.baz`` (though it could also be just plain ``foo``, for example).
+Loggers that are further down in the hierarchical list are children of loggers
+higher up in the list.  For example, given a logger with a name of ``foo``,
+loggers with names of ``foo.bar``, ``foo.bar.baz``, and ``foo.bam`` are all
+descendants of ``foo``.  The logger name hierarchy is analogous to the Python
+package hierarchy, and identical to it if you organise your loggers on a
+per-module basis using the recommended construction
+``logging.getLogger(__name__)``.  That's because in a module, ``__name__``
+is the module's name in the Python package namespace.
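+
+For example (the logger names used here are arbitrary)::
+
+   >>> import logging
+   >>> logging.getLogger('foo') is logging.getLogger('foo')
+   True
+   >>> logging.getLogger('foo').getChild('bar') is logging.getLogger('foo.bar')
+   True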
+
 
 .. class:: Logger
 
@@ -159,7 +172,7 @@
 
       FORMAT = '%(asctime)-15s %(clientip)s %(user)-8s %(message)s'
       logging.basicConfig(format=FORMAT)
-      d = { 'clientip' : '192.168.0.1', 'user' : 'fbloggs' }
+      d = {'clientip': '192.168.0.1', 'user': 'fbloggs'}
       logger = logging.getLogger('tcpserver')
       logger.warning('Protocol problem: %s', 'connection reset', extra=d)
 
@@ -717,6 +730,9 @@
 | threadName     | ``%(threadName)s``      | Thread name (if available).                   |
 +----------------+-------------------------+-----------------------------------------------+
 
+.. versionchanged:: 3.1
+   *processName* was added.
+
 
 .. _logger-adapter:
 
@@ -929,7 +945,8 @@
    effect is to disable all logging calls of severity *lvl* and below, so that
    if you call it with a value of INFO, then all INFO and DEBUG events would be
    discarded, whereas those of severity WARNING and above would be processed
-   according to the logger's effective level.
+   according to the logger's effective level. To undo the effect of a call to
+   ``logging.disable(lvl)``, call ``logging.disable(logging.NOTSET)``.
 
 
 .. function:: addLevelName(lvl, levelName)
@@ -1077,7 +1094,7 @@
    If *capture* is ``True``, warnings issued by the :mod:`warnings` module will
    be redirected to the logging system. Specifically, a warning will be
    formatted using :func:`warnings.formatwarning` and the resulting string
-   logged to a logger named ``'py.warnings'`` with a severity of ``'WARNING'``.
+   logged to a logger named ``'py.warnings'`` with a severity of :const:`WARNING`.
 
    If *capture* is ``False``, the redirection of warnings to the logging system
    will stop, and warnings will be redirected to their original destinations
diff -r 3d0686d90f55 Doc/library/mailbox.rst
--- a/Doc/library/mailbox.rst
+++ b/Doc/library/mailbox.rst
@@ -89,7 +89,8 @@
       format-specific information is used. Otherwise, reasonable defaults for
       format-specific information are used.
 
-      .. versionchanged:: 3.2 support for binary input
+      .. versionchanged:: 3.2
+         Support for binary input was added.
 
 
    .. method:: remove(key)
diff -r 3d0686d90f55 Doc/library/markup.rst
--- a/Doc/library/markup.rst
+++ b/Doc/library/markup.rst
@@ -23,7 +23,7 @@
    html.rst
    html.parser.rst
    html.entities.rst
-   pyexpat.rst
+   xml.etree.elementtree.rst
    xml.dom.rst
    xml.dom.minidom.rst
    xml.dom.pulldom.rst
@@ -31,4 +31,4 @@
    xml.sax.handler.rst
    xml.sax.utils.rst
    xml.sax.reader.rst
-   xml.etree.elementtree.rst
+   pyexpat.rst
diff -r 3d0686d90f55 Doc/library/multiprocessing.rst
--- a/Doc/library/multiprocessing.rst
+++ b/Doc/library/multiprocessing.rst
@@ -79,7 +79,8 @@
     def info(title):
         print(title)
         print('module name:', __name__)
-        print('parent process:', os.getppid())
+        if hasattr(os, 'getppid'):  # only available on Unix
+            print('parent process:', os.getppid())
         print('process id:', os.getpid())
 
     def f(name):
@@ -1114,7 +1115,7 @@
     HELLO WORLD
     [(3.515625, 39.0625), (33.0625, 4.0), (5.640625, 90.25)]
 
-.. highlight:: python
+.. highlight:: python3
 
 
 .. _multiprocessing-managers:
@@ -1150,10 +1151,10 @@
    *address* is the address on which the manager process listens for new
    connections.  If *address* is ``None`` then an arbitrary one is chosen.
 
-   *authkey* is the authentication key which will be used to check the validity
-   of incoming connections to the server process.  If *authkey* is ``None`` then
-   ``current_process().authkey``.  Otherwise *authkey* is used and it
-   must be a string.
+   *authkey* is the authentication key which will be used to check the
+   validity of incoming connections to the server process.  If
+   *authkey* is ``None`` then ``current_process().authkey`` is used.
+   Otherwise *authkey* is used and it must be a byte string.
 
    .. method:: start([initializer[, initargs]])
 
@@ -1167,7 +1168,7 @@
       :meth:`serve_forever` method::
 
       >>> from multiprocessing.managers import BaseManager
-      >>> manager = BaseManager(address=('', 50000), authkey='abc')
+      >>> manager = BaseManager(address=('', 50000), authkey=b'abc')
       >>> server = manager.get_server()
       >>> server.serve_forever()
 
@@ -1178,7 +1179,7 @@
       Connect a local manager object to a remote manager process::
 
       >>> from multiprocessing.managers import BaseManager
-      >>> m = BaseManager(address=('127.0.0.1', 5000), authkey='abc')
+      >>> m = BaseManager(address=('127.0.0.1', 5000), authkey=b'abc')
       >>> m.connect()
 
    .. method:: shutdown()
@@ -1379,7 +1380,7 @@
    >>> queue = queue.Queue()
    >>> class QueueManager(BaseManager): pass
    >>> QueueManager.register('get_queue', callable=lambda:queue)
-   >>> m = QueueManager(address=('', 50000), authkey='abracadabra')
+   >>> m = QueueManager(address=('', 50000), authkey=b'abracadabra')
    >>> s = m.get_server()
    >>> s.serve_forever()
 
@@ -1388,7 +1389,7 @@
    >>> from multiprocessing.managers import BaseManager
    >>> class QueueManager(BaseManager): pass
    >>> QueueManager.register('get_queue')
-   >>> m = QueueManager(address=('foo.bar.org', 50000), authkey='abracadabra')
+   >>> m = QueueManager(address=('foo.bar.org', 50000), authkey=b'abracadabra')
    >>> m.connect()
    >>> queue = m.get_queue()
    >>> queue.put('hello')
@@ -1398,7 +1399,7 @@
    >>> from multiprocessing.managers import BaseManager
    >>> class QueueManager(BaseManager): pass
    >>> QueueManager.register('get_queue')
-   >>> m = QueueManager(address=('foo.bar.org', 50000), authkey='abracadabra')
+   >>> m = QueueManager(address=('foo.bar.org', 50000), authkey=b'abracadabra')
    >>> m.connect()
    >>> queue = m.get_queue()
    >>> queue.get()
@@ -1422,7 +1423,7 @@
     >>> class QueueManager(BaseManager): pass
     ...
     >>> QueueManager.register('get_queue', callable=lambda: queue)
-    >>> m = QueueManager(address=('', 50000), authkey='abracadabra')
+    >>> m = QueueManager(address=('', 50000), authkey=b'abracadabra')
     >>> s = m.get_server()
     >>> s.serve_forever()
 
@@ -1767,9 +1768,9 @@
    generally be omitted since it can usually be inferred from the format of
    *address*. (See :ref:`multiprocessing-address-formats`)
 
-   If *authenticate* is ``True`` or *authkey* is a string then digest
+   If *authenticate* is ``True`` or *authkey* is a byte string then digest
    authentication is used.  The key used for authentication will be either
-   *authkey* or ``current_process().authkey)`` if *authkey* is ``None``.
+   *authkey* or ``current_process().authkey`` if *authkey* is ``None``.
    If authentication fails then :exc:`AuthenticationError` is raised.  See
    :ref:`multiprocessing-auth-keys`.
 
@@ -1804,8 +1805,8 @@
    If *authenticate* is ``True`` (``False`` by default) or *authkey* is not
    ``None`` then digest authentication is used.
 
-   If *authkey* is a string then it will be used as the authentication key;
-   otherwise it must be *None*.
+   If *authkey* is a byte string then it will be used as the
+   authentication key; otherwise it must be *None*.
 
    If *authkey* is ``None`` and *authenticate* is ``True`` then
    ``current_process().authkey`` is used as the authentication key.  If
@@ -1918,12 +1919,13 @@
 risk.  Therefore :class:`Listener` and :func:`Client` use the :mod:`hmac` module
 to provide digest authentication.
 
-An authentication key is a string which can be thought of as a password: once a
-connection is established both ends will demand proof that the other knows the
-authentication key.  (Demonstrating that both ends are using the same key does
-**not** involve sending the key over the connection.)
-
-If authentication is requested but do authentication key is specified then the
+An authentication key is a byte string which can be thought of as a
+password: once a connection is established both ends will demand proof
+that the other knows the authentication key.  (Demonstrating that both
+ends are using the same key does **not** involve sending the key over
+the connection.)
+
+If authentication is requested but no authentication key is specified then the
 return value of ``current_process().authkey`` is used (see
 :class:`~multiprocessing.Process`).  This value will automatically inherited by
 any :class:`~multiprocessing.Process` object that the current process creates.
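
For illustration only, a minimal sketch of digest authentication with a
byte-string key; the address and key below are made up, and the two halves
would normally run in different processes::

   from multiprocessing.connection import Listener, Client

   address = ('localhost', 6000)      # hypothetical free port
   key = b'secret password'           # authentication keys must be byte strings

   # in one process (the server):
   listener = Listener(address, authkey=key)
   conn = listener.accept()           # blocks until an authenticated client connects

   # in another process (the client):
   conn = Client(address, authkey=key)
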
@@ -2256,16 +2258,19 @@
 Demonstration of how to create and use customized managers and proxies:
 
 .. literalinclude:: ../includes/mp_newtype.py
+   :language: python3
 
 
 Using :class:`Pool`:
 
 .. literalinclude:: ../includes/mp_pool.py
+   :language: python3
 
 
 Synchronization types like locks, conditions and queues:
 
 .. literalinclude:: ../includes/mp_synchronize.py
+   :language: python3
 
 
 An example showing how to use queues to feed tasks to a collection of worker
diff -r 3d0686d90f55 Doc/library/nis.rst
--- a/Doc/library/nis.rst
+++ b/Doc/library/nis.rst
@@ -17,7 +17,7 @@
 The :mod:`nis` module defines the following functions:
 
 
-.. function:: match(key, mapname[, domain=default_domain])
+.. function:: match(key, mapname, domain=default_domain)
 
    Return the match for *key* in map *mapname*, or raise an error
    (:exc:`nis.error`) if there is none. Both should be strings, *key* is 8-bit
@@ -30,7 +30,7 @@
    unspecified, lookup is in the default NIS domain.
 
 
-.. function:: cat(mapname[, domain=default_domain])
+.. function:: cat(mapname, domain=default_domain)
 
    Return a dictionary mapping *key* to *value* such that ``match(key,
    mapname)==value``. Note that both keys and values of the dictionary are
@@ -42,7 +42,7 @@
    unspecified, lookup is in the default NIS domain.
 
 
-.. function:: maps([domain=default_domain])
+.. function:: maps(domain=default_domain)
 
    Return a list of all valid maps.
 
diff -r 3d0686d90f55 Doc/library/operator.rst
--- a/Doc/library/operator.rst
+++ b/Doc/library/operator.rst
@@ -340,7 +340,7 @@
 +-----------------------+-------------------------+---------------------------------------+
 | Containment Test      | ``obj in seq``          | ``contains(seq, obj)``                |
 +-----------------------+-------------------------+---------------------------------------+
-| Division              | ``a / b``               | ``div(a, b)``                         |
+| Division              | ``a / b``               | ``truediv(a, b)``                     |
 +-----------------------+-------------------------+---------------------------------------+
 | Division              | ``a // b``              | ``floordiv(a, b)``                    |
 +-----------------------+-------------------------+---------------------------------------+
@@ -404,7 +404,7 @@
 +-----------------------+-------------------------+---------------------------------------+
 
 Inplace Operators
-=================
+-----------------
 
 Many operations have an "in-place" version.  Listed below are functions
 providing a more primitive access to in-place operators than the usual syntax
diff -r 3d0686d90f55 Doc/library/os.rst
--- a/Doc/library/os.rst
+++ b/Doc/library/os.rst
@@ -227,6 +227,20 @@
 
    Availability: Unix.
 
+   .. note:: On Mac OS X, :func:`getgroups` behavior differs somewhat from
+      other Unix platforms. If the Python interpreter was built with a
+      deployment target of :const:`10.5` or earlier, :func:`getgroups` returns
+      the list of effective group ids associated with the current user process;
+      this list is limited to a system-defined number of entries, typically 16,
+      and may be modified by calls to :func:`setgroups` if suitably privileged.
+      If built with a deployment target greater than :const:`10.5`,
+      :func:`getgroups` returns the current group access list for the user
+      associated with the effective user id of the process; the group access
+      list may change over the lifetime of the process, it is not affected by
+      calls to :func:`setgroups`, and its length is not limited to 16.  The
+      deployment target value, :const:`MACOSX_DEPLOYMENT_TARGET`, can be
+      obtained with :func:`sysconfig.get_config_var`.
+
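
As a quick, illustrative way to check which behaviour applies (Unix only,
since :func:`os.getgroups` is not available elsewhere)::

   import os, sysconfig

   # deployment target the interpreter was built with; may be None
   print(sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET'))
   print(os.getgroups())
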
 
 .. function:: initgroups(username, gid)
 
@@ -389,6 +403,10 @@
 
    Availability: Unix.
 
+   .. note:: On Mac OS X, the length of *groups* may not exceed the
+      system-defined maximum number of effective group ids, typically 16.
+      See the documentation for :func:`getgroups` for cases where it may not
+      return the same group list as was set by calling :func:`setgroups`.
 
 .. function:: setpgrp()
 
@@ -527,22 +545,12 @@
 These functions create new :term:`file objects <file object>`. (See also :func:`open`.)
 
 
-.. function:: fdopen(fd[, mode[, bufsize]])
-
-   .. index:: single: I/O control; buffering
-
-   Return an open file object connected to the file descriptor *fd*.  The *mode*
-   and *bufsize* arguments have the same meaning as the corresponding arguments to
-   the built-in :func:`open` function.
-
-   When specified, the *mode* argument must start with one of the letters
-   ``'r'``, ``'w'``, or ``'a'``, otherwise a :exc:`ValueError` is raised.
-
-   On Unix, when the *mode* argument starts with ``'a'``, the *O_APPEND* flag is
-   set on the file descriptor (which the :c:func:`fdopen` implementation already
-   does on most platforms).
-
-   Availability: Unix, Windows.
+.. function:: fdopen(fd, *args, **kwargs)
+
+   Return an open file object connected to the file descriptor *fd*.
+   This is an alias of :func:`open` and accepts the same arguments.
+   The only difference is that the first argument of :func:`fdopen`
+   must always be an integer.
 
 
 .. _os-fd-ops:
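
For illustration, a small sketch of the descriptor-based workflow described
above; the file name is made up::

   import os

   fd = os.open('spam.txt', os.O_WRONLY | os.O_CREAT)   # low-level descriptor
   f = os.fdopen(fd, 'w')                                # same arguments as open()
   f.write('hello\n')
   f.close()                                             # also closes the descriptor
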
@@ -1127,7 +1135,7 @@
    Availability: Unix.
 
 
-.. function:: mknod(filename[, mode=0o600[, device]])
+.. function:: mknod(filename[, mode=0o600[, device=0]])
 
    Create a filesystem node (file, device special file or named pipe) named
    *filename*. *mode* specifies both the permissions to use and the type of node
diff -r 3d0686d90f55 Doc/library/ossaudiodev.rst
--- a/Doc/library/ossaudiodev.rst
+++ b/Doc/library/ossaudiodev.rst
@@ -277,7 +277,7 @@
 simple calculations.
 
 
-.. method:: oss_audio_device.setparameters(format, nchannels, samplerate [, strict=False])
+.. method:: oss_audio_device.setparameters(format, nchannels, samplerate[, strict=False])
 
    Set the key audio sampling parameters---sample format, number of channels, and
    sampling rate---in one method call.  *format*,  *nchannels*, and *samplerate*
diff -r 3d0686d90f55 Doc/library/pickle.rst
--- a/Doc/library/pickle.rst
+++ b/Doc/library/pickle.rst
@@ -85,45 +85,48 @@
 ------------------
 
 .. index::
-   single: XDR
    single: External Data Representation
 
 The data format used by :mod:`pickle` is Python-specific.  This has the
 advantage that there are no restrictions imposed by external standards such as
-XDR (which can't represent pointer sharing); however it means that non-Python
-programs may not be able to reconstruct pickled Python objects.
+JSON or XDR (which can't represent pointer sharing); however it means that
+non-Python programs may not be able to reconstruct pickled Python objects.
 
-By default, the :mod:`pickle` data format uses a compact binary representation.
+By default, the :mod:`pickle` data format uses a relatively compact binary
+representation.  If you need optimal size characteristics, you can efficiently
+:doc:`compress <archiving>` pickled data.
+
 The module :mod:`pickletools` contains tools for analyzing data streams
-generated by :mod:`pickle`.
+generated by :mod:`pickle`.  :mod:`pickletools` source code has extensive
+comments about opcodes used by pickle protocols.
 
 There are currently 4 different protocols which can be used for pickling.
 
-* Protocol version 0 is the original human-readable protocol and is
+* Protocol version 0 is the original "human-readable" protocol and is
   backwards compatible with earlier versions of Python.
 
-* Protocol version 1 is the old binary format which is also compatible with
+* Protocol version 1 is an old binary format which is also compatible with
   earlier versions of Python.
 
 * Protocol version 2 was introduced in Python 2.3.  It provides much more
-  efficient pickling of :term:`new-style class`\es.
+  efficient pickling of :term:`new-style class`\es.  Refer to :pep:`307` for
+  information about improvements brought by protocol 2.
 
-* Protocol version 3 was added in Python 3.0.  It has explicit support for
-  bytes and cannot be unpickled by Python 2.x pickle modules.  This is
-  the current recommended protocol, use it whenever it is possible.
-
-Refer to :pep:`307` for information about improvements brought by
-protocol 2.  See :mod:`pickletools`'s source code for extensive
-comments about opcodes used by pickle protocols.
+* Protocol version 3 was added in Python 3.  It has explicit support for
+  :class:`bytes` objects and cannot be unpickled by Python 2.x.  This is
+  the default as well as the current recommended protocol; use it whenever
+  possible.
 
 
 Module Interface
 ----------------
 
-To serialize an object hierarchy, you first create a pickler, then you call the
-pickler's :meth:`dump` method.  To de-serialize a data stream, you first create
-an unpickler, then you call the unpickler's :meth:`load` method.  The
-:mod:`pickle` module provides the following constant:
+To serialize an object hierarchy, you simply call the :func:`dumps` function.
+Similarly, to de-serialize a data stream, you call the :func:`loads` function.
+However, if you want more control over serialization and de-serialization,
+you can create a :class:`Pickler` or an :class:`Unpickler` object, respectively.
+
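
For illustration, round-tripping a simple object with the module-level
functions::

   import pickle

   data = {'spam': [1, 2, 3], 'eggs': None}
   blob = pickle.dumps(data)           # bytes, using the default protocol (3)
   assert pickle.loads(blob) == data
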
+The :mod:`pickle` module provides the following constants:
 
 
 .. data:: HIGHEST_PROTOCOL
@@ -134,8 +137,7 @@
 .. data:: DEFAULT_PROTOCOL
 
    The default protocol used for pickling.  May be less than HIGHEST_PROTOCOL.
-   Currently the default protocol is 3; a backward-incompatible protocol
-   designed for Python 3.0.
+   Currently the default protocol is 3, a new protocol designed for Python 3.0.
 
 
 The :mod:`pickle` module provides the following functions to make the pickling
diff -r 3d0686d90f55 Doc/library/platform.rst
--- a/Doc/library/platform.rst
+++ b/Doc/library/platform.rst
@@ -3,7 +3,7 @@
 
 .. module:: platform
    :synopsis: Retrieves as much platform identifying data as possible.
-.. moduleauthor:: Marc-Andre Lemburg <mal@egenix.com>
+.. moduleauthor:: Marc-André Lemburg <mal@egenix.com>
 .. sectionauthor:: Bjorn Pettersen <bpettersen@corp.fairisaac.com>
 
 **Source code:** :source:`Lib/platform.py`
diff -r 3d0686d90f55 Doc/library/queue.rst
--- a/Doc/library/queue.rst
+++ b/Doc/library/queue.rst
@@ -15,8 +15,8 @@
 availability of thread support in Python; see the :mod:`threading`
 module.
 
-Implements three types of queue whose only difference is the order that
-the entries are retrieved.  In a FIFO queue, the first tasks added are
+The module implements three types of queue, which differ only in the order in
+which the entries are retrieved.  In a FIFO queue, the first tasks added are
 the first retrieved. In a LIFO queue, the most recently added entry is
 the first retrieved (operating like a stack).  With a priority queue,
 the entries are kept sorted (using the :mod:`heapq` module) and the
diff -r 3d0686d90f55 Doc/library/random.rst
--- a/Doc/library/random.rst
+++ b/Doc/library/random.rst
@@ -74,7 +74,7 @@
 
    *state* should have been obtained from a previous call to :func:`getstate`, and
    :func:`setstate` restores the internal state of the generator to what it was at
-   the time :func:`setstate` was called.
+   the time :func:`getstate` was called.
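
A minimal sketch of the save-and-restore pattern described above::

   import random

   state = random.getstate()
   first = random.random()
   random.setstate(state)              # rewind the generator
   assert random.random() == first     # the same value is produced again
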
 
 
 .. function:: getrandbits(k)
@@ -309,7 +309,7 @@
    >>> random.sample([1, 2, 3, 4, 5],  3)   # Three samples without replacement
    [4, 1, 5]
 
-A common task is to make a :func:`random.choice` with weighted probababilites.
+A common task is to make a :func:`random.choice` with weighted probabilities.
 
 If the weights are small integer ratios, a simple technique is to build a sample
 population with repeats::
diff -r 3d0686d90f55 Doc/library/re.rst
--- a/Doc/library/re.rst
+++ b/Doc/library/re.rst
@@ -278,7 +278,7 @@
    lookbehind will back up 3 characters and check if the contained pattern matches.
    The contained pattern must only match strings of some fixed length, meaning that
    ``abc`` or ``a|b`` are allowed, but ``a*`` and ``a{3,4}`` are not.  Note that
-   patterns which start with positive lookbehind assertions will never match at the
+   patterns which start with positive lookbehind assertions will not match at the
    beginning of the string being searched; you will most likely want to use the
    :func:`search` function rather than the :func:`match` function:
 
@@ -330,16 +330,22 @@
    Matches the empty string, but only at the beginning or end of a word.
    A word is defined as a sequence of Unicode alphanumeric or underscore
    characters, so the end of a word is indicated by whitespace or a
-   non-alphanumeric, non-underscore Unicode character. Note that
-   formally, ``\b`` is defined as the boundary between a ``\w`` and a
-   ``\W`` character (or vice versa). By default Unicode alphanumerics
-   are the ones used, but this can be changed by using the :const:`ASCII`
-   flag.  Inside a character range, ``\b`` represents the backspace
-   character, for compatibility with Python's string literals.
+   non-alphanumeric, non-underscore Unicode character.  Note that formally,
+   ``\b`` is defined as the boundary between a ``\w`` and a ``\W`` character
+   (or vice versa), or between ``\w`` and the beginning/end of the string.
+   This means that ``r'\bfoo\b'`` matches ``'foo'``, ``'foo.'``, ``'(foo)'``,
+   ``'bar foo baz'`` but not ``'foobar'`` or ``'foo3'``.
+
+   By default Unicode alphanumerics are the ones used, but this can be changed
+   by using the :const:`ASCII` flag.  Inside a character range, ``\b``
+   represents the backspace character, for compatibility with Python's string
+   literals.
 
 ``\B``
-   Matches the empty string, but only when it is *not* at the beginning or end of a
-   word.  This is just the opposite of ``\b``, so word characters are
+   Matches the empty string, but only when it is *not* at the beginning or end
+   of a word.  This means that ``r'py\B'`` matches ``'python'``, ``'py3'``,
+   ``'py2'``, but not ``'py'``, ``'py.'``, or ``'py!'``.
+   ``\B`` is just the opposite of ``\b``, so word characters are
    Unicode alphanumerics or the underscore, although this can be changed
    by using the :const:`ASCII` flag.
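
To illustrate the boundary behaviour described above::

   >>> import re
   >>> re.search(r'\bfoo\b', 'bar foo baz')   # Match
   <_sre.SRE_Match object at ...>
   >>> re.search(r'\bfoo\b', 'foobar')        # No match
   >>> re.search(r'py\B', 'python')           # Match
   <_sre.SRE_Match object at ...>
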
 
@@ -411,37 +417,15 @@
    \r      \t      \v      \x
    \\
 
+(Note that ``\b`` is used to represent word boundaries, and means "backspace"
+only inside character classes.)
+
 Octal escapes are included in a limited form.  If the first digit is a 0, or if
 there are three octal digits, it is considered an octal escape. Otherwise, it is
 a group reference.  As for string literals, octal escapes are always at most
 three digits in length.
 
 
-.. _matching-searching:
-
-Matching vs. Searching
-----------------------
-
-.. sectionauthor:: Fred L. Drake, Jr. <fdrake@acm.org>
-
-
-Python offers two different primitive operations based on regular expressions:
-**match** checks for a match only at the beginning of the string, while
-**search** checks for a match anywhere in the string (this is what Perl does
-by default).
-
-Note that match may differ from search even when using a regular expression
-beginning with ``'^'``: ``'^'`` matches only at the start of the string, or in
-:const:`MULTILINE` mode also immediately following a newline.  The "match"
-operation succeeds only if the pattern matches at the start of the string
-regardless of mode, or at the starting position given by the optional *pos*
-argument regardless of whether a newline precedes it.
-
-   >>> re.match("c", "abcdef")  # No match
-   >>> re.search("c", "abcdef") # Match
-   <_sre.SRE_Match object at ...>
-
-
 .. _contents-of-module-re:
 
 Module Contents
@@ -575,10 +559,11 @@
    <match-objects>`.  Return ``None`` if the string does not match the pattern;
    note that this is different from a zero-length match.
 
-   .. note::
+   Note that even in :const:`MULTILINE` mode, :func:`re.match` will only match
+   at the beginning of the string and not at the beginning of each line.
 
-      If you want to locate a match anywhere in *string*, use :func:`search`
-      instead.
+   If you want to locate a match anywhere in *string*, use :func:`search`
+   instead (see also :ref:`search-vs-match`).
 
 
 .. function:: split(pattern, string, maxsplit=0, flags=0)
@@ -762,16 +747,14 @@
    The optional *pos* and *endpos* parameters have the same meaning as for the
    :meth:`~regex.search` method.
 
-   .. note::
-
-      If you want to locate a match anywhere in *string*, use
-      :meth:`~regex.search` instead.
-
    >>> pattern = re.compile("o")
    >>> pattern.match("dog")      # No match as "o" is not at the start of "dog".
    >>> pattern.match("dog", 1)   # Match as "o" is the 2nd character of "dog".
    <_sre.SRE_Match object at ...>
 
+   If you want to locate a match anywhere in *string*, use
+   :meth:`~regex.search` instead (see also :ref:`search-vs-match`).
+
 
 .. method:: regex.split(string, maxsplit=0)
 
@@ -804,8 +787,9 @@
 
 .. attribute:: regex.flags
 
-   The flags argument used when the RE object was compiled, or ``0`` if no flags
-   were provided.
+   The regex matching flags.  This is a combination of the flags given to
+   :func:`.compile`, any ``(?...)`` inline flags in the pattern, and implicit
+   flags such as :data:`UNICODE` if the pattern is a Unicode string.
 
 
 .. attribute:: regex.groups
@@ -964,16 +948,15 @@
 .. attribute:: match.pos
 
    The value of *pos* which was passed to the :meth:`~regex.search` or
-   :meth:`~regex.match` method of a :ref:`match object <match-objects>`.  This
-   is the index into the string at which the RE engine started looking for a
-   match.
+   :meth:`~regex.match` method of a :ref:`regex object <re-objects>`.  This is
+   the index into the string at which the RE engine started looking for a match.
 
 
 .. attribute:: match.endpos
 
    The value of *endpos* which was passed to the :meth:`~regex.search` or
-   :meth:`~regex.match` method of a :ref:`match object <match-objects>`.  This
-   is the index into the string beyond which the RE engine will not go.
+   :meth:`~regex.match` method of a :ref:`regex object <re-objects>`.  This is
+   the index into the string beyond which the RE engine will not go.
 
 
 .. attribute:: match.lastindex
@@ -1089,13 +1072,13 @@
 +--------------------------------+---------------------------------------------+
 | ``%i``                         | ``[-+]?(0[xX][\dA-Fa-f]+|0[0-7]*|\d+)``     |
 +--------------------------------+---------------------------------------------+
-| ``%o``                         | ``0[0-7]*``                                 |
+| ``%o``                         | ``[-+]?[0-7]+``                             |
 +--------------------------------+---------------------------------------------+
 | ``%s``                         | ``\S+``                                     |
 +--------------------------------+---------------------------------------------+
 | ``%u``                         | ``\d+``                                     |
 +--------------------------------+---------------------------------------------+
-| ``%x``, ``%X``                 | ``0[xX][\dA-Fa-f]+``                        |
+| ``%x``, ``%X``                 | ``[-+]?(0[xX])?[\dA-Fa-f]+``                |
 +--------------------------------+---------------------------------------------+
 
 To extract the filename and numbers from a string like ::
@@ -1111,59 +1094,39 @@
    (\S+) - (\d+) errors, (\d+) warnings
 
 
-Avoiding recursion
-^^^^^^^^^^^^^^^^^^
-
-If you create regular expressions that require the engine to perform a lot of
-recursion, you may encounter a :exc:`RuntimeError` exception with the message
-``maximum recursion limit exceeded``. For example, ::
-
-   >>> s = 'Begin ' + 1000*'a very long string ' + 'end'
-   >>> re.match('Begin (\w| )*? end', s).end()
-   Traceback (most recent call last):
-     File "<stdin>", line 1, in ?
-     File "/usr/local/lib/python3.2/re.py", line 132, in match
-       return _compile(pattern, flags).match(string)
-   RuntimeError: maximum recursion limit exceeded
-
-You can often restructure your regular expression to avoid recursion.
-
-Simple uses of the ``*?`` pattern are special-cased to avoid recursion.  Thus,
-the above regular expression can avoid recursion by being recast as ``Begin
-[a-zA-Z0-9_ ]*?end``.  As a further benefit, such regular expressions will run
-faster than their recursive equivalents.
-
+.. _search-vs-match:
 
 search() vs. match()
 ^^^^^^^^^^^^^^^^^^^^
 
-In a nutshell, :func:`match` only attempts to match a pattern at the beginning
-of a string where :func:`search` will match a pattern anywhere in a string.
-For example:
+.. sectionauthor:: Fred L. Drake, Jr. <fdrake@acm.org>
 
-   >>> re.match("o", "dog")  # No match as "o" is not the first letter of "dog".
-   >>> re.search("o", "dog") # Match as search() looks everywhere in the string.
+Python offers two different primitive operations based on regular expressions:
+:func:`re.match` checks for a match only at the beginning of the string, while
+:func:`re.search` checks for a match anywhere in the string (this is what Perl
+does by default).
+
+For example::
+
+   >>> re.match("c", "abcdef")  # No match
+   >>> re.search("c", "abcdef") # Match
    <_sre.SRE_Match object at ...>
 
-.. note::
+Regular expressions beginning with ``'^'`` can be used with :func:`search` to
+restrict the match to the beginning of the string::
 
-   The following applies only to regular expression objects like those created
-   with ``re.compile("pattern")``, not the primitives ``re.match(pattern,
-   string)`` or ``re.search(pattern, string)``.
+   >>> re.match("c", "abcdef")  # No match
+   >>> re.search("^c", "abcdef") # No match
+   >>> re.search("^a", "abcdef")  # Match
+   <_sre.SRE_Match object at ...>
 
-:func:`match` has an optional second parameter that gives an index in the string
-where the search is to start::
+Note however that in :const:`MULTILINE` mode :func:`match` only matches at the
+beginning of the string, whereas using :func:`search` with a regular expression
+beginning with ``'^'`` will match at the beginning of each line.
 
-   >>> pattern = re.compile("o")
-   >>> pattern.match("dog")      # No match as "o" is not at the start of "dog."
-
-   # Equivalent to the above expression as 0 is the default starting index:
-   >>> pattern.match("dog", 0)
-
-   # Match as "o" is the 2nd character of "dog" (index 0 is the first):
-   >>> pattern.match("dog", 1)
+   >>> re.match('X', 'A\nB\nX', re.MULTILINE)  # No match
+   >>> re.search('^X', 'A\nB\nX', re.MULTILINE)  # Match
    <_sre.SRE_Match object at ...>
-   >>> pattern.match("dog", 2)   # No match as "o" is not the 3rd character of "dog."
 
 
 Making a Phonebook
@@ -1177,7 +1140,7 @@
 First, here is the input.  Normally it may come from a file, here we are using
 triple-quoted string syntax:
 
-   >>> input = """Ross McFluff: 834.345.1254 155 Elm Street
+   >>> text = """Ross McFluff: 834.345.1254 155 Elm Street
    ...
    ... Ronald Heathmore: 892.345.3428 436 Finley Avenue
    ... Frank Burger: 925.541.7625 662 South Dogwood Way
@@ -1191,7 +1154,7 @@
 .. doctest::
    :options: +NORMALIZE_WHITESPACE
 
-   >>> entries = re.split("\n+", input)
+   >>> entries = re.split("\n+", text)
    >>> entries
    ['Ross McFluff: 834.345.1254 155 Elm Street',
    'Ronald Heathmore: 892.345.3428 436 Finley Avenue',
diff -r 3d0686d90f55 Doc/library/runpy.rst
--- a/Doc/library/runpy.rst
+++ b/Doc/library/runpy.rst
@@ -14,6 +14,15 @@
 line switch that allows scripts to be located using the Python module
 namespace rather than the filesystem.
 
+Note that this is *not* a sandbox module - all code is executed in the
+current process, and any side effects (such as cached imports of other
+modules) will remain in place after the functions have returned.
+
+Furthermore, any functions and classes defined by the executed code are not
+guaranteed to work correctly after a :mod:`runpy` function has returned.
+If that limitation is not acceptable for a given use case, :mod:`importlib`
+is likely to be a more suitable choice than this module.
+
 The :mod:`runpy` module provides two functions:
 
 
@@ -141,3 +150,5 @@
       PEP written and implemented by Nick Coghlan.
 
    :ref:`using-on-general` - CPython command line details
+
+   The :func:`importlib.import_module` function
diff -r 3d0686d90f55 Doc/library/select.rst
--- a/Doc/library/select.rst
+++ b/Doc/library/select.rst
@@ -181,7 +181,7 @@
    Remove a registered file descriptor from the epoll object.
 
 
-.. method:: epoll.poll([timeout=-1[, maxevents=-1]])
+.. method:: epoll.poll(timeout=-1, maxevents=-1)
 
    Wait for events. timeout in seconds (float)
 
diff -r 3d0686d90f55 Doc/library/signal.rst
--- a/Doc/library/signal.rst
+++ b/Doc/library/signal.rst
@@ -5,46 +5,58 @@
    :synopsis: Set handlers for asynchronous events.
 
 
-This module provides mechanisms to use signal handlers in Python. Some general
-rules for working with signals and their handlers:
+This module provides mechanisms to use signal handlers in Python.
 
-* A handler for a particular signal, once set, remains installed until it is
-  explicitly reset (Python emulates the BSD style interface regardless of the
-  underlying implementation), with the exception of the handler for
-  :const:`SIGCHLD`, which follows the underlying implementation.
 
-* There is no way to "block" signals temporarily from critical sections (since
-  this is not supported by all Unix flavors).
+General rules
+-------------
 
-* Although Python signal handlers are called asynchronously as far as the Python
-  user is concerned, they can only occur between the "atomic" instructions of the
-  Python interpreter.  This means that signals arriving during long calculations
-  implemented purely in C (such as regular expression matches on large bodies of
-  text) may be delayed for an arbitrary amount of time.
+The :func:`signal.signal` function allows defining custom handlers to be
+executed when a signal is received.  A small number of default handlers are
+installed: :const:`SIGPIPE` is ignored (so write errors on pipes and sockets
+can be reported as ordinary Python exceptions) and :const:`SIGINT` is
+translated into a :exc:`KeyboardInterrupt` exception.
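
For illustration, a minimal sketch of installing a custom handler (Unix
only, since it relies on :const:`SIGALRM` and :func:`signal.pause`)::

   import signal

   def handler(signum, frame):
       print('received signal', signum)

   signal.signal(signal.SIGALRM, handler)   # install the custom handler
   signal.alarm(1)                          # deliver SIGALRM in one second
   signal.pause()                           # sleep until a signal arrives
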
 
-* When a signal arrives during an I/O operation, it is possible that the I/O
-  operation raises an exception after the signal handler returns. This is
-  dependent on the underlying Unix system's semantics regarding interrupted system
-  calls.
+A handler for a particular signal, once set, remains installed until it is
+explicitly reset (Python emulates the BSD style interface regardless of the
+underlying implementation), with the exception of the handler for
+:const:`SIGCHLD`, which follows the underlying implementation.
 
-* Because the C signal handler always returns, it makes little sense to catch
-  synchronous errors like :const:`SIGFPE` or :const:`SIGSEGV`.
+There is no way to "block" signals temporarily from critical sections (since
+this is not supported by all Unix flavors).
 
-* Python installs a small number of signal handlers by default: :const:`SIGPIPE`
-  is ignored (so write errors on pipes and sockets can be reported as ordinary
-  Python exceptions) and :const:`SIGINT` is translated into a
-  :exc:`KeyboardInterrupt` exception.  All of these can be overridden.
 
-* Some care must be taken if both signals and threads are used in the same
-  program.  The fundamental thing to remember in using signals and threads
-  simultaneously is: always perform :func:`signal` operations in the main thread
-  of execution.  Any thread can perform an :func:`alarm`, :func:`getsignal`,
-  :func:`pause`, :func:`setitimer` or :func:`getitimer`; only the main thread
-  can set a new signal handler, and the main thread will be the only one to
-  receive signals (this is enforced by the Python :mod:`signal` module, even
-  if the underlying thread implementation supports sending signals to
-  individual threads).  This means that signals can't be used as a means of
-  inter-thread communication.  Use locks instead.
+Execution of Python signal handlers
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A Python signal handler does not get executed inside the low-level (C) signal
+handler.  Instead, the low-level signal handler sets a flag which tells the
+:term:`virtual machine` to execute the corresponding Python signal handler
+at a later point (for example at the next :term:`bytecode` instruction).
+This has consequences:
+
+* It makes little sense to catch synchronous errors like :const:`SIGFPE` or
+  :const:`SIGSEGV`.
+
+* A long-running calculation implemented purely in C (such as regular
+  expression matching on a large body of text) may run uninterrupted for an
+  arbitrary amount of time, regardless of any signals received.  The Python
+  signal handlers will be called when the calculation finishes.
+
+
+Signals and threads
+^^^^^^^^^^^^^^^^^^^
+
+Python signal handlers are always executed in the main Python thread,
+even if the signal was received in another thread.  This means that signals
+can't be used as a means of inter-thread communication.  You can use
+the synchronization primitives from the :mod:`threading` module instead.
+
+Also, only the main thread is allowed to set a new signal handler.
+
+
+Module contents
+---------------
 
 The variables defined in the :mod:`signal` module are:
 
diff -r 3d0686d90f55 Doc/library/smtpd.rst
--- a/Doc/library/smtpd.rst
+++ b/Doc/library/smtpd.rst
@@ -109,7 +109,7 @@
    .. attribute:: received_lines
 
       Holds a list of the line strings (decoded using UTF-8) received from
-      the client. The lines have their "\r\n" line ending translated to "\n".
+      the client. The lines have their "\\r\\n" line ending translated to "\\n".
 
    .. attribute:: smtp_state
 
diff -r 3d0686d90f55 Doc/library/socket.rst
--- a/Doc/library/socket.rst
+++ b/Doc/library/socket.rst
@@ -47,7 +47,7 @@
 - A pair ``(host, port)`` is used for the :const:`AF_INET` address family,
   where *host* is a string representing either a hostname in Internet domain
   notation like ``'daring.cwi.nl'`` or an IPv4 address like ``'100.50.200.5'``,
-  and *port* is an integral port number.
+  and *port* is an integer.
 
 - For :const:`AF_INET6` address family, a four-tuple ``(host, port, flowinfo,
   scopeid)`` is used, where *flowinfo* and *scopeid* represent the ``sin6_flowinfo``
diff -r 3d0686d90f55 Doc/library/sqlite3.rst
--- a/Doc/library/sqlite3.rst
+++ b/Doc/library/sqlite3.rst
@@ -3,7 +3,7 @@
 
 .. module:: sqlite3
    :synopsis: A DB-API 2.0 implementation using SQLite 3.x.
-.. sectionauthor:: Gerhard HÃ¤ring <gh@ghaering.de>
+.. sectionauthor:: Gerhard Häring <gh@ghaering.de>
 
 
 SQLite is a C library that provides a lightweight disk-based database that
@@ -20,6 +20,7 @@
 represents the database.  Here the data will be stored in the
 :file:`/tmp/example` file::
 
+   import sqlite3
    conn = sqlite3.connect('/tmp/example')
 
 You can also supply the special name ``:memory:`` to create a database in RAM.
@@ -56,15 +57,15 @@
 
    # Never do this -- insecure!
    symbol = 'IBM'
-   c.execute("... where symbol = '%s'" % symbol)
+   c.execute("select * from stocks where symbol = '%s'" % symbol)
 
    # Do this instead
-   t = (symbol,)
+   t = ('IBM',)
    c.execute('select * from stocks where symbol=?', t)
 
    # Larger example
    for t in [('2006-03-28', 'BUY', 'IBM', 1000, 45.00),
-             ('2006-04-05', 'BUY', 'MSOFT', 1000, 72.00),
+             ('2006-04-05', 'BUY', 'MSFT', 1000, 72.00),
              ('2006-04-06', 'SELL', 'IBM', 500, 53.00),
             ]:
        c.execute('insert into stocks values (?,?,?,?,?)', t)
@@ -271,7 +272,6 @@
    calling the cursor method, then calls the cursor's :meth:`executemany
    <Cursor.executemany>` method with the parameters given.
 
-
 .. method:: Connection.executescript(sql_script)
 
    This is a nonstandard shortcut that creates an intermediate cursor object by
@@ -376,22 +376,22 @@
    aggregates or whole new virtual table implementations.  One well-known
    extension is the fulltext-search extension distributed with SQLite.
 
+   Loadable extensions are disabled by default. See [#f1]_.
+
    .. versionadded:: 3.2
 
    .. literalinclude:: ../includes/sqlite3/load_extension.py
 
-   Loadable extensions are disabled by default. See [#f1]_.
-
 .. method:: Connection.load_extension(path)
 
    This routine loads a SQLite extension from a shared library.  You have to
    enable extension loading with :meth:`enable_load_extension` before you can
    use this routine.
 
+   Loadable extensions are disabled by default. See [#f1]_.
+
    .. versionadded:: 3.2
 
-   Loadable extensions are disabled by default. See [#f1]_.
-
 .. attribute:: Connection.row_factory
 
    You can change this attribute to a callable that accepts the cursor and the
@@ -514,7 +514,7 @@
    or :const:`None` when no more data is available.
 
 
-.. method:: Cursor.fetchmany([size=cursor.arraysize])
+.. method:: Cursor.fetchmany(size=cursor.arraysize)
 
    Fetches the next set of rows of a query result, returning a list.  An empty
    list is returned when no more rows are available.
diff -r 3d0686d90f55 Doc/library/ssl.rst
--- a/Doc/library/ssl.rst
+++ b/Doc/library/ssl.rst
@@ -427,23 +427,39 @@
    If the parameter ``binary_form`` is :const:`False`, and a certificate was
    received from the peer, this method returns a :class:`dict` instance.  If the
    certificate was not validated, the dict is empty.  If the certificate was
-   validated, it returns a dict with the keys ``subject`` (the principal for
-   which the certificate was issued), and ``notAfter`` (the time after which the
-   certificate should not be trusted).  If a certificate contains an instance
-   of the *Subject Alternative Name* extension (see :rfc:`3280`), there will
-   also be a ``subjectAltName`` key in the dictionary.
+   validated, it returns a dict with several keys, amongst them ``subject``
+   (the principal for which the certificate was issued) and ``issuer``
+   (the principal issuing the certificate).  If a certificate contains an
+   instance of the *Subject Alternative Name* extension (see :rfc:`3280`),
+   there will also be a ``subjectAltName`` key in the dictionary.
 
-   The "subject" field is a tuple containing the sequence of relative
-   distinguished names (RDNs) given in the certificate's data structure for the
-   principal, and each RDN is a sequence of name-value pairs::
+   The ``subject`` and ``issuer`` fields are tuples containing the sequence
+   of relative distinguished names (RDNs) given in the certificate's data
+   structure for the respective fields, and each RDN is a sequence of
+   name-value pairs.  Here is a real-world example::
 
-      {'notAfter': 'Feb 16 16:54:50 2013 GMT',
-       'subject': ((('countryName', 'US'),),
-                   (('stateOrProvinceName', 'Delaware'),),
-                   (('localityName', 'Wilmington'),),
-                   (('organizationName', 'Python Software Foundation'),),
-                   (('organizationalUnitName', 'SSL'),),
-                   (('commonName', 'somemachine.python.org'),))}
+      {'issuer': ((('countryName', 'IL'),),
+                  (('organizationName', 'StartCom Ltd.'),),
+                  (('organizationalUnitName',
+                    'Secure Digital Certificate Signing'),),
+                  (('commonName',
+                    'StartCom Class 2 Primary Intermediate Server CA'),)),
+       'notAfter': 'Nov 22 08:15:19 2013 GMT',
+       'notBefore': 'Nov 21 03:09:52 2011 GMT',
+       'serialNumber': '95F0',
+       'subject': ((('description', '571208-SLe257oHY9fVQ07Z'),),
+                   (('countryName', 'US'),),
+                   (('stateOrProvinceName', 'California'),),
+                   (('localityName', 'San Francisco'),),
+                   (('organizationName', 'Electronic Frontier Foundation, Inc.'),),
+                   (('commonName', '*.eff.org'),),
+                   (('emailAddress', 'hostmaster@eff.org'),)),
+       'subjectAltName': (('DNS', '*.eff.org'), ('DNS', 'eff.org')),
+       'version': 3}
+
+   .. note::
+      To validate a certificate for a particular service, you can use the
+      :func:`match_hostname` function.
 
    If the ``binary_form`` parameter is :const:`True`, and a certificate was
    provided, this method returns the DER-encoded form of the entire certificate
diff -r 3d0686d90f55 Doc/library/stdtypes.rst
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -819,7 +819,8 @@
 also string-specific methods described in the :ref:`string-methods` section.
 
 Bytes and bytearray objects contain single bytes -- the former is immutable
-while the latter is a mutable sequence.  Bytes objects can be constructed the
+while the latter is a mutable sequence.
+Bytes objects can be constructed by using the
 constructor, :func:`bytes`, and from literals; use a ``b`` prefix with normal
 string syntax: ``b'xyzzy'``.  To construct byte arrays, use the
 :func:`bytearray` function.
@@ -1304,8 +1305,8 @@
    Return a list of the words in the string, using *sep* as the delimiter
    string.  If *maxsplit* is given, at most *maxsplit* splits are done (thus,
    the list will have at most ``maxsplit+1`` elements).  If *maxsplit* is not
-   specified, then there is no limit on the number of splits (all possible
-   splits are made).
+   specified or ``-1``, then there is no limit on the number of splits
+   (all possible splits are made).
 
    If *sep* is given, consecutive delimiters are not grouped together and are
    deemed to delimit empty strings (for example, ``'1,,2'.split(',')`` returns
@@ -1324,11 +1325,23 @@
    ``'  1  2   3  '.split(None, 1)`` returns ``['1', '2   3  ']``.
 
 
+.. index::
+   single: universal newlines; str.splitlines method
+
 .. method:: str.splitlines([keepends])
 
-   Return a list of the lines in the string, breaking at line boundaries.  Line
-   breaks are not included in the resulting list unless *keepends* is given and
-   true.
+   Return a list of the lines in the string, breaking at line boundaries.
+   This method uses the :term:`universal newlines` approach to splitting lines.
+   Line breaks are not included in the resulting list unless *keepends* is
+   given and true.
+
+   For example, ``'ab c\n\nde fg\rkl\r\n'.splitlines()`` returns
+   ``['ab c', '', 'de fg', 'kl']``, while the same call with ``splitlines(True)``
+   returns ``['ab c\n', '\n', 'de fg\r', 'kl\r\n']``.
+
+   Unlike :meth:`~str.split` when a delimiter string *sep* is given, this
+   method returns an empty list for the empty string, and a terminal line
+   break does not result in an extra line.
 
 
 .. method:: str.startswith(prefix[, start[, end]])
@@ -1437,8 +1450,13 @@
 
 .. note::
 
-   The formatting operations described here are obsolete and may go away in future
-   versions of Python.  Use the new :ref:`string-formatting` in new code.
+   The formatting operations described here are modelled on C's printf()
+   syntax.  They only support formatting of certain builtin types.  The
+   use of a binary operator means that care may be needed in order to
+   format tuples and dictionaries correctly.  As the new
+   :ref:`string-formatting` syntax is more flexible and handles tuples and
+   dictionaries naturally, it is recommended for new code.  However, there
+   are no current plans to deprecate printf-style formatting.
 
 String objects have one unique built-in operation: the ``%`` operator (modulo).
 This is also known as the string *formatting* or *interpolation* operator.
diff -r 3d0686d90f55 Doc/library/string.rst
--- a/Doc/library/string.rst
+++ b/Doc/library/string.rst
@@ -91,8 +91,8 @@
 
    .. method:: format(format_string, *args, **kwargs)
 
-      :meth:`format` is the primary API method.  It takes a format template
-      string, and an arbitrary set of positional and keyword argument.
+      :meth:`format` is the primary API method.  It takes a format string and
+      an arbitrary set of positional and keyword arguments.
       :meth:`format` is just a wrapper that calls :meth:`vformat`.
 
    .. method:: vformat(format_string, args, kwargs)
@@ -101,8 +101,8 @@
       separate function for cases where you want to pass in a predefined
       dictionary of arguments, rather than unpacking and repacking the
       dictionary as individual arguments using the ``*args`` and ``**kwds``
-      syntax.  :meth:`vformat` does the work of breaking up the format template
-      string into character data and replacement fields.  It calls the various
+      syntax.  :meth:`vformat` does the work of breaking up the format string
+      into character data and replacement fields.  It calls the various
       methods described below.
 
    In addition, the :class:`Formatter` defines a number of methods that are
@@ -173,7 +173,8 @@
 
       Converts the value (returned by :meth:`get_field`) given a conversion type
       (as in the tuple returned by the :meth:`parse` method).  The default
-      version understands 'r' (repr) and 's' (str) conversion types.
+      version understands 's' (str), 'r' (repr) and 'a' (ascii) conversion
+      types.
 
 
 .. _formatstrings:
@@ -292,7 +293,7 @@
 
 .. productionlist:: sf
    format_spec: [[`fill`]`align`][`sign`][#][0][`width`][,][.`precision`][`type`]
-   fill: <a character other than '}'>
+   fill: <a character other than '{' or '}'>
    align: "<" | ">" | "=" | "^"
    sign: "+" | "-" | " "
    width: `integer`
@@ -368,9 +369,9 @@
 *width* is a decimal integer defining the minimum field width.  If not
 specified, then the field width will be determined by the content.
 
-If the *width* field is preceded by a zero (``'0'``) character, this enables
-zero-padding.  This is equivalent to an *alignment* type of ``'='`` and a *fill*
-character of ``'0'``.
+Preceding the *width* field by a zero (``'0'``) character enables
+sign-aware zero-padding for numeric types.  This is equivalent to a *fill*
+character of ``'0'`` with an *alignment* type of ``'='``.
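
A quick illustration of sign-aware zero-padding::

   >>> format(42, '08d')
   '00000042'
   >>> format(-42, '08d')
   '-0000042'
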
 
 The *precision* is a decimal number indicating how many digits should be
 displayed after the decimal point for a floating point value formatted with
diff -r 3d0686d90f55 Doc/library/subprocess.rst
--- a/Doc/library/subprocess.rst
+++ b/Doc/library/subprocess.rst
@@ -136,7 +136,7 @@
    decoding to text will often need to be handled at the application level.
 
    This behaviour may be overridden by setting *universal_newlines* to
-   :const:`True` as described below in :ref:`frequently-used-arguments`.
+   ``True`` as described below in :ref:`frequently-used-arguments`.
 
    To also capture standard error in the result, use
    ``stderr=subprocess.STDOUT``::
@@ -176,6 +176,26 @@
    output.
 
 
+.. exception:: CalledProcessError
+
+    Exception raised when a process run by :func:`check_call` or
+    :func:`check_output` returns a non-zero exit status.
+
+    .. attribute:: returncode
+
+        Exit status of the child process.
+
+    .. attribute:: cmd
+
+        Command that was used to spawn the child process.
+
+    .. attribute:: output
+
+        Output of the child process if this exception is raised by
+        :func:`check_output`.  Otherwise, ``None``.
+
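
A short illustrative sketch of catching the exception; it assumes a Unix
``false`` command that exits with a non-zero status::

   import subprocess

   try:
       subprocess.check_call(['false'])
   except subprocess.CalledProcessError as exc:
       print('command', exc.cmd, 'failed with status', exc.returncode)
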
+
+
 .. _frequently-used-arguments:
 
 Frequently Used Arguments
@@ -204,13 +224,27 @@
    the stderr data from the child process should be captured into the same file
    handle as for stdout.
 
-   When *stdout* or *stderr* are pipes and *universal_newlines* is
-   :const:`True` then the output data is assumed to be encoded as UTF-8 and
-   will automatically be decoded to text. All line endings will be converted
-   to ``'\n'`` as described for the universal newlines ``'U'`` mode argument
-   to :func:`open`.
+   .. index::
+      single: universal newlines; subprocess module
 
-   If *shell* is :const:`True`, the specified command will be executed through
+   If *universal_newlines* is ``True``, the file objects *stdin*, *stdout*
+   and *stderr* will be opened as text streams in :term:`universal newlines`
+   mode using the encoding returned by :func:`locale.getpreferredencoding`.
+   For *stdin*, line ending characters ``'\n'`` in the input will be converted
+   to the default line separator :data:`os.linesep`.  For *stdout* and
+   *stderr*, all line endings in the output will be converted to ``'\n'``.
+   For more information see the documentation of the :class:`io.TextIOWrapper`
+   class when the *newline* argument to its constructor is ``None``.
+
+   .. note::
+
+      The *universal_newlines* feature is supported only if Python is built
+      with universal newline support (the default).  Also, the newlines
+      attribute of the file objects :attr:`Popen.stdin`, :attr:`Popen.stdout`
+      and :attr:`Popen.stderr` are not updated by the
+      :meth:`Popen.communicate` method.
+
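
As an illustration of the text-mode behaviour (the command shown is a Unix
one and merely stands in for any program producing output)::

   import subprocess

   # the result is str rather than bytes, and line endings in the
   # output are converted to '\n'
   out = subprocess.check_output(['uname', '-a'], universal_newlines=True)
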
+   If *shell* is ``True``, the specified command will be executed through
    the shell. This can be useful if you are using Python primarily for the
    enhanced control flow it offers over most system shells and still want
    access to other shell features such as filename wildcards, shell pipes and
@@ -408,18 +442,9 @@
 
    .. _side-by-side assembly: http://en.wikipedia.org/wiki/Side-by-Side_Assembly
 
-   If *universal_newlines* is :const:`True`, the file objects stdout and stderr are
-   opened as text files, but lines may be terminated by any of ``'\n'``, the Unix
-   end-of-line convention, ``'\r'``, the old Macintosh convention or ``'\r\n'``, the
-   Windows convention. All of these external representations are seen as ``'\n'``
-   by the Python program.
-
-   .. note::
-
-      This feature is only available if Python is built with universal newline
-      support (the default).  Also, the newlines attribute of the file objects
-      :attr:`stdout`, :attr:`stdin` and :attr:`stderr` are not updated by the
-      :meth:`communicate` method.
+   If *universal_newlines* is ``True``, the file objects *stdin*, *stdout*
+   and *stderr* are opened as text streams in universal newlines mode, as
+   described above in :ref:`frequently-used-arguments`.
 
    If given, *startupinfo* will be a :class:`STARTUPINFO` object, which is
    passed to the underlying ``CreateProcess`` function.
@@ -735,7 +760,7 @@
 to receive a SIGPIPE if p2 exits before p1.
 
 Alternatively, for trusted input, the shell's own pipeline support may still
-be used directly:
+be used directly::
 
    output=`dmesg | grep hda`
    # becomes
diff -r 3d0686d90f55 Doc/library/syslog.rst
--- a/Doc/library/syslog.rst
+++ b/Doc/library/syslog.rst
@@ -78,7 +78,8 @@
 Facilities:
    :const:`LOG_KERN`, :const:`LOG_USER`, :const:`LOG_MAIL`, :const:`LOG_DAEMON`,
    :const:`LOG_AUTH`, :const:`LOG_LPR`, :const:`LOG_NEWS`, :const:`LOG_UUCP`,
-   :const:`LOG_CRON` and :const:`LOG_LOCAL0` to :const:`LOG_LOCAL7`.
+   :const:`LOG_CRON`, :const:`LOG_SYSLOG` and :const:`LOG_LOCAL0` to
+   :const:`LOG_LOCAL7`.
 
 Log options:
    :const:`LOG_PID`, :const:`LOG_CONS`, :const:`LOG_NDELAY`, :const:`LOG_NOWAIT`
diff -r 3d0686d90f55 Doc/library/test.rst
--- a/Doc/library/test.rst
+++ b/Doc/library/test.rst
@@ -171,9 +171,9 @@
 
 Running :mod:`test` directly allows what resources are available for
 tests to use to be set. You do this by using the ``-u`` command-line
-option. Run :program:`python -m test -uall` to turn on all
-resources; specifying ``all`` as an option for ``-u`` enables all
-possible resources. If all but one resource is desired (a more common case), a
+option. Specifying ``all`` as the value for the ``-u`` option enables all
+possible resources: :program:`python -m test -uall`.
+If all but one resource is desired (a more common case), a
 comma-separated list of resources that are not desired may be listed after
 ``all``. The command :program:`python -m test -uall,-audio,-largefile`
 will run :mod:`test` with all resources except the ``audio`` and
@@ -362,7 +362,7 @@
 
       with captured_stdout() as s:
           print("hello")
-      assert s.getvalue() == "hello"
+      assert s.getvalue() == "hello\n"
 
 
 .. function:: import_module(name, deprecated=False)
diff -r 3d0686d90f55 Doc/library/textwrap.rst
--- a/Doc/library/textwrap.rst
+++ b/Doc/library/textwrap.rst
@@ -109,9 +109,11 @@
 
    .. attribute:: replace_whitespace
 
-      (default: ``True``) If true, each whitespace character (as defined by
-      ``string.whitespace``) remaining after tab expansion will be replaced by a
-      single space.
+      (default: ``True``) If true, after tab expansion but before wrapping,
+      the :meth:`wrap` method will replace each whitespace character
+      with a single space.  The whitespace characters replaced are
+      as follows: tab, newline, vertical tab, formfeed, and carriage
+      return (``'\t\n\v\f\r'``).
 
       .. note::
 
diff -r 3d0686d90f55 Doc/library/threading.rst
--- a/Doc/library/threading.rst
+++ b/Doc/library/threading.rst
@@ -218,30 +218,31 @@
 
 This class represents an activity that is run in a separate thread of control.
 There are two ways to specify the activity: by passing a callable object to the
-constructor, or by overriding the :meth:`run` method in a subclass.  No other
-methods (except for the constructor) should be overridden in a subclass.  In
-other words,  *only*  override the :meth:`__init__` and :meth:`run` methods of
-this class.
+constructor, or by overriding the :meth:`~Thread.run` method in a subclass.
+No other methods (except for the constructor) should be overridden in a
+subclass.  In other words,  *only*  override the :meth:`~Thread.__init__`
+and :meth:`~Thread.run` methods of this class.
 
 Once a thread object is created, its activity must be started by calling the
-thread's :meth:`start` method.  This invokes the :meth:`run` method in a
-separate thread of control.
+thread's :meth:`~Thread.start` method.  This invokes the :meth:`~Thread.run`
+method in a separate thread of control.
 
 Once the thread's activity is started, the thread is considered 'alive'. It
-stops being alive when its :meth:`run` method terminates -- either normally, or
-by raising an unhandled exception.  The :meth:`is_alive` method tests whether the
-thread is alive.
+stops being alive when its :meth:`~Thread.run` method terminates -- either
+normally, or by raising an unhandled exception.  The :meth:`~Thread.is_alive`
+method tests whether the thread is alive.
 
-Other threads can call a thread's :meth:`join` method.  This blocks the calling
-thread until the thread whose :meth:`join` method is called is terminated.
+Other threads can call a thread's :meth:`~Thread.join` method.  This blocks
+the calling thread until the thread whose :meth:`~Thread.join` method is
+called is terminated.
 
 A thread has a name.  The name can be passed to the constructor, and read or
-changed through the :attr:`name` attribute.
+changed through the :attr:`~Thread.name` attribute.
 
-A thread can be flagged as a "daemon thread".  The significance of this flag is
-that the entire Python program exits when only daemon threads are left.  The
-initial value is inherited from the creating thread.  The flag can be set
-through the :attr:`daemon` property.
+A thread can be flagged as a "daemon thread".  The significance of this flag
+is that the entire Python program exits when only daemon threads are left.
+The initial value is inherited from the creating thread.  The flag can be
+set through the :attr:`~Thread.daemon` property.
 
 There is a "main thread" object; this corresponds to the initial thread of
 control in the Python program.  It is not a daemon thread.
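
For illustration, a minimal sketch using the callable-object form described
above::

   import threading

   def worker(name):
       print('running', name)

   t = threading.Thread(target=worker, args=('worker-1',))
   t.start()            # run() is invoked in a separate thread of control
   t.join()             # block until the thread terminates
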
@@ -250,8 +251,8 @@
 thread objects corresponding to "alien threads", which are threads of control
 started outside the threading module, such as directly from C code.  Dummy
 thread objects have limited functionality; they are always considered alive and
-daemonic, and cannot be :meth:`join`\ ed.  They are never deleted, since it is
-impossible to detect the termination of alien threads.
+daemonic, and cannot be :meth:`~Thread.join`\ ed.  They are never deleted,
+since it is impossible to detect the termination of alien threads.
 
 
 .. class:: Thread(group=None, target=None, name=None, args=(), kwargs={})
@@ -282,7 +283,8 @@
       Start the thread's activity.
 
       It must be called at most once per thread object.  It arranges for the
-      object's :meth:`run` method to be invoked in a separate thread of control.
+      object's :meth:`~Thread.run` method to be invoked in a separate thread
+      of control.
 
       This method will raise a :exc:`RuntimeError` if called more than once
       on the same thread object.
@@ -298,25 +300,27 @@
 
    .. method:: join(timeout=None)
 
-      Wait until the thread terminates. This blocks the calling thread until the
-      thread whose :meth:`join` method is called terminates -- either normally
-      or through an unhandled exception -- or until the optional timeout occurs.
+      Wait until the thread terminates. This blocks the calling thread until
+      the thread whose :meth:`~Thread.join` method is called terminates -- either
+      normally or through an unhandled exception -- or until the optional
+      timeout occurs.
 
       When the *timeout* argument is present and not ``None``, it should be a
       floating point number specifying a timeout for the operation in seconds
-      (or fractions thereof). As :meth:`join` always returns ``None``, you must
-      call :meth:`is_alive` after :meth:`join` to decide whether a timeout
-      happened -- if the thread is still alive, the :meth:`join` call timed out.
+      (or fractions thereof). As :meth:`~Thread.join` always returns ``None``,
+      you must call :meth:`~Thread.is_alive` after :meth:`~Thread.join` to
+      decide whether a timeout happened -- if the thread is still alive, the
+      :meth:`~Thread.join` call timed out.
 
       When the *timeout* argument is not present or ``None``, the operation will
       block until the thread terminates.
 
-      A thread can be :meth:`join`\ ed many times.
+      A thread can be :meth:`~Thread.join`\ ed many times.
 
-      :meth:`join` raises a :exc:`RuntimeError` if an attempt is made to join
-      the current thread as that would cause a deadlock. It is also an error to
-      :meth:`join` a thread before it has been started and attempts to do so
-      raises the same exception.
+      :meth:`~Thread.join` raises a :exc:`RuntimeError` if an attempt is made
+      to join the current thread as that would cause a deadlock. It is also
+      an error to :meth:`~Thread.join` a thread before it has been started
+      and attempts to do so raise the same exception.
 
    .. attribute:: name
 
@@ -334,7 +338,7 @@
 
       The 'thread identifier' of this thread or ``None`` if the thread has not
       been started.  This is a nonzero integer.  See the
-      :func:`thread.get_ident()` function.  Thread identifiers may be recycled
+      :func:`_thread.get_ident()` function.  Thread identifiers may be recycled
       when a thread exits and another thread is created.  The identifier is
       available even after the thread has exited.
 
@@ -342,18 +346,18 @@
 
       Return whether the thread is alive.
 
-      This method returns ``True`` just before the :meth:`run` method starts
-      until just after the :meth:`run` method terminates.  The module function
-      :func:`.enumerate` returns a list of all alive threads.
+      This method returns ``True`` just before the :meth:`~Thread.run` method
+      starts until just after the :meth:`~Thread.run` method terminates.  The
+      module function :func:`.enumerate` returns a list of all alive threads.
 
    .. attribute:: daemon
 
       A boolean value indicating whether this thread is a daemon thread (True)
-      or not (False).  This must be set before :meth:`start` is called,
+      or not (False).  This must be set before :meth:`~Thread.start` is called,
       otherwise :exc:`RuntimeError` is raised.  Its initial value is inherited
       from the creating thread; the main thread is not a daemon thread and
-      therefore all threads created in the main thread default to :attr:`daemon`
-      = ``False``.
+      therefore all threads created in the main thread default to
+      :attr:`~Thread.daemon` = ``False``.
 
       The entire Python program exits when no alive non-daemon threads are left.
 
@@ -375,19 +379,22 @@
 extension module.
 
 A primitive lock is in one of two states, "locked" or "unlocked". It is created
-in the unlocked state.  It has two basic methods, :meth:`acquire` and
-:meth:`release`.  When the state is unlocked, :meth:`acquire` changes the state
-to locked and returns immediately.  When the state is locked, :meth:`acquire`
-blocks until a call to :meth:`release` in another thread changes it to unlocked,
-then the :meth:`acquire` call resets it to locked and returns.  The
-:meth:`release` method should only be called in the locked state; it changes the
-state to unlocked and returns immediately. If an attempt is made to release an
-unlocked lock, a :exc:`RuntimeError` will be raised.
+in the unlocked state.  It has two basic methods, :meth:`~Lock.acquire` and
+:meth:`~Lock.release`.  When the state is unlocked, :meth:`~Lock.acquire`
+changes the state to locked and returns immediately.  When the state is locked,
+:meth:`~Lock.acquire` blocks until a call to :meth:`~Lock.release` in another
+thread changes it to unlocked, then the :meth:`~Lock.acquire` call resets it
+to locked and returns.  The :meth:`~Lock.release` method should only be
+called in the locked state; it changes the state to unlocked and returns
+immediately. If an attempt is made to release an unlocked lock, a
+:exc:`RuntimeError` will be raised.
 
-When more than one thread is blocked in :meth:`acquire` waiting for the state to
-turn to unlocked, only one thread proceeds when a :meth:`release` call resets
-the state to unlocked; which one of the waiting threads proceeds is not defined,
-and may vary across implementations.
+Locks also support the :ref:`context manager protocol <with-locks>`.
+
+When more than one thread is blocked in :meth:`~Lock.acquire` waiting for the
+state to turn to unlocked, only one thread proceeds when a :meth:`~Lock.release`
+call resets the state to unlocked; which one of the waiting threads proceeds
+is not defined, and may vary across implementations.
 
 All methods are executed atomically.
 
@@ -396,15 +403,12 @@
 
    Acquire a lock, blocking or non-blocking.
 
-   When invoked without arguments, block until the lock is unlocked, then set it to
-   locked, and return true.
+   When invoked with the *blocking* argument set to ``True`` (the default),
+   block until the lock is unlocked, then set it to locked and return ``True``.
 
-   When invoked with the *blocking* argument set to true, do the same thing as when
-   called without arguments, and return true.
-
-   When invoked with the *blocking* argument set to false, do not block.  If a call
-   without an argument would block, return false immediately; otherwise, do the
-   same thing as when called without arguments, and return true.
+   When invoked with the *blocking* argument set to ``False``, do not block.
+   If a call with *blocking* set to ``True`` would block, return ``False``
+   immediately; otherwise, set the lock to locked and return ``True``.
 
    When invoked with the floating-point *timeout* argument set to a positive
    value, block for at most the number of seconds specified by *timeout*
@@ -424,13 +428,14 @@
 
 .. method:: Lock.release()
 
-   Release a lock.
+   Release a lock.  This can be called from any thread, not only the thread
+   which has acquired the lock.
 
    When the lock is locked, reset it to unlocked, and return.  If any other threads
    are blocked waiting for the lock to become unlocked, allow exactly one of them
    to proceed.
 
-   Do not call this method when the lock is unlocked.
+   When invoked on an unlocked lock, a :exc:`ThreadError` is raised.
 
    There is no return value.
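+
+As a brief sketch of the non-blocking form described above (the names are
+illustrative)::
+
+   import threading
+
+   lock = threading.Lock()
+
+   if lock.acquire(blocking=False):
+       try:
+           ...  # we hold the lock; use the shared resource
+       finally:
+           lock.release()
+   else:
+       ...  # another thread holds the lock; do something else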
 
@@ -446,12 +451,14 @@
 locks.  In the locked state, some thread owns the lock; in the unlocked state,
 no thread owns it.
 
-To lock the lock, a thread calls its :meth:`acquire` method; this returns once
-the thread owns the lock.  To unlock the lock, a thread calls its
-:meth:`release` method. :meth:`acquire`/:meth:`release` call pairs may be
-nested; only the final :meth:`release` (the :meth:`release` of the outermost
-pair) resets the lock to unlocked and allows another thread blocked in
-:meth:`acquire` to proceed.
+To lock the lock, a thread calls its :meth:`~RLock.acquire` method; this
+returns once the thread owns the lock.  To unlock the lock, a thread calls
+its :meth:`~RLock.release` method. :meth:`~RLock.acquire`/:meth:`~RLock.release`
+call pairs may be nested; only the final :meth:`~RLock.release` (the
+:meth:`~RLock.release` of the outermost pair) resets the lock to unlocked and
+allows another thread blocked in :meth:`~RLock.acquire` to proceed.
+
+Reentrant locks also support the :ref:`context manager protocol <with-locks>`.
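+
+A rough sketch of such nesting, where the same thread re-enters a lock it
+already holds (the names are illustrative)::
+
+   import threading
+
+   rlock = threading.RLock()
+
+   def update(item):
+       with rlock:
+           ...  # modify shared state
+
+   def update_many(items):
+       with rlock:              # outer acquisition
+           for item in items:
+               update(item)     # inner acquisition by the same thread is fine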
 
 
 .. method:: RLock.acquire(blocking=True, timeout=-1)
@@ -503,62 +510,76 @@
 -----------------
 
 A condition variable is always associated with some kind of lock; this can be
-passed in or one will be created by default.  (Passing one in is useful when
-several condition variables must share the same lock.)
+passed in or one will be created by default.  Passing one in is useful when
+several condition variables must share the same lock.  The lock is part of
+the condition object: you don't have to track it separately.
 
-A condition variable has :meth:`acquire` and :meth:`release` methods that call
-the corresponding methods of the associated lock. It also has a :meth:`wait`
-method, and :meth:`notify` and :meth:`notify_all` methods.  These three must only
-be called when the calling thread has acquired the lock, otherwise a
-:exc:`RuntimeError` is raised.
+A condition variable obeys the :ref:`context manager protocol <with-locks>`:
+using the ``with`` statement acquires the associated lock for the duration of
+the enclosed block.  The :meth:`~Condition.acquire` and
+:meth:`~Condition.release` methods also call the corresponding methods of
+the associated lock.
 
-The :meth:`wait` method releases the lock, and then blocks until it is awakened
-by a :meth:`notify` or :meth:`notify_all` call for the same condition variable in
-another thread.  Once awakened, it re-acquires the lock and returns.  It is also
-possible to specify a timeout.
+Other methods must be called with the associated lock held.  The
+:meth:`~Condition.wait` method releases the lock, and then blocks until
+another thread awakens it by calling :meth:`~Condition.notify` or
+:meth:`~Condition.notify_all`.  Once awakened, :meth:`~Condition.wait`
+re-acquires the lock and returns.  It is also possible to specify a timeout.
 
-The :meth:`notify` method wakes up one of the threads waiting for the condition
-variable, if any are waiting.  The :meth:`notify_all` method wakes up all threads
-waiting for the condition variable.
+The :meth:`~Condition.notify` method wakes up one of the threads waiting for
+the condition variable, if any are waiting.  The :meth:`~Condition.notify_all`
+method wakes up all threads waiting for the condition variable.
 
-Note: the :meth:`notify` and :meth:`notify_all` methods don't release the lock;
-this means that the thread or threads awakened will not return from their
-:meth:`wait` call immediately, but only when the thread that called
-:meth:`notify` or :meth:`notify_all` finally relinquishes ownership of the lock.
+Note: the :meth:`~Condition.notify` and :meth:`~Condition.notify_all` methods
+don't release the lock; this means that the thread or threads awakened will
+not return from their :meth:`~Condition.wait` call immediately, but only when
+the thread that called :meth:`~Condition.notify` or :meth:`~Condition.notify_all`
+finally relinquishes ownership of the lock.
 
-Tip: the typical programming style using condition variables uses the lock to
+
+Usage
+^^^^^
+
+The typical programming style using condition variables uses the lock to
 synchronize access to some shared state; threads that are interested in a
-particular change of state call :meth:`wait` repeatedly until they see the
-desired state, while threads that modify the state call :meth:`notify` or
-:meth:`notify_all` when they change the state in such a way that it could
-possibly be a desired state for one of the waiters.  For example, the following
-code is a generic producer-consumer situation with unlimited buffer capacity::
+particular change of state call :meth:`~Condition.wait` repeatedly until they
+see the desired state, while threads that modify the state call
+:meth:`~Condition.notify` or :meth:`~Condition.notify_all` when they change
+the state in such a way that it could possibly be a desired state for one
+of the waiters.  For example, the following code is a generic
+producer-consumer situation with unlimited buffer capacity::
 
    # Consume one item
-   cv.acquire()
-   while not an_item_is_available():
-       cv.wait()
-   get_an_available_item()
-   cv.release()
+   with cv:
+       while not an_item_is_available():
+           cv.wait()
+       get_an_available_item()
 
    # Produce one item
-   cv.acquire()
-   make_an_item_available()
-   cv.notify()
-   cv.release()
+   with cv:
+       make_an_item_available()
+       cv.notify()
 
-To choose between :meth:`notify` and :meth:`notify_all`, consider whether one
-state change can be interesting for only one or several waiting threads.  E.g.
-in a typical producer-consumer situation, adding one item to the buffer only
-needs to wake up one consumer thread.
+The ``while`` loop checking for the application's condition is necessary
+because :meth:`~Condition.wait` can return after an arbitrarily long time,
+and the condition which prompted the :meth:`~Condition.notify` call may
+no longer hold true.  This is inherent to multi-threaded programming.  The
+:meth:`~Condition.wait_for` method can be used to automate the condition
+checking, and eases the computation of timeouts::
 
-Note:  Condition variables can be, depending on the implementation, subject
-to both spurious wakeups (when :meth:`wait` returns without a :meth:`notify`
-call) and stolen wakeups (when another thread acquires the lock before the
-awoken thread.)  For this reason, it is always necessary to verify the state
-the thread is waiting for when :meth:`wait` returns and optionally repeat
-the call as often as necessary.
+   # Consume an item
+   with cv:
+       cv.wait_for(an_item_is_available)
+       get_an_available_item()
 
+To choose between :meth:`~Condition.notify` and :meth:`~Condition.notify_all`,
+consider whether one state change can be interesting for only one or several
+waiting threads.  E.g. in a typical producer-consumer situation, adding one
+item to the buffer only needs to wake up one consumer thread.
+
+
+Interface
+^^^^^^^^^
 
 .. class:: Condition(lock=None)
 
@@ -626,12 +647,6 @@
       held when called and is re-acquired on return.  The predicate is evaluated
       with the lock held.
 
-      Using this method, the consumer example above can be written thus::
-
-         with cv:
-             cv.wait_for(an_item_is_available)
-             get_an_available_item()
-
       .. versionadded:: 3.2
 
    .. method:: notify(n=1)
@@ -667,12 +682,16 @@
 
 This is one of the oldest synchronization primitives in the history of computer
 science, invented by the early Dutch computer scientist Edsger W. Dijkstra (he
-used :meth:`P` and :meth:`V` instead of :meth:`acquire` and :meth:`release`).
+used the names ``P()`` and ``V()`` instead of :meth:`~Semaphore.acquire` and
+:meth:`~Semaphore.release`).
 
 A semaphore manages an internal counter which is decremented by each
-:meth:`acquire` call and incremented by each :meth:`release` call.  The counter
-can never go below zero; when :meth:`acquire` finds that it is zero, it blocks,
-waiting until some other thread calls :meth:`release`.
+:meth:`~Semaphore.acquire` call and incremented by each :meth:`~Semaphore.release`
+call.  The counter can never go below zero; when :meth:`~Semaphore.acquire`
+finds that it is zero, it blocks, waiting until some other thread calls
+:meth:`~Semaphore.release`.
+
+Semaphores also support the :ref:`context manager protocol <with-locks>`.
 
 
 .. class:: Semaphore(value=1)
@@ -688,11 +707,12 @@
       When invoked without arguments: if the internal counter is larger than
       zero on entry, decrement it by one and return immediately.  If it is zero
       on entry, block, waiting until some other thread has called
-      :meth:`release` to make it larger than zero.  This is done with proper
-      interlocking so that if multiple :meth:`acquire` calls are blocked,
-      :meth:`release` will wake exactly one of them up.  The implementation may
-      pick one at random, so the order in which blocked threads are awakened
-      should not be relied on.  Returns true (or blocks indefinitely).
+      :meth:`~Semaphore.release` to make it larger than zero.  This is done
+      with proper interlocking so that if multiple :meth:`acquire` calls are
+      blocked, :meth:`~Semaphore.release` will wake exactly one of them up.
+      The implementation may pick one at random, so the order in which
+      blocked threads are awakened should not be relied on.  Returns
+      true (or blocks indefinitely).
 
       When invoked with *blocking* set to false, do not block.  If a call
       without an argument would block, return false immediately; otherwise,
@@ -729,11 +749,12 @@
 Once spawned, worker threads call the semaphore's acquire and release methods
 when they need to connect to the server::
 
-   pool_sema.acquire()
-   conn = connectdb()
-   ... use connection ...
-   conn.close()
-   pool_sema.release()
+   with pool_sema:
+       conn = connectdb()
+       try:
+           ... use connection ...
+       finally:
+           conn.close()
 
 The use of a bounded semaphore reduces the chance that a programming error which
 causes the semaphore to be released more than it's acquired will go undetected.
@@ -748,8 +769,8 @@
 thread signals an event and other threads wait for it.
 
 An event object manages an internal flag that can be set to true with the
-:meth:`~Event.set` method and reset to false with the :meth:`clear` method.  The
-:meth:`wait` method blocks until the flag is true.
+:meth:`~Event.set` method and reset to false with the :meth:`~Event.clear`
+method.  The :meth:`~Event.wait` method blocks until the flag is true.
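+
+A small sketch of the typical pattern, with one thread signalling readiness to
+another (the names are illustrative)::
+
+   import threading
+
+   ready = threading.Event()
+
+   def worker():
+       ready.wait()      # block until the main thread calls set()
+       ...               # proceed once the flag is true
+
+   threading.Thread(target=worker).start()
+   ...                   # perform whatever setup the worker is waiting for
+   ready.set()           # wake up the worker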
 
 
 .. class:: Event()
@@ -776,7 +797,7 @@
 
       Block until the internal flag is true.  If the internal flag is true on
       entry, return immediately.  Otherwise, block until another thread calls
-      :meth:`set` to set the flag to true, or until the optional timeout occurs.
+      :meth:`.set` to set the flag to true, or until the optional timeout occurs.
 
       When the timeout argument is present and not ``None``, it should be a
       floating point number specifying a timeout for the operation in seconds
@@ -832,8 +853,8 @@
 
 This class provides a simple synchronization primitive for use by a fixed number
 of threads that need to wait for each other.  Each of the threads tries to pass
-the barrier by calling the :meth:`wait` method and will block until all of the
-threads have made the call.  At this points, the threads are released
+the barrier by calling the :meth:`~Barrier.wait` method and will block until
+all of the threads have made the call.  At this point, the threads are released
 simultaneously.
 
 The barrier can be reused any number of times for the same number of threads.
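+
+As a rough sketch, a fixed number of worker threads might wait for each other
+before starting the next phase of their work (the names are illustrative)::
+
+   import threading
+
+   barrier = threading.Barrier(3)
+
+   def worker():
+       prepare()          # per-thread setup, illustrative
+       barrier.wait()     # blocks until all three workers reach this point
+       do_phase_two()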
@@ -934,19 +955,24 @@
 
 All of the objects provided by this module that have :meth:`acquire` and
 :meth:`release` methods can be used as context managers for a :keyword:`with`
-statement.  The :meth:`acquire` method will be called when the block is entered,
-and :meth:`release` will be called when the block is exited.
+statement.  The :meth:`acquire` method will be called when the block is
+entered, and :meth:`release` will be called when the block is exited.  Hence,
+the following snippet::
+
+   with some_lock:
+       # do something...
+
+is equivalent to::
+
+   some_lock.acquire()
+   try:
+       # do something...
+   finally:
+       some_lock.release()
 
 Currently, :class:`Lock`, :class:`RLock`, :class:`Condition`,
 :class:`Semaphore`, and :class:`BoundedSemaphore` objects may be used as
-:keyword:`with` statement context managers.  For example::
-
-   import threading
-
-   some_rlock = threading.RLock()
-
-   with some_rlock:
-       print("some_rlock is locked while this executes")
+:keyword:`with` statement context managers.
 
 
 .. _threaded-imports:
diff -r 3d0686d90f55 Doc/library/time.rst
--- a/Doc/library/time.rst
+++ b/Doc/library/time.rst
@@ -81,9 +81,9 @@
   the units in which their value or argument is expressed. E.g. on most Unix
   systems, the clock "ticks" only 50 or 100 times a second.
 
-* On the other hand, the precision of :func:`time` and :func:`sleep` is better
+* On the other hand, the precision of :func:`.time` and :func:`sleep` is better
   than their Unix equivalents: times are expressed as floating point numbers,
-  :func:`time` returns the most accurate time available (using Unix
+  :func:`.time` returns the most accurate time available (using Unix
   :c:func:`gettimeofday` where available), and :func:`sleep` will accept a time
   with a nonzero fraction (Unix :c:func:`select` is used to implement this, where
   available).
@@ -177,7 +177,7 @@
 
    Convert a time expressed in seconds since the epoch to a string representing
    local time. If *secs* is not provided or :const:`None`, the current time as
-   returned by :func:`time` is used.  ``ctime(secs)`` is equivalent to
+   returned by :func:`.time` is used.  ``ctime(secs)`` is equivalent to
    ``asctime(localtime(secs))``. Locale information is not used by :func:`ctime`.
 
 
@@ -190,7 +190,7 @@
 
    Convert a time expressed in seconds since the epoch to a :class:`struct_time` in
    UTC in which the dst flag is always zero.  If *secs* is not provided or
-   :const:`None`, the current time as returned by :func:`time` is used.  Fractions
+   :const:`None`, the current time as returned by :func:`.time` is used.  Fractions
    of a second are ignored.  See above for a description of the
    :class:`struct_time` object. See :func:`calendar.timegm` for the inverse of this
    function.
@@ -199,7 +199,7 @@
 .. function:: localtime([secs])
 
    Like :func:`gmtime` but converts to local time.  If *secs* is not provided or
-   :const:`None`, the current time as returned by :func:`time` is used.  The dst
+   :const:`None`, the current time as returned by :func:`.time` is used.  The dst
    flag is set to ``1`` when DST applies to the given time.
 
 
@@ -208,7 +208,7 @@
    This is the inverse function of :func:`localtime`.  Its argument is the
    :class:`struct_time` or full 9-tuple (since the dst flag is needed; use ``-1``
    as the dst flag if it is unknown) which expresses the time in *local* time, not
-   UTC.  It returns a floating point number, for compatibility with :func:`time`.
+   UTC.  It returns a floating point number, for compatibility with :func:`.time`.
    If the input value cannot be represented as a valid time, either
    :exc:`OverflowError` or :exc:`ValueError` will be raised (which depends on
    whether the invalid value is caught by Python or the underlying C libraries).
@@ -430,8 +430,8 @@
 
 .. function:: time()
 
-   Return the time as a floating point number expressed in seconds since the epoch,
-   in UTC.  Note that even though the time is always returned as a floating point
+   Return the time in seconds since the epoch as a floating point number.
+   Note that even though the time is always returned as a floating point
    number, not all systems provide time with a better precision than 1 second.
    While this function normally returns non-decreasing values, it can return a
    lower value than a previous call if the system clock has been set back between
diff -r 3d0686d90f55 Doc/library/timeit.rst
--- a/Doc/library/timeit.rst
+++ b/Doc/library/timeit.rst
@@ -99,7 +99,19 @@
          timeit.Timer('for i in range(10): oct(i)', 'gc.enable()').timeit()
 
 
-The module also defines two convenience functions:
+The module also defines three convenience functions:
+
+
+.. function:: default_timer()
+
+   Define a default timer, in a platform-specific manner. On Windows,
+   :func:`time.clock` has microsecond granularity but :func:`time.time`'s
+   granularity is 1/60th of a second; on Unix, :func:`time.clock` has 1/100th of
+   a second granularity and :func:`time.time` is much more precise.  On either
+   platform, :func:`default_timer` measures wall clock time, not the CPU
+   time.  This means that other processes running on the same computer may
+   interfere with the timing.
+
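+   As a small sketch, the timer can also be used directly to time a block of
+   code by hand (the statement being timed is illustrative)::
+
+      from timeit import default_timer
+
+      start = default_timer()
+      sum(range(1000000))                  # code being measured
+      elapsed = default_timer() - start
+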
 
 .. function:: repeat(stmt='pass', setup='pass', timer=<default timer>, repeat=3, number=1000000)
 
@@ -161,13 +173,9 @@
 If :option:`-n` is not given, a suitable number of loops is calculated by trying
 successive powers of 10 until the total time is at least 0.2 seconds.
 
-The default timer function is platform dependent.  On Windows,
-:func:`time.clock` has microsecond granularity but :func:`time.time`'s
-granularity is 1/60th of a second; on Unix, :func:`time.clock` has 1/100th of a
-second granularity and :func:`time.time` is much more precise.  On either
-platform, the default timer functions measure wall clock time, not the CPU time.
-This means that other processes running on the same computer may interfere with
-the timing.  The best thing to do when accurate timing is necessary is to repeat
+:func:`default_timer` measurements can be affected by other programs running on
+the same machine, so the best thing to do when accurate timing is necessary is
+to repeat
 the timing a few times and use the best time.  The :option:`-r` option is good
 for this; the default of 3 repetitions is probably enough in most cases.  On
 Unix, you can use :func:`time.clock` to measure CPU time.
diff -r 3d0686d90f55 Doc/library/tkinter.rst
--- a/Doc/library/tkinter.rst
+++ b/Doc/library/tkinter.rst
@@ -19,12 +19,27 @@
       The Python Tkinter Topic Guide provides a great deal of information on using Tk
       from Python and links to other sources of information on Tk.
 
+   `TKDocs <http://www.tkdocs.com/>`_
+      Extensive tutorial plus friendlier widget pages for some of the widgets.
+
+   `Tkinter reference: a GUI for Python <http://infohost.nmt.edu/tcc/help/pubs/tkinter/>`_
+      On-line reference material.
+
+   `Tkinter docs from effbot <http://effbot.org/tkinterbook/>`_
+      Online reference for tkinter supported by effbot.org.
+
+   `Tcl/Tk manual <http://www.tcl.tk/man/tcl8.5/>`_
+      Official manual for the latest tcl/tk version.
+
+   `Programming Python <http://www.amazon.com/Programming-Python-Mark-Lutz/dp/0596158106/>`_
+      Book by Mark Lutz, with excellent coverage of Tkinter.
+
+   `Modern Tkinter for Busy Python Developers <http://www.amazon.com/Modern-Tkinter-Python-Developers-ebook/dp/B0071QDNLO/>`_
+      Book by Mark Roseman about building attractive and modern graphical user interfaces with Python and Tkinter.
+
    `An Introduction to Tkinter <http://www.pythonware.com/library/an-introduction-to-tkinter.htm>`_
       Fredrik Lundh's on-line reference material.
 
-   `Tkinter reference: a GUI for Python <http://infohost.nmt.edu/tcc/help/pubs/lang.html>`_
-      On-line reference material.
-
    `Python and Tkinter Programming <http://www.amazon.com/exec/obidos/ASIN/1884777813>`_
       The book by John Grayson (ISBN 1-884777-81-3).
 
diff -r 3d0686d90f55 Doc/library/unicodedata.rst
--- a/Doc/library/unicodedata.rst
+++ b/Doc/library/unicodedata.rst
@@ -3,8 +3,8 @@
 
 .. module:: unicodedata
    :synopsis: Access the Unicode Database.
-.. moduleauthor:: Marc-Andre Lemburg <mal@lemburg.com>
-.. sectionauthor:: Marc-Andre Lemburg <mal@lemburg.com>
+.. moduleauthor:: Marc-André Lemburg <mal@lemburg.com>
+.. sectionauthor:: Marc-André Lemburg <mal@lemburg.com>
 .. sectionauthor:: Martin v. Löwis <martin@v.loewis.de>
 
 
diff -r 3d0686d90f55 Doc/library/unittest.rst
--- a/Doc/library/unittest.rst
+++ b/Doc/library/unittest.rst
@@ -640,7 +640,7 @@
 
 Classes can be skipped just like methods: ::
 
-   @skip("showing class skipping")
+   @unittest.skip("showing class skipping")
    class MySkippedTestCase(unittest.TestCase):
        def test_not_run(self):
            pass
@@ -1553,8 +1553,8 @@
    The :class:`TestLoader` class is used to create test suites from classes and
    modules.  Normally, there is no need to create an instance of this class; the
    :mod:`unittest` module provides an instance that can be shared as
-   ``unittest.defaultTestLoader``. Using a subclass or instance, however, allows
-   customization of some configurable properties.
+   :data:`unittest.defaultTestLoader`.  Using a subclass or instance, however,
+   allows customization of some configurable properties.
 
    :class:`TestLoader` objects have the following methods:
 
@@ -1926,9 +1926,10 @@
                    testLoader=unittest.defaultTestLoader, exit=True, verbosity=1, \
                    failfast=None, catchbreak=None, buffer=None, warnings=None)
 
-   A command-line program that runs a set of tests; this is primarily for making
-   test modules conveniently executable.  The simplest use for this function is to
-   include the following line at the end of a test script::
+   A command-line program that loads a set of tests from *module* and runs them;
+   this is primarily for making test modules conveniently executable.
+   The simplest use for this function is to include the following line at the
+   end of a test script::
 
       if __name__ == '__main__':
           unittest.main()
@@ -1939,10 +1940,17 @@
       if __name__ == '__main__':
           unittest.main(verbosity=2)
 
+   The *argv* argument can be a list of options passed to the program, with the
+   first element being the program name.  If not specified or ``None``,
+   the values of :data:`sys.argv` are used.
+
    The *testRunner* argument can either be a test runner class or an already
    created instance of it. By default ``main`` calls :func:`sys.exit` with
    an exit code indicating success or failure of the tests run.
 
+   The *testLoader* argument has to be a :class:`TestLoader` instance,
+   and defaults to :data:`defaultTestLoader`.
+
    ``main`` supports being used from the interactive interpreter by passing in the
    argument ``exit=False``. This displays the result on standard output without
    calling :func:`sys.exit`::
@@ -1950,7 +1958,7 @@
       >>> from unittest import main
       >>> main(module='test_module', exit=False)
 
-   The ``failfast``, ``catchbreak`` and ``buffer`` parameters have the same
+   The *failfast*, *catchbreak* and *buffer* parameters have the same
    effect as the same-name `command-line options`_.
 
    The *warning* argument specifies the :ref:`warning filter <warning-filter>`
@@ -1962,11 +1970,11 @@
    This stores the result of the tests run as the ``result`` attribute.
 
    .. versionchanged:: 3.1
-      The ``exit`` parameter was added.
+      The *exit* parameter was added.
 
    .. versionchanged:: 3.2
-      The ``verbosity``, ``failfast``, ``catchbreak``, ``buffer``
-      and ``warnings`` parameters were added.
+      The *verbosity*, *failfast*, *catchbreak*, *buffer*
+      and *warnings* parameters were added.
 
 
 load_tests Protocol
diff -r 3d0686d90f55 Doc/library/urllib.parse.rst
--- a/Doc/library/urllib.parse.rst
+++ b/Doc/library/urllib.parse.rst
@@ -22,11 +22,11 @@
 to an absolute URL given a "base URL."
 
 The module has been designed to match the Internet RFC on Relative Uniform
-Resource Locators (and discovered a bug in an earlier draft!). It supports the
-following URL schemes: ``file``, ``ftp``, ``gopher``, ``hdl``, ``http``,
-``https``, ``imap``, ``mailto``, ``mms``, ``news``, ``nntp``, ``prospero``,
-``rsync``, ``rtsp``, ``rtspu``, ``sftp``, ``shttp``, ``sip``, ``sips``,
-``snews``, ``svn``, ``svn+ssh``, ``telnet``, ``wais``.
+Resource Locators. It supports the following URL schemes: ``file``, ``ftp``,
+``gopher``, ``hdl``, ``http``, ``https``, ``imap``, ``mailto``, ``mms``,
+``news``, ``nntp``, ``prospero``, ``rsync``, ``rtsp``, ``rtspu``, ``sftp``,
+``shttp``, ``sip``, ``sips``, ``snews``, ``svn``, ``svn+ssh``, ``telnet``,
+``wais``.
 
 The :mod:`urllib.parse` module defines functions that fall into two broad
 categories: URL parsing and URL quoting. These are covered in detail in
@@ -512,9 +512,10 @@
 
    Convert a mapping object or a sequence of two-element tuples, which may
    either be a :class:`str` or a :class:`bytes`,  to a "percent-encoded"
-   string.  The resultant string must be converted to bytes using the
-   user-specified encoding before it is sent to :func:`urlopen` as the optional
-   *data* argument.
+   string.  If the resultant string is to be used as *data* for a POST
+   operation with the :func:`urlopen` function, it should be properly encoded
+   to bytes, otherwise a :exc:`TypeError` is raised.
+
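+   For instance, a query mapping might be encoded and converted to bytes
+   before being passed to :func:`urlopen` (a minimal sketch; the URL is
+   illustrative)::
+
+      from urllib.parse import urlencode
+      from urllib.request import urlopen
+
+      params = urlencode({'spam': 1, 'eggs': 2})
+      response = urlopen('http://www.example.com/cgi-bin/query',
+                         params.encode('utf-8'))
+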
    The resulting string is a series of ``key=value`` pairs separated by ``'&'``
    characters, where both *key* and *value* are quoted using :func:`quote_plus`
    above. When a sequence of two-element tuples is used as the *query*
diff -r 3d0686d90f55 Doc/library/urllib.request.rst
--- a/Doc/library/urllib.request.rst
+++ b/Doc/library/urllib.request.rst
@@ -2,9 +2,10 @@
 =============================================================
 
 .. module:: urllib.request
-   :synopsis: Next generation URL opening library.
+   :synopsis: Extensible library for opening URLs.
 .. moduleauthor:: Jeremy Hylton <jeremy@alum.mit.edu>
 .. sectionauthor:: Moshe Zadka <moshez@users.sourceforge.net>
+.. sectionauthor:: Senthil Kumaran <senthil@uthcode.com>
 
 
 The :mod:`urllib.request` module defines functions and classes which help in
@@ -20,16 +21,26 @@
    Open the URL *url*, which can be either a string or a
    :class:`Request` object.
 
-   *data* may be a bytes object specifying additional data to send to the
+   *data* must be a bytes object specifying additional data to be sent to the
    server, or ``None`` if no such data is needed. *data* may also be an
    iterable object and in that case Content-Length value must be specified in
    the headers. Currently HTTP requests are the only ones that use *data*; the
    HTTP request will be a POST instead of a GET when the *data* parameter is
-   provided.  *data* should be a buffer in the standard
+   provided.
+
+   *data* should be a buffer in the standard
    :mimetype:`application/x-www-form-urlencoded` format.  The
    :func:`urllib.parse.urlencode` function takes a mapping or sequence of
-   2-tuples and returns a string in this format. urllib.request module uses
-   HTTP/1.1 and includes ``Connection:close`` header in its HTTP requests.
+   2-tuples and returns a string in this format. It should be encoded to bytes
+   before being used as the *data* parameter. The charset parameter of the
+   ``Content-Type`` header may be used to specify the encoding. If no charset
+   parameter is sent with the ``Content-Type`` header, a server following the
+   HTTP 1.1 recommendation may assume that the data is encoded in ISO-8859-1.
+   It is advisable to include a charset parameter matching the encoding of the
+   *data* in the ``Content-Type`` header of the :class:`Request`.
+
+   The urllib.request module uses HTTP/1.1 and includes a ``Connection:close``
+   header in its HTTP requests.
 
    The optional *timeout* parameter specifies a timeout in seconds for
    blocking operations like the connection attempt (if not specified,
@@ -46,8 +57,8 @@
       If neither *cafile* nor *capath* is specified, an HTTPS request
       will not do any verification of the server's certificate.
 
-   This function returns a file-like object with two additional methods from
-   the :mod:`urllib.response` module
+   This function returns a file-like object that works as a :term:`context manager`,
+   with two additional methods from the :mod:`urllib.response` module
 
    * :meth:`geturl` --- return the URL of the resource retrieved,
      commonly used to determine if a redirect was followed
@@ -66,9 +77,10 @@
    are handled through the proxy when they are set.
 
    The legacy ``urllib.urlopen`` function from Python 2.6 and earlier has been
-   discontinued; :func:`urlopen` corresponds to the old ``urllib2.urlopen``.
-   Proxy handling, which was done by passing a dictionary parameter to
-   ``urllib.urlopen``, can be obtained by using :class:`ProxyHandler` objects.
+   discontinued; :func:`urllib.request.urlopen` corresponds to the old
+   ``urllib2.urlopen``.  Proxy handling, which was done by passing a dictionary
+   parameter to ``urllib.urlopen``, can be obtained by using
+   :class:`ProxyHandler` objects.
 
    .. versionchanged:: 3.2
       *cafile* and *capath* were added.
@@ -83,10 +95,11 @@
 .. function:: install_opener(opener)
 
    Install an :class:`OpenerDirector` instance as the default global opener.
-   Installing an opener is only necessary if you want urlopen to use that opener;
-   otherwise, simply call :meth:`OpenerDirector.open` instead of :func:`urlopen`.
-   The code does not check for a real :class:`OpenerDirector`, and any class with
-   the appropriate interface will work.
+   Installing an opener is only necessary if you want urlopen to use that
+   opener; otherwise, simply call :meth:`OpenerDirector.open` instead of
+   :func:`~urllib.request.urlopen`.  The code does not check for a real
+   :class:`OpenerDirector`, and any class with the appropriate interface will
+   work.
 
 
 .. function:: build_opener([handler, ...])
@@ -138,14 +151,21 @@
 
    *url* should be a string containing a valid URL.
 
-   *data* may be a string specifying additional data to send to the
-   server, or ``None`` if no such data is needed.  Currently HTTP
-   requests are the only ones that use *data*; the HTTP request will
-   be a POST instead of a GET when the *data* parameter is provided.
-   *data* should be a buffer in the standard
-   :mimetype:`application/x-www-form-urlencoded` format.  The
-   :func:`urllib.parse.urlencode` function takes a mapping or sequence
-   of 2-tuples and returns a string in this format.
+   *data* must be a bytes object specifying additional data to send to the
+   server, or ``None`` if no such data is needed.  Currently HTTP requests are
+   the only ones that use *data*; the HTTP request will be a POST instead of a
+   GET when the *data* parameter is provided.  *data* should be a buffer in the
+   standard :mimetype:`application/x-www-form-urlencoded` format.
+
+   The :func:`urllib.parse.urlencode` function takes a mapping or sequence of
+   2-tuples and returns a string in this format. It should be encoded to bytes
+   before being used as the *data* parameter. The charset parameter of the
+   ``Content-Type`` header may be used to specify the encoding. If no charset
+   parameter is sent with the ``Content-Type`` header, a server following the
+   HTTP 1.1 recommendation may assume that the data is encoded in ISO-8859-1.
+   It is advisable to include a charset parameter matching the encoding of the
+   *data* in the ``Content-Type`` header of the :class:`Request`.
+
 
    *headers* should be a dictionary, and will be treated as if
    :meth:`add_header` was called with each key and value as arguments.
@@ -157,6 +177,9 @@
    :mod:`urllib`'s default user agent string is
    ``"Python-urllib/2.6"`` (on Python 2.6).
 
+   An example of using the ``Content-Type`` header with the *data* argument
+   would be sending a dictionary like
+   ``{"Content-Type": "application/x-www-form-urlencoded; charset=utf-8"}``.
+
    The final two arguments are only of interest for correct handling
    of third-party HTTP cookies:
 
@@ -424,6 +447,17 @@
    Return the selector --- the part of the URL that is sent to the server.
 
 
+.. method:: Request.get_header(header_name, default=None)
+
+   Return the value of the given header. If the header is not present, return
+   the default value.
+
+
+.. method:: Request.header_items()
+
+   Return a list of tuples (header_name, header_value) of the Request headers.
+
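+   A short sketch of both accessors (the header value is illustrative)::
+
+      req = urllib.request.Request('http://www.example.com/')
+      req.add_header('User-agent', 'ExampleClient/1.0')
+      agent = req.get_header('User-agent', 'Python-urllib')  # the added value
+      all_headers = req.header_items()     # list of (name, value) tuples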
+
 .. method:: Request.set_proxy(host, type)
 
    Prepare the request by connecting to a proxy server. The *host* and *type* will
@@ -868,10 +902,9 @@
    Open the file locally, if there is no host name, or the host name is
    ``'localhost'``.
 
-   This method is applicable only for local hostnames. When a remote hostname
-   is given, an :exc:`URLError` is raised.
-
-.. versionchanged:: 3.2
+   .. versionchanged:: 3.2
+      This method is applicable only for local hostnames.  When a remote
+      hostname is given, an :exc:`URLError` is raised.
 
 
 .. _ftp-handler-objects:
@@ -967,8 +1000,17 @@
 the various ways in which a (X)HTML or a XML document could have specified its
 encoding information.
 
-As python.org website uses *utf-8* encoding as specified in it's meta tag, we
-will use same for decoding the bytes object. ::
+As the python.org website uses *utf-8* encoding as specified in its meta tag, we
+will use the same for decoding the bytes object. ::
+
+   >>> with urllib.request.urlopen('http://www.python.org/') as f:
+   ...     print(f.read(100).decode('utf-8'))
+   ...
+   <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+   "http://www.w3.org/TR/xhtml1/DTD/xhtm
+
+It is also possible to achieve the same result without using the
+:term:`context manager` approach. ::
 
    >>> import urllib.request
    >>> f = urllib.request.urlopen('http://www.python.org/')
@@ -976,7 +1018,6 @@
    <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
    "http://www.w3.org/TR/xhtml1/DTD/xhtm
 
-
 In the following example, we are sending a data-stream to the stdin of a CGI
 and reading the data it returns to us. Note that this example will only work
 when the Python installation supports SSL. ::
@@ -1045,8 +1086,9 @@
    opener.open('http://www.example.com/')
 
 Also, remember that a few standard headers (:mailheader:`Content-Length`,
-:mailheader:`Content-Type` and :mailheader:`Host`) are added when the
-:class:`Request` is passed to :func:`urlopen` (or :meth:`OpenerDirector.open`).
+:mailheader:`Content-Type` without charset parameter and :mailheader:`Host`)
+are added when the :class:`Request` is passed to :func:`urlopen` (or
+:meth:`OpenerDirector.open`).
 
 .. _urllib-examples:
 
@@ -1064,9 +1106,12 @@
 
    >>> import urllib.request
    >>> import urllib.parse
-   >>> params = urllib.parse.urlencode({'spam': 1, 'eggs': 2, 'bacon': 0})
-   >>> params = params.encode('utf-8')
-   >>> f = urllib.request.urlopen("http://www.musi-cal.com/cgi-bin/query", params)
+   >>> data = urllib.parse.urlencode({'spam': 1, 'eggs': 2, 'bacon': 0})
+   >>> data = data.encode('utf-8')
+   >>> request = urllib.request.Request("http://requestb.in/xrbl82xr")
+   >>> # adding charset parameter to the Content-Type header.
+   >>> request.add_header("Content-Type","application/x-www-form-urlencoded;charset=utf-8")
+   >>> f = urllib.request.urlopen(request, data)
    >>> print(f.read().decode('utf-8'))
 
 The following example uses an explicitly specified HTTP proxy, overriding
@@ -1114,10 +1159,10 @@
    size in response to a retrieval request.
 
    If the *url* uses the :file:`http:` scheme identifier, the optional *data*
-   argument may be given to specify a ``POST`` request (normally the request type
-   is ``GET``).  The *data* argument must in standard
-   :mimetype:`application/x-www-form-urlencoded` format; see the :func:`urlencode`
-   function below.
+   argument may be given to specify a ``POST`` request (normally the request
+   type is ``GET``).  The *data* argument must be a bytes object in standard
+   :mimetype:`application/x-www-form-urlencoded` format; see the
+   :func:`urlencode` function below.
 
    :func:`urlretrieve` will raise :exc:`ContentTooShortError` when it detects that
    the amount of data available  was less than the expected amount (which is the
diff -r 3d0686d90f55 Doc/library/weakref.rst
--- a/Doc/library/weakref.rst
+++ b/Doc/library/weakref.rst
@@ -24,7 +24,10 @@
 A weak reference to an object is not enough to keep the object alive: when the
 only remaining references to a referent are weak references,
 :term:`garbage collection` is free to destroy the referent and reuse its memory
-for something else.  A primary use for weak references is to implement caches or
+for something else.  However, until the object is actually destroyed the weak
+reference may return the object even if there are no strong references to it.
+
+A primary use for weak references is to implement caches or
 mappings holding large objects, where it's desired that a large object not be
 kept alive solely because it appears in a cache or mapping.
 
@@ -53,12 +56,6 @@
 directly.  The low-level machinery used by the weak dictionary implementations
 is exposed by the :mod:`weakref` module for the benefit of advanced uses.
 
-.. note::
-
-   Weak references to an object are cleared before the object's :meth:`__del__`
-   is called, to ensure that the weak reference callback (if any) finds the
-   object still alive.
-
 Not all objects can be weakly referenced; those objects which can include class
 instances, functions written in Python (but not in C), instance methods, sets,
 frozensets, some :term:`file objects <file object>`, :term:`generator`\s, type
diff -r 3d0686d90f55 Doc/library/webbrowser.rst
--- a/Doc/library/webbrowser.rst
+++ b/Doc/library/webbrowser.rst
@@ -36,7 +36,9 @@
 module. It accepts an URL as the argument. It accepts the following optional
 parameters: ``-n`` opens the URL in a new browser window, if possible;
 ``-t`` opens the URL in a new browser page ("tab"). The options are,
-naturally, mutually exclusive.
+naturally, mutually exclusive.  Usage example::
+
+   python -m webbrowser -t "http://www.python.org"
 
 The following exception is defined:
 
@@ -133,9 +135,9 @@
 +-----------------------+-----------------------------------------+-------+
 | ``'windows-default'`` | :class:`WindowsDefault`                 | \(2)  |
 +-----------------------+-----------------------------------------+-------+
-| ``'internet-config'`` | :class:`InternetConfig`                 | \(3)  |
+| ``'macosx'``          | :class:`MacOSX('default')`              | \(3)  |
 +-----------------------+-----------------------------------------+-------+
-| ``'macosx'``          | :class:`MacOSX('default')`              | \(4)  |
+| ``'safari'``          | :class:`MacOSX('safari')`               | \(3)  |
 +-----------------------+-----------------------------------------+-------+
 
 Notes:
@@ -151,9 +153,6 @@
    Only on Windows platforms.
 
 (3)
-   Only on Mac OS platforms; requires the standard MacPython :mod:`ic` module.
-
-(4)
    Only on Mac OS X platform.
 
 Here are some simple examples::
diff -r 3d0686d90f55 Doc/library/winreg.rst
--- a/Doc/library/winreg.rst
+++ b/Doc/library/winreg.rst
@@ -243,7 +243,7 @@
    specified in *file_name* is relative to the remote computer.
 
 
-.. function:: OpenKey(key, sub_key, reserved=0, access=KEY_ALL_ACCESS)
+.. function:: OpenKey(key, sub_key, reserved=0, access=KEY_READ)
 
    Opens the specified key, returning a :ref:`handle object <handle-object>`.
 
@@ -252,9 +252,9 @@
 
    *sub_key* is a string that identifies the sub_key to open.
 
-   *res* is a reserved integer, and must be zero.  The default is zero.
+   *reserved* is a reserved integer, and must be zero.  The default is zero.
 
-   *sam* is an integer that specifies an access mask that describes the desired
+   *access* is an integer that specifies an access mask that describes the desired
    security access for the key.  Default is :const:`KEY_READ`.  See :ref:`Access
    Rights <access-rights>` for other allowed values.
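+
+   As a short sketch, a key can be opened read-only and used as a context
+   manager (the key path and value name are illustrative)::
+
+      import winreg
+
+      with winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Python") as key:
+          value, kind = winreg.QueryValueEx(key, "SomeValue")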
 
diff -r 3d0686d90f55 Doc/library/xml.dom.minidom.rst
--- a/Doc/library/xml.dom.minidom.rst
+++ b/Doc/library/xml.dom.minidom.rst
@@ -15,6 +15,14 @@
 Model interface.  It is intended to be simpler than the full DOM and also
 significantly smaller.
 
+.. note::
+
+   The :mod:`xml.dom.minidom` module provides an implementation of the W3C-DOM,
+   with an API similar to that in other programming languages.  Users who are
+   unfamiliar with the W3C-DOM interface or who would like to write less code
+   for processing XML files should consider using the
+   :mod:`xml.etree.ElementTree` module instead.
+
 DOM applications typically start by parsing some XML into a DOM.  With
 :mod:`xml.dom.minidom`, this is done through the parse functions::
 
diff -r 3d0686d90f55 Doc/library/xml.dom.pulldom.rst
--- a/Doc/library/xml.dom.pulldom.rst
+++ b/Doc/library/xml.dom.pulldom.rst
@@ -9,34 +9,72 @@
 
 --------------
 
-:mod:`xml.dom.pulldom` allows building only selected portions of a Document
-Object Model representation of a document from SAX events.
+The :mod:`xml.dom.pulldom` module provides a "pull parser" which can also be
+asked to produce DOM-accessible fragments of the document where necessary. The
+basic concept involves pulling "events" from a stream of incoming XML and
+processing them. In contrast to SAX which also employs an event-driven
+processing model together with callbacks, the user of a pull parser is
+responsible for explicitly pulling events from the stream, looping over those
+events until either processing is finished or an error condition occurs.
 
+Example::
 
-.. class:: PullDOM(documentFactory=None)
+   from xml.dom import pulldom
 
-   :class:`xml.sax.handler.ContentHandler` implementation that ...
+   doc = pulldom.parse('sales_items.xml')
+   for event, node in doc:
+       if event == pulldom.START_ELEMENT and node.tagName == 'item':
+           if int(node.getAttribute('price')) > 50:
+               doc.expandNode(node)
+               print(node.toxml())
 
+``event`` is a constant and can be one of:
 
-.. class:: DOMEventStream(stream, parser, bufsize)
+* :data:`START_ELEMENT`
+* :data:`END_ELEMENT`
+* :data:`COMMENT`
+* :data:`START_DOCUMENT`
+* :data:`END_DOCUMENT`
+* :data:`CHARACTERS`
+* :data:`PROCESSING_INSTRUCTION`
+* :data:`IGNORABLE_WHITESPACE`
 
-   ...
+``node`` is an object of type :class:`xml.dom.minidom.Document`,
+:class:`xml.dom.minidom.Element` or :class:`xml.dom.minidom.Text`.
+
+Since the document is treated as a "flat" stream of events, the document "tree"
+is implicitly traversed and the desired elements are found regardless of their
+depth in the tree. In other words, one does not need to consider hierarchical
+issues such as recursive searching of the document nodes, although if the
+context of elements were important, one would either need to maintain some
+context-related state (i.e. remembering where one is in the document at any
+given point) or to make use of the :func:`DOMEventStream.expandNode` method
+and switch to DOM-related processing.
+
+
+.. class:: PullDOM(documentFactory=None)
+
+   Subclass of :class:`xml.sax.handler.ContentHandler`.
 
 
 .. class:: SAX2DOM(documentFactory=None)
 
-   :class:`xml.sax.handler.ContentHandler` implementation that ...
+   Subclass of :class:`xml.sax.handler.ContentHandler`.
 
 
 .. function:: parse(stream_or_string, parser=None, bufsize=None)
 
-   ...
+   Return a :class:`DOMEventStream` from the given input. *stream_or_string* may be
+   either a file name or a file-like object. *parser*, if given, must be an
+   :class:`~xml.sax.xmlreader.XMLReader` object. This function will change the
+   document handler of the parser and activate namespace support; other parser
+   configuration (like setting an entity resolver) must have been done in advance.
 
+If you have XML in a string, you can use the :func:`parseString` function instead:
 
 .. function:: parseString(string, parser=None)
 
-   ...
-
+   Return a :class:`DOMEventStream` that represents the (Unicode) *string*.
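+
+   A tiny sketch of its use (the XML snippet is illustrative)::
+
+      from xml.dom import pulldom
+
+      doc = pulldom.parseString('<greeting><hello>world</hello></greeting>')
+      for event, node in doc:
+          if event == pulldom.START_ELEMENT:
+              print(node.tagName)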
 
 .. data:: default_bufsize
 
@@ -45,24 +83,37 @@
    The value of this variable can be changed before calling :func:`parse` and
    the new value will take effect.
 
-
 .. _domeventstream-objects:
 
 DOMEventStream Objects
 ----------------------
 
+.. class:: DOMEventStream(stream, parser, bufsize)
 
-.. method:: DOMEventStream.getEvent()
 
-   ...
+   .. method:: getEvent()
 
+      Return a tuple containing *event* and the current *node* as
+      :class:`xml.dom.minidom.Document` if event equals :data:`START_DOCUMENT`,
+      :class:`xml.dom.minidom.Element` if event equals :data:`START_ELEMENT` or
+      :data:`END_ELEMENT` or :class:`xml.dom.minidom.Text` if event equals
+      :data:`CHARACTERS`.
+      The current node does not contain information about its children, unless
+      :func:`expandNode` is called.
 
-.. method:: DOMEventStream.expandNode(node)
+   .. method:: expandNode(node)
 
-   ...
+      Expands all children of *node* into *node*. Example::
 
+          xml = '<html><title>Foo</title> <p>Some text <div>and more</div></p> </html>'
+          doc = pulldom.parseString(xml)
+          for event, node in doc:
+              if event == pulldom.START_ELEMENT and node.tagName == 'p':
+                  # Following statement only prints '<p/>'
+                  print(node.toxml())
+                  doc.expandNode(node)
+                  # Following statement prints node with all its children '<p>Some text <div>and more</div></p>'
+                  print(node.toxml())
 
-.. method:: DOMEventStream.reset()
+   .. method:: reset()
 
-   ...
-
diff -r 3d0686d90f55 Doc/library/xml.etree.elementtree.rst
--- a/Doc/library/xml.etree.elementtree.rst
+++ b/Doc/library/xml.etree.elementtree.rst
@@ -95,11 +95,14 @@
 .. function:: iterparse(source, events=None, parser=None)
 
    Parses an XML section into an element tree incrementally, and reports what's
-   going on to the user.  *source* is a filename or :term:`file object` containing
-   XML data.  *events* is a list of events to report back.  If omitted, only "end"
-   events are reported.  *parser* is an optional parser instance.  If not
-   given, the standard :class:`XMLParser` parser is used.  Returns an
-   :term:`iterator` providing ``(event, elem)`` pairs.
+   going on to the user.  *source* is a filename or :term:`file object`
+   containing XML data.  *events* is a list of events to report back.  The
+   supported events are the strings ``"start"``, ``"end"``, ``"start-ns"``
+   and ``"end-ns"`` (the "ns" events are used to get detailed namespace
+   information).  If *events* is omitted, only ``"end"`` events are reported.
+   *parser* is an optional parser instance.  If not given, the standard
+   :class:`XMLParser` parser is used.  Returns an :term:`iterator` providing
+   ``(event, elem)`` pairs.
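+
+   As a quick sketch, start and end events can be requested explicitly (the
+   file name and ``process()`` are illustrative)::
+
+      from xml.etree.ElementTree import iterparse
+
+      for event, elem in iterparse('data.xml', events=('start', 'end')):
+          if event == 'end' and elem.tag == 'record':
+              process(elem)      # handle the fully parsed element
+              elem.clear()       # free memory used by processed children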
 
    .. note::
 
diff -r 3d0686d90f55 Doc/library/zipfile.rst
--- a/Doc/library/zipfile.rst
+++ b/Doc/library/zipfile.rst
@@ -169,14 +169,18 @@
    Return a list of archive members by name.
 
 
+.. index::
+   single: universal newlines; zipfile.ZipFile.open method
+
 .. method:: ZipFile.open(name, mode='r', pwd=None)
 
-   Extract a member from the archive as a file-like object (ZipExtFile). *name* is
-   the name of the file in the archive, or a :class:`ZipInfo` object. The *mode*
-   parameter, if included, must be one of the following: ``'r'`` (the  default),
-   ``'U'``, or ``'rU'``. Choosing ``'U'`` or  ``'rU'`` will enable universal newline
-   support in the read-only object. *pwd* is the password used for encrypted files.
-   Calling  :meth:`open` on a closed ZipFile will raise a  :exc:`RuntimeError`.
+   Extract a member from the archive as a file-like object (ZipExtFile). *name*
+   is the name of the file in the archive, or a :class:`ZipInfo` object. The
+   *mode* parameter, if included, must be one of the following: ``'r'`` (the
+   default), ``'U'``, or ``'rU'``. Choosing ``'U'`` or  ``'rU'`` will enable
+   :term:`universal newlines` support in the read-only object.  *pwd* is the
+   password used for encrypted files.  Calling  :meth:`open` on a closed
+   ZipFile will raise a  :exc:`RuntimeError`.
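+
+   A short sketch of reading one member (the archive and member names are
+   illustrative)::
+
+      import zipfile
+
+      with zipfile.ZipFile('archive.zip') as zf:
+          with zf.open('readme.txt') as member:
+              data = member.read()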
 
    .. note::
 
diff -r 3d0686d90f55 Doc/reference/compound_stmts.rst
--- a/Doc/reference/compound_stmts.rst
+++ b/Doc/reference/compound_stmts.rst
@@ -307,12 +307,23 @@
 :keyword:`try` clause is executed, including any :keyword:`except` and
 :keyword:`else` clauses.  If an exception occurs in any of the clauses and is
 not handled, the exception is temporarily saved. The :keyword:`finally` clause
-is executed.  If there is a saved exception, it is re-raised at the end of the
-:keyword:`finally` clause. If the :keyword:`finally` clause raises another
-exception or executes a :keyword:`return` or :keyword:`break` statement, the
-saved exception is set as the context of the new exception.  The exception
-information is not available to the program during execution of the
-:keyword:`finally` clause.
+is executed.  If there is a saved exception, it is re-raised at the end of
+the :keyword:`finally` clause.  If the :keyword:`finally` clause raises
+another exception, the saved exception is set as the context of the new
+exception; if the :keyword:`finally` clause executes a :keyword:`return` or
+:keyword:`break` statement, the saved exception is discarded::
+
+    def f():
+        try:
+            1/0
+        finally:
+            return 42
+
+    >>> f()
+    42
+
+The exception information is not available to the program during execution of
+the :keyword:`finally` clause.
 
 .. index::
    statement: return
@@ -535,6 +546,11 @@
 access the local variables of the function containing the def.  See section
 :ref:`naming` for details.
 
+.. seealso::
+
+   :pep:`3107` - Function Annotations
+      The original specification for function annotations.
+
 
 .. _class:
 
diff -r 3d0686d90f55 Doc/reference/expressions.rst
--- a/Doc/reference/expressions.rst
+++ b/Doc/reference/expressions.rst
@@ -354,8 +354,15 @@
 
 .. index:: object: generator
 
-The following generator's methods can be used to control the execution of a
-generator function:
+
+Generator-iterator methods
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This subsection describes the methods of a generator iterator.  They can
+be used to control the execution of a generator function.
+
+Note that calling any of the generator methods below when the generator
+is already executing raises a :exc:`ValueError` exception.
 
 .. index:: exception: StopIteration
 
diff -r 3d0686d90f55 Doc/reference/simple_stmts.rst
--- a/Doc/reference/simple_stmts.rst
+++ b/Doc/reference/simple_stmts.rst
@@ -393,7 +393,6 @@
 the sliced object).
 
 .. versionchanged:: 3.2
-
    Previously it was illegal to delete a name from the local namespace if it
    occurs as a free variable in a nested block.
 
diff -r 3d0686d90f55 Doc/tools/sphinxext/download.html
--- a/Doc/tools/sphinxext/download.html
+++ b/Doc/tools/sphinxext/download.html
@@ -39,8 +39,12 @@
   </tr>
 </table>
 
+<p>These archives contain all the content in the documentation.</p>
 
-<p>These archives contain all the content in the documentation.</p>
+<p>HTML Help (<tt>.chm</tt>) files are made available in the "Windows" section
+on the <a href="http://python.org/download/releases/{{ release[:5] }}/">Python
+download page</a>.</p>
+
 
 <h2>Unpacking</h2>
 
diff -r 3d0686d90f55 Doc/tools/sphinxext/indexsidebar.html
--- a/Doc/tools/sphinxext/indexsidebar.html
+++ b/Doc/tools/sphinxext/indexsidebar.html
@@ -3,20 +3,15 @@
 	    <h3>Docs for other versions</h3>
 	    <ul>
 	      <li><a href="http://docs.python.org/2.7/">Python 2.7 (stable)</a></li>
-	      <li><a href="http://docs.python.org/3.1/">Python 3.1 (stable)</a></li>
+	      <li><a href="http://docs.python.org/3.3/">Python 3.3 (in development)</a></li>
               <li><a href="http://www.python.org/doc/versions/">Old versions</a></li>
             </ul>
 
             <h3>Other resources</h3>
             <ul>
               {# XXX: many of these should probably be merged in the main docs #}
-              <li><a href="http://www.python.org/doc/faq/">FAQs</a></li>
-              <li><a href="http://www.python.org/doc/essays/">Guido's Essays</a></li>
-              <li><a href="http://www.python.org/doc/newstyle/">New-style Classes</a></li>
               <li><a href="http://www.python.org/dev/peps/">PEP Index</a></li>
               <li><a href="http://wiki.python.org/moin/BeginnersGuide">Beginner's Guide</a></li>
               <li><a href="http://wiki.python.org/moin/PythonBooks">Book List</a></li>
               <li><a href="http://www.python.org/doc/av/">Audio/Visual Talks</a></li>
-              <li><a href="http://www.python.org/doc/other/">Other Doc Collections</a></li>
-              <li><a href="{{ pathto('bugs') }}">Report a Bug</a></li>
             </ul>
diff -r 3d0686d90f55 Doc/tools/sphinxext/layout.html
--- a/Doc/tools/sphinxext/layout.html
+++ b/Doc/tools/sphinxext/layout.html
@@ -2,6 +2,7 @@
 {% block rootrellink %}
         <li><img src="{{ pathto('_static/py.png', 1) }}" alt=""
                  style="vertical-align: middle; margin-top: -1px"/></li>
+        <li><a href="http://www.python.org/">Python</a>{{ reldelim1 }}</li>
         <li><a href="{{ pathto('index') }}">{{ shorttitle }}</a>{{ reldelim1 }}</li>
 {% endblock %}
 {% block extrahead %}
diff -r 3d0686d90f55 Doc/tools/sphinxext/pydoctheme/static/pydoctheme.css
--- /dev/null
+++ b/Doc/tools/sphinxext/pydoctheme/static/pydoctheme.css
@@ -0,0 +1,170 @@
+@import url("default.css");
+
+body {
+    background-color: white;
+    margin-left: 1em;
+    margin-right: 1em;
+}
+
+div.related {
+    margin-bottom: 1.2em;
+    padding: 0.5em 0;
+    border-top: 1px solid #ccc;
+    margin-top: 0.5em;
+}
+
+div.related a:hover {
+    color: #0095C4;
+}
+
+div.related:first-child {
+    border-top: 0;
+    border-bottom: 1px solid #ccc;
+}
+
+div.sphinxsidebar {
+    background-color: #eeeeee;
+    border-radius: 5px;
+    line-height: 130%;
+    font-size: smaller;
+}
+
+div.sphinxsidebar h3, div.sphinxsidebar h4 {
+    margin-top: 1.5em;
+}
+
+div.sphinxsidebarwrapper > h3:first-child {
+    margin-top: 0.2em;
+}
+
+div.sphinxsidebarwrapper > ul > li > ul > li {
+    margin-bottom: 0.4em;
+}
+
+div.sphinxsidebar a:hover {
+    color: #0095C4;
+}
+
+div.sphinxsidebar input {
+    font-family: 'Lucida Grande',Arial,sans-serif;
+    border: 1px solid #999999;
+    font-size: smaller;
+    border-radius: 3px;
+}
+
+div.sphinxsidebar input[type=text] {
+    max-width: 150px;
+}
+
+div.body {
+    padding: 0 0 0 1.2em;
+}
+
+div.body p {
+    line-height: 140%;
+}
+
+div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 {
+    margin: 0;
+    border: 0;
+    padding: 0.3em 0;
+}
+
+div.body hr {
+    border: 0;
+    background-color: #ccc;
+    height: 1px;
+}
+
+div.body pre {
+    border-radius: 3px;
+    border: 1px solid #ac9;
+}
+
+div.body div.admonition, div.body div.impl-detail {
+    border-radius: 3px;
+}
+
+div.body div.impl-detail > p {
+    margin: 0;
+}
+
+div.body div.seealso {
+    border: 1px solid #dddd66;
+}
+
+div.body a {
+    color: #00608f;
+}
+
+div.body a:visited {
+    color: #30306f;
+}
+
+div.body a:hover {
+    color: #00B0E4;
+}
+
+tt, pre {
+    font-family: monospace, sans-serif;
+    font-size: 96.5%;
+}
+
+div.body tt {
+    border-radius: 3px;
+}
+
+div.body tt.descname {
+    font-size: 120%;
+}
+
+div.body tt.xref, div.body a tt {
+    font-weight: normal;
+}
+
+p.deprecated {
+    border-radius: 3px;
+}
+
+table.docutils {
+    border: 1px solid #ddd;
+    min-width: 20%;
+    border-radius: 3px;
+    margin-top: 10px;
+    margin-bottom: 10px;
+}
+
+table.docutils td, table.docutils th {
+    border: 1px solid #ddd !important;
+    border-radius: 3px;
+}
+
+table p, table li {
+    text-align: left !important;
+}
+
+table.docutils th {
+    background-color: #eee;
+    padding: 0.3em 0.5em;
+}
+
+table.docutils td {
+    background-color: white;
+    padding: 0.3em 0.5em;
+}
+
+table.footnote, table.footnote td {
+    border: 0 !important;
+}
+
+div.footer {
+    line-height: 150%;
+    margin-top: -2em;
+    text-align: right;
+    width: auto;
+    margin-right: 10px;
+}
+
+div.footer a:hover {
+    color: #0095C4;
+}
diff -r 3d0686d90f55 Doc/tools/sphinxext/pydoctheme/theme.conf
--- /dev/null
+++ b/Doc/tools/sphinxext/pydoctheme/theme.conf
@@ -0,0 +1,23 @@
+[theme]
+inherit = default
+stylesheet = pydoctheme.css
+pygments_style = sphinx
+
+[options]
+bodyfont = 'Lucida Grande', Arial, sans-serif
+headfont = 'Lucida Grande', Arial, sans-serif
+footerbgcolor = white
+footertextcolor = #555555
+relbarbgcolor = white
+relbartextcolor = #666666
+relbarlinkcolor = #444444
+sidebarbgcolor = white
+sidebartextcolor = #444444
+sidebarlinkcolor = #444444
+bgcolor = white
+textcolor = #222222
+linkcolor = #0090c0
+visitedlinkcolor = #00608f
+headtextcolor = #1a1a1a
+headbgcolor = white
+headlinkcolor = #aaaaaa
diff -r 3d0686d90f55 Doc/tools/sphinxext/pyspecific.py
--- a/Doc/tools/sphinxext/pyspecific.py
+++ b/Doc/tools/sphinxext/pyspecific.py
@@ -27,10 +27,10 @@
     self.body.append(self.starttag(node, 'p', CLASS=node['type']))
     text = versionlabels[node['type']] % node['version']
     if len(node):
-        text += ': '
+        text += ':'
     else:
         text += '.'
-    self.body.append('<span class="versionmodified">%s</span>' % text)
+    self.body.append('<span class="versionmodified">%s</span> ' % text)
 
 from sphinx.writers.html import HTMLTranslator
 from sphinx.locale import versionlabels
diff -r 3d0686d90f55 Doc/tools/sphinxext/static/copybutton.js
--- a/Doc/tools/sphinxext/static/copybutton.js
+++ b/Doc/tools/sphinxext/static/copybutton.js
@@ -17,7 +17,8 @@
         'cursor':'pointer', 'position': 'absolute', 'top': '0', 'right': '0',
         'border-color': border_color, 'border-style': border_style,
         'border-width': border_width, 'color': border_color, 'text-size': '75%',
-        'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em'
+        'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em',
+        'border-radius': '0 3px 0 0'
     }
 
     // create and add the button to all the code blocks that contain >>>
diff -r 3d0686d90f55 Doc/tools/sphinxext/static/sidebar.js
--- /dev/null
+++ b/Doc/tools/sphinxext/static/sidebar.js
@@ -0,0 +1,155 @@
+/*
+ * sidebar.js
+ * ~~~~~~~~~~
+ *
+ * This script makes the Sphinx sidebar collapsible.
+ *
+ * .sphinxsidebar contains .sphinxsidebarwrapper.  This script adds in
+ * .sphinxsidebar, after .sphinxsidebarwrapper, the #sidebarbutton used to
+ * collapse and expand the sidebar.
+ *
+ * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden and the
+ * width of the sidebar and the margin-left of the document are decreased.
+ * When the sidebar is expanded the opposite happens.  This script saves a
+ * per-browser/per-session cookie used to remember the position of the sidebar
+ * among the pages.  Once the browser is closed the cookie is deleted and the
+ * position reset to the default (expanded).
+ *
+ * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+$(function() {
+  // global elements used by the functions.
+  // the 'sidebarbutton' element is defined as global after its
+  // creation, in the add_sidebar_button function
+  var bodywrapper = $('.bodywrapper');
+  var sidebar = $('.sphinxsidebar');
+  var sidebarwrapper = $('.sphinxsidebarwrapper');
+
+  // original margin-left of the bodywrapper and width of the sidebar
+  // with the sidebar expanded
+  var bw_margin_expanded = bodywrapper.css('margin-left');
+  var ssb_width_expanded = sidebar.width();
+
+  // margin-left of the bodywrapper and width of the sidebar
+  // with the sidebar collapsed
+  var bw_margin_collapsed = '.8em';
+  var ssb_width_collapsed = '.8em';
+
+  // colors used by the current theme
+  var dark_color = '#AAAAAA';
+  var light_color = '#CCCCCC';
+
+  function sidebar_is_collapsed() {
+    return sidebarwrapper.is(':not(:visible)');
+  }
+
+  function toggle_sidebar() {
+    if (sidebar_is_collapsed())
+      expand_sidebar();
+    else
+      collapse_sidebar();
+  }
+
+  function collapse_sidebar() {
+    sidebarwrapper.hide();
+    sidebar.css('width', ssb_width_collapsed);
+    bodywrapper.css('margin-left', bw_margin_collapsed);
+    sidebarbutton.css({
+        'margin-left': '0',
+        'height': bodywrapper.height(),
+        'border-radius': '5px'
+    });
+    sidebarbutton.find('span').text('»');
+    sidebarbutton.attr('title', _('Expand sidebar'));
+    document.cookie = 'sidebar=collapsed';
+  }
+
+  function expand_sidebar() {
+    bodywrapper.css('margin-left', bw_margin_expanded);
+    sidebar.css('width', ssb_width_expanded);
+    sidebarwrapper.show();
+    sidebarbutton.css({
+        'margin-left': ssb_width_expanded-12,
+        'height': bodywrapper.height(),
+        'border-radius': '0 5px 5px 0'
+    });
+    sidebarbutton.find('span').text('«');
+    sidebarbutton.attr('title', _('Collapse sidebar'));
+    //sidebarwrapper.css({'padding-top':
+    //  Math.max(window.pageYOffset - sidebarwrapper.offset().top, 10)});
+    document.cookie = 'sidebar=expanded';
+  }
+
+  function add_sidebar_button() {
+    sidebarwrapper.css({
+        'float': 'left',
+        'margin-right': '0',
+        'width': ssb_width_expanded - 28
+    });
+    // create the button
+    sidebar.append(
+      '<div id="sidebarbutton"><span>&laquo;</span></div>'
+    );
+    var sidebarbutton = $('#sidebarbutton');
+    // find the height of the viewport to center the '<<' in the page
+    var viewport_height;
+    if (window.innerHeight)
+ 	  viewport_height = window.innerHeight;
+    else
+	  viewport_height = $(window).height();
+    var sidebar_offset = sidebar.offset().top;
+    var sidebar_height = Math.max(bodywrapper.height(), sidebar.height());
+    sidebarbutton.find('span').css({
+        'display': 'block',
+        'position': 'fixed',
+        'top': Math.min(viewport_height/2, sidebar_height/2 + sidebar_offset) - 10
+    });
+
+    sidebarbutton.click(toggle_sidebar);
+    sidebarbutton.attr('title', _('Collapse sidebar'));
+    sidebarbutton.css({
+        'border-radius': '0 5px 5px 0',
+        'color': '#444444',
+        'background-color': '#CCCCCC',
+        'font-size': '1.2em',
+        'cursor': 'pointer',
+        'height': sidebar_height,
+        'padding-top': '1px',
+        'padding-left': '1px',
+        'margin-left': ssb_width_expanded - 12
+    });
+
+    sidebarbutton.hover(
+      function () {
+          $(this).css('background-color', dark_color);
+      },
+      function () {
+          $(this).css('background-color', light_color);
+      }
+    );
+  }
+
+  function set_position_from_cookie() {
+    if (!document.cookie)
+      return;
+    var items = document.cookie.split(';');
+    for(var k=0; k<items.length; k++) {
+      var key_val = items[k].split('=');
+      var key = key_val[0];
+      if (key == 'sidebar') {
+        var value = key_val[1];
+        if ((value == 'collapsed') && (!sidebar_is_collapsed()))
+          collapse_sidebar();
+        else if ((value == 'expanded') && (sidebar_is_collapsed()))
+          expand_sidebar();
+      }
+    }
+  }
+
+  add_sidebar_button();
+  var sidebarbutton = $('#sidebarbutton');
+  set_position_from_cookie();
+});
diff -r 3d0686d90f55 Doc/tutorial/classes.rst
--- a/Doc/tutorial/classes.rst
+++ b/Doc/tutorial/classes.rst
@@ -180,7 +180,10 @@
    scope_test()
    print("In global scope:", spam)
 
-The output of the example code is::
+The output of the example code is:
+
+.. code-block:: none
+
 
    After local assignment: test spam
    After nonlocal assignment: nonlocal spam
diff -r 3d0686d90f55 Doc/tutorial/controlflow.rst
--- a/Doc/tutorial/controlflow.rst
+++ b/Doc/tutorial/controlflow.rst
@@ -157,9 +157,6 @@
 The :keyword:`break` statement, like in C, breaks out of the smallest enclosing
 :keyword:`for` or :keyword:`while` loop.
 
-The :keyword:`continue` statement, also borrowed from C, continues with the next
-iteration of the loop.
-
 Loop statements may have an ``else`` clause; it is executed when the loop
 terminates through exhaustion of the list (with :keyword:`for`) or when the
 condition becomes false (with :keyword:`while`), but not when the loop is
@@ -187,6 +184,29 @@
 (Yes, this is the correct code.  Look closely: the ``else`` clause belongs to
 the :keyword:`for` loop, **not** the :keyword:`if` statement.)
 
+When used with a loop, the ``else`` clause has more in common with the
+``else`` clause of a :keyword:`try` statement than it does with that of
+:keyword:`if` statements: a :keyword:`try` statement's ``else`` clause runs
+when no exception occurs, and a loop's ``else`` clause runs when no ``break``
+occurs. For more on the :keyword:`try` statement and exceptions, see
+:ref:`tut-handling`.
+
+The :keyword:`continue` statement, also borrowed from C, continues with the next
+iteration of the loop::
+
+    >>> for num in range(2, 10):
+    ...     if num % 2 == 0:
+    ...         print("Found an even number", num)
+    ...         continue
+    ...     print("Found a number", num)
+    Found an even number 2
+    Found a number 3
+    Found an even number 4
+    Found a number 5
+    Found an even number 6
+    Found a number 7
+    Found an even number 8
+    Found a number 9
 
 .. _tut-pass:
 
diff -r 3d0686d90f55 Doc/tutorial/datastructures.rst
--- a/Doc/tutorial/datastructures.rst
+++ b/Doc/tutorial/datastructures.rst
@@ -292,7 +292,7 @@
 In the real world, you should prefer built-in functions to complex flow statements.
 The :func:`zip` function would do a great job for this use case::
 
-   >>> zip(*matrix)
+   >>> list(zip(*matrix))
    [(1, 5, 9), (2, 6, 10), (3, 7, 11), (4, 8, 12)]
 
 See :ref:`tut-unpacking-arguments` for details on the asterisk in this line.
@@ -349,17 +349,31 @@
    ... u = t, (1, 2, 3, 4, 5)
    >>> u
    ((12345, 54321, 'hello!'), (1, 2, 3, 4, 5))
+   >>> # Tuples are immutable:
+   ... t[0] = 88888
+   Traceback (most recent call last):
+     File "<stdin>", line 1, in <module>
+   TypeError: 'tuple' object does not support item assignment
+   >>> # but they can contain mutable objects:
+   ... v = ([1, 2, 3], [3, 2, 1])
+   >>> v
+   ([1, 2, 3], [3, 2, 1])
+
 
 As you see, on output tuples are always enclosed in parentheses, so that nested
 tuples are interpreted correctly; they may be input with or without surrounding
 parentheses, although often parentheses are necessary anyway (if the tuple is
-part of a larger expression).
+part of a larger expression).  It is not possible to assign to the individual
+items of a tuple; however, it is possible to create tuples which contain mutable
+objects, such as lists.
 
-Tuples have many uses.  For example: (x, y) coordinate pairs, employee records
-from a database, etc.  Tuples, like strings, are immutable: it is not possible
-to assign to the individual items of a tuple (you can simulate much of the same
-effect with slicing and concatenation, though).  It is also possible to create
-tuples which contain mutable objects, such as lists.
+Though tuples may seem similar to lists, they are often used in different
+situations and for different purposes.
+Tuples are :term:`immutable`, and usually contain a heterogeneous sequence of
+elements that are accessed via unpacking (see later in this section) or indexing
+(or even by attribute in the case of :func:`namedtuples <collections.namedtuple>`).
+Lists are :term:`mutable`, and their elements are usually homogeneous and are
+accessed by iterating over the list.
 
 A special problem is the construction of tuples containing 0 or 1 items: the
 syntax has some extra quirks to accommodate these.  Empty tuples are constructed
@@ -388,8 +402,6 @@
 sequence.  Note that multiple assignment is really just a combination of tuple
 packing and sequence unpacking.
 
-.. XXX Add a bit on the difference between tuples and lists.
-
 
 .. _tut-sets:
 
diff -r 3d0686d90f55 Doc/tutorial/inputoutput.rst
--- a/Doc/tutorial/inputoutput.rst
+++ b/Doc/tutorial/inputoutput.rst
@@ -37,7 +37,7 @@
 The :func:`str` function is meant to return representations of values which are
 fairly human-readable, while :func:`repr` is meant to generate representations
 which can be read by the interpreter (or will force a :exc:`SyntaxError` if
-there is not equivalent syntax).  For objects which don't have a particular
+there is no equivalent syntax).  For objects which don't have a particular
 representation for human consumption, :func:`str` will return the same value as
 :func:`repr`.  Many values, such as numbers or structures like lists and
 dictionaries, have the same representation using either function.  Strings, in
diff -r 3d0686d90f55 Doc/tutorial/introduction.rst
--- a/Doc/tutorial/introduction.rst
+++ b/Doc/tutorial/introduction.rst
@@ -413,7 +413,7 @@
 About Unicode
 -------------
 
-.. sectionauthor:: Marc-Andre Lemburg <mal@lemburg.com>
+.. sectionauthor:: Marc-André Lemburg <mal@lemburg.com>
 
 
 Starting with Python 3.0 all strings support Unicode (see
diff -r 3d0686d90f55 Doc/tutorial/modules.rst
--- a/Doc/tutorial/modules.rst
+++ b/Doc/tutorial/modules.rst
@@ -248,7 +248,7 @@
 are not part of the core of the language but are nevertheless built in, either
 for efficiency or to provide access to operating system primitives such as
 system calls.  The set of such modules is a configuration option which also
-depends on the underlying platform For example, the :mod:`winreg` module is only
+depends on the underlying platform.  For example, the :mod:`winreg` module is only
 provided on Windows systems. One particular module deserves some attention:
 :mod:`sys`, which is built into every Python interpreter.  The variables
 ``sys.ps1`` and ``sys.ps2`` define the strings used as primary and secondary
diff -r 3d0686d90f55 Doc/using/unix.rst
--- a/Doc/using/unix.rst
+++ b/Doc/using/unix.rst
@@ -141,7 +141,7 @@
 * http://sourceforge.net/projects/python-mode
 
 Geany is an excellent IDE with support for a lot of languages. For more
-information, read: http://geany.uvena.de/
+information, read: http://www.geany.org/
 
 Komodo edit is another extremely good IDE.  It also has support for a lot of
 languages. For more information, read:
diff -r 3d0686d90f55 Doc/whatsnew/2.3.rst
--- a/Doc/whatsnew/2.3.rst
+++ b/Doc/whatsnew/2.3.rst
@@ -366,6 +366,9 @@
 .. ======================================================================
 
 
+.. index::
+   single: universal newlines; What's new
+
 PEP 278: Universal Newline Support
 ==================================
 
@@ -376,12 +379,12 @@
 10), MacOS uses the carriage return (ASCII character 13), and Windows uses a
 two-character sequence of a carriage return plus a newline.
 
-Python's file objects can now support end of line conventions other than the one
-followed by the platform on which Python is running. Opening a file with the
-mode ``'U'`` or ``'rU'`` will open a file for reading in universal newline mode.
-All three line ending conventions will be translated to a ``'\n'`` in the
-strings returned by the various file methods such as :meth:`read` and
-:meth:`readline`.
+Python's file objects can now support end of line conventions other than the
+one followed by the platform on which Python is running. Opening a file with
+the mode ``'U'`` or ``'rU'`` will open a file for reading in :term:`universal
+newlines` mode.  All three line ending conventions will be translated to a
+``'\n'`` in the strings returned by the various file methods such as
+:meth:`read` and :meth:`readline`.
 
 Universal newline support is also used when importing modules and when executing
 a file with the :func:`execfile` function.  This means that Python modules can
diff -r 3d0686d90f55 Doc/whatsnew/2.4.rst
--- a/Doc/whatsnew/2.4.rst
+++ b/Doc/whatsnew/2.4.rst
@@ -411,6 +411,9 @@
 you can use the constant ``subprocess.PIPE`` to create a pipe between the
 subprocess and the parent.
 
+.. index::
+   single: universal newlines; What's new
+
 The constructor has a number of handy options:
 
 * *close_fds* requests that all file descriptors be closed before running the
@@ -424,7 +427,7 @@
 * *preexec_fn* is a function that gets called before the child is started.
 
 * *universal_newlines* opens the child's input and output using Python's
-  universal newline feature.
+  :term:`universal newlines` feature.
 
 Once you've created the :class:`Popen` instance,  you can call its :meth:`wait`
 method to pause until the subprocess has exited, :meth:`poll` to check if it's
@@ -1279,7 +1282,7 @@
   interface, accessed only by :mod:`atexit`.
 
 * The :mod:`tarfile` module now generates GNU-format tar files by default.
-  (Contributed by Lars Gustaebel.)
+  (Contributed by Lars Gustäbel.)
 
 * The :mod:`threading` module now has an elegantly simple way to support
   thread-local data.  The module contains a :class:`local` class whose attribute
diff -r 3d0686d90f55 Doc/whatsnew/2.5.rst
--- a/Doc/whatsnew/2.5.rst
+++ b/Doc/whatsnew/2.5.rst
@@ -1338,13 +1338,17 @@
 
   .. XXX need to provide some more detail here
 
+  .. index::
+     single: universal newlines; What's new
+
 * The :mod:`fileinput` module was made more flexible. Unicode filenames are now
   supported, and a *mode* parameter that defaults to ``"r"`` was added to the
-  :func:`input` function to allow opening files in binary or universal-newline
-  mode.  Another new parameter, *openhook*, lets you use a function other than
-  :func:`open`  to open the input files.  Once you're iterating over  the set of
-  files, the :class:`FileInput` object's new :meth:`fileno` returns the file
-  descriptor for the currently opened file. (Contributed by Georg Brandl.)
+  :func:`input` function to allow opening files in binary or :term:`universal
+  newlines` mode.  Another new parameter, *openhook*, lets you use a function
+  other than :func:`open`  to open the input files.  Once you're iterating over
+  the set of files, the :class:`FileInput` object's new :meth:`fileno` returns
+  the file descriptor for the currently opened file. (Contributed by Georg
+  Brandl.)
 
 * In the :mod:`gc` module, the new :func:`get_count` function returns a 3-tuple
   containing the current collection counts for the three GC generations.  This is
diff -r 3d0686d90f55 Doc/whatsnew/2.6.rst
--- a/Doc/whatsnew/2.6.rst
+++ b/Doc/whatsnew/2.6.rst
@@ -175,7 +175,7 @@
 
 Hosting of the Python bug tracker is kindly provided by
 `Upfront Systems <http://www.upfrontsystems.co.za/>`__
-of Stellenbosch, South Africa.  Martin von Loewis put a
+of Stellenbosch, South Africa.  Martin von Löwis put a
 lot of effort into importing existing bugs and patches from
 SourceForge; his scripts for this import operation are at
 http://svn.python.org/view/tracker/importer/ and may be useful to
@@ -193,7 +193,7 @@
     Roundup downloads and documentation.
 
   http://svn.python.org/view/tracker/importer/
-    Martin von Loewis's conversion scripts.
+    Martin von Löwis's conversion scripts.
 
 New Documentation Format: reStructuredText Using Sphinx
 -----------------------------------------------------------
@@ -1071,9 +1071,12 @@
   The :class:`BytesIO` class supports reading, writing, and seeking
   over an in-memory buffer.
 
+  .. index::
+     single: universal newlines; What's new
+
 * :class:`TextIOBase`: Provides functions for reading and writing
   strings (remember, strings will be Unicode in Python 3.0),
-  and supporting universal newlines.  :class:`TextIOBase` defines
+  and supporting :term:`universal newlines`.  :class:`TextIOBase` defines
   the :meth:`readline` method and supports iteration upon
   objects.
 
@@ -1100,7 +1103,7 @@
    :pep:`3116` - New I/O
       PEP written by Daniel Stutzbach, Mike Verdone, and Guido van Rossum.
       Code by Guido van Rossum, Georg Brandl, Walter Doerwald,
-      Jeremy Hylton, Martin von Loewis, Tony Lownds, and others.
+      Jeremy Hylton, Martin von Löwis, Tony Lownds, and others.
 
 .. ======================================================================
 
@@ -1774,7 +1777,7 @@
 ``latin-1``; the optional *errorhandler* part specifies
 what to do with characters that can't be handled by the encoding,
 and  should be one of "error", "ignore", or "replace".   (Contributed
-by Martin von Loewis.)
+by Martin von Löwis.)
 
 .. ======================================================================
 
@@ -1792,7 +1795,7 @@
   were applied.  (Maintained by Josiah Carlson; see :issue:`1736190` for
   one patch.)
 
-* The :mod:`bsddb` module also has a new maintainer, Jesús Cea Avion, and the package
+* The :mod:`bsddb` module also has a new maintainer, Jesús Cea Avión, and the package
   is now available as a standalone package.  The web page for the package is
   `www.jcea.es/programacion/pybsddb.htm
   <http://www.jcea.es/programacion/pybsddb.htm>`__.
@@ -2384,7 +2387,7 @@
   (Contributed by Pedro Werneck and Jeffrey Yasskin;
   :issue:`742598`, :issue:`1193577`.)
 
-* The :mod:`sqlite3` module, maintained by Gerhard Haering,
+* The :mod:`sqlite3` module, maintained by Gerhard Häring,
   has been updated from version 2.3.2 in Python 2.5 to
   version 2.4.1.
 
@@ -2597,7 +2600,7 @@
 
 * The Unicode database provided by the :mod:`unicodedata` module
   has been updated to version 5.1.0.  (Updated by
-  Martin von Loewis; :issue:`3811`.)
+  Martin von Löwis; :issue:`3811`.)
 
 * The :mod:`warnings` module's :func:`formatwarning` and :func:`showwarning`
   gained an optional *line* argument that can be used to supply the
@@ -3104,7 +3107,7 @@
   :file:`PCbuild` directory supports cross compilation for X64, debug
   builds and Profile Guided Optimization (PGO). PGO builds are roughly
   10% faster than normal builds.  (Contributed by Christian Heimes
-  with help from Amaury Forgeot d'Arc and Martin von Loewis.)
+  with help from Amaury Forgeot d'Arc and Martin von Löwis.)
 
 * The :mod:`msvcrt` module now supports
   both the normal and wide char variants of the console I/O
diff -r 3d0686d90f55 Doc/whatsnew/2.7.rst
--- a/Doc/whatsnew/2.7.rst
+++ b/Doc/whatsnew/2.7.rst
@@ -1434,7 +1434,7 @@
 * The :mod:`signal` module no longer re-installs the signal handler
   unless this is truly necessary, which fixes a bug that could make it
   impossible to catch the EINTR signal robustly.  (Fixed by
-  Charles-Francois Natali; :issue:`8354`.)
+  Charles-François Natali; :issue:`8354`.)
 
 * New functions: in the :mod:`site` module, three new functions
   return various site- and user-specific paths.
@@ -2331,7 +2331,7 @@
   attributes of the resulting code objects are overwritten when the
   original filename is obsolete.  This can happen if the file has been
   renamed, moved, or is accessed through different paths.  (Patch by
-  Ziga Seilnacht and Jean-Paul Calderone; :issue:`1180193`.)
+  Žiga Seilnacht and Jean-Paul Calderone; :issue:`1180193`.)
 
 * The :file:`regrtest.py` script now takes a :option:`--randseed=`
   switch that takes an integer that will be used as the random seed
diff -r 3d0686d90f55 Include/Python.h
--- a/Include/Python.h
+++ b/Include/Python.h
@@ -100,7 +100,6 @@
 #include "warnings.h"
 #include "weakrefobject.h"
 #include "structseq.h"
-#include "accu.h"
 
 #include "codecs.h"
 #include "pyerrors.h"
diff -r 3d0686d90f55 Include/node.h
--- a/Include/node.h
+++ b/Include/node.h
@@ -20,6 +20,9 @@
 PyAPI_FUNC(int) PyNode_AddChild(node *n, int type,
                                       char *str, int lineno, int col_offset);
 PyAPI_FUNC(void) PyNode_Free(node *n);
+#ifndef Py_LIMITED_API
+Py_ssize_t _PyNode_SizeOf(node *n);
+#endif
 
 /* Node access functions */
 #define NCH(n)		((n)->n_nchildren)
diff -r 3d0686d90f55 Include/patchlevel.h
--- a/Include/patchlevel.h
+++ b/Include/patchlevel.h
@@ -2,7 +2,7 @@
 /* Python version identification scheme.
 
    When the major or minor version changes, the VERSION variable in
-   configure.in must also be changed.
+   configure.ac must also be changed.
 
    There is also (independent) API version information in modsupport.h.
 */
diff -r 3d0686d90f55 Include/pyport.h
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -557,6 +557,30 @@
         _Py_set_387controlword(old_387controlword)
 #endif
 
+/* get and set x87 control word for VisualStudio/x86 */
+#if defined(_MSC_VER) && !defined(_WIN64) /* x87 not supported in 64-bit */
+#define HAVE_PY_SET_53BIT_PRECISION 1
+#define _Py_SET_53BIT_PRECISION_HEADER \
+    unsigned int old_387controlword, new_387controlword, out_387controlword
+/* We use the __control87_2 function to set only the x87 control word.
+   The SSE control word is unaffected. */
+#define _Py_SET_53BIT_PRECISION_START                                   \
+    do {                                                                \
+        __control87_2(0, 0, &old_387controlword, NULL);                 \
+        new_387controlword =                                            \
+          (old_387controlword & ~(_MCW_PC | _MCW_RC)) | (_PC_53 | _RC_NEAR); \
+        if (new_387controlword != old_387controlword)                   \
+            __control87_2(new_387controlword, _MCW_PC | _MCW_RC,        \
+                          &out_387controlword, NULL);                   \
+    } while (0)
+#define _Py_SET_53BIT_PRECISION_END                                     \
+    do {                                                                \
+        if (new_387controlword != old_387controlword)                   \
+            __control87_2(old_387controlword, _MCW_PC | _MCW_RC,        \
+                          &out_387controlword, NULL);                   \
+    } while (0)
+#endif
+
 /* default definitions are empty */
 #ifndef HAVE_PY_SET_53BIT_PRECISION
 #define _Py_SET_53BIT_PRECISION_HEADER
diff -r 3d0686d90f55 Lib/__future__.py
--- a/Lib/__future__.py
+++ b/Lib/__future__.py
@@ -114,7 +114,7 @@
                     CO_FUTURE_DIVISION)
 
 absolute_import = _Feature((2, 5, 0, "alpha", 1),
-                           (2, 7, 0, "alpha", 0),
+                           (3, 0, 0, "alpha", 0),
                            CO_FUTURE_ABSOLUTE_IMPORT)
 
 with_statement = _Feature((2, 5, 0, "alpha", 1),
diff -r 3d0686d90f55 Lib/_strptime.py
--- a/Lib/_strptime.py
+++ b/Lib/_strptime.py
@@ -339,7 +339,7 @@
         raise ValueError("unconverted data remains: %s" %
                           data_string[found.end():])
 
-    year = 1900
+    year = None
     month = day = 1
     hour = minute = second = fraction = 0
     tz = -1
@@ -444,6 +444,12 @@
                     else:
                         tz = value
                         break
+    leap_year_fix = False
+    if year is None and month == 2 and day == 29:
+        year = 1904  # 1904 is first leap year of 20th century
+        leap_year_fix = True
+    elif year is None:
+        year = 1900
     # If we know the week of the year and what day of that week, we can figure
     # out the Julian day of the year.
     if julian == -1 and week_of_year != -1 and weekday != -1:
@@ -472,6 +478,12 @@
     else:
         gmtoff = None
 
+    if leap_year_fix:
+        # the caller didn't supply a year but asked for Feb 29th. We couldn't
+        # use the default of 1900 for computations. We set it back to ensure
+        # that February 29th is smaller than March 1st.
+        year = 1900
+
     return (year, month, day,
             hour, minute, second,
             weekday, julian, tz, gmtoff, tzname), fraction
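
An illustrative sketch of the behaviour enabled by the leap-year fix above:
parsing February 29th without an explicit year no longer fails merely because
the default year, 1900, is not a leap year::

   import time

   t = time.strptime('Feb 29', '%b %d')
   # t.tm_year == 1900, t.tm_mon == 2, t.tm_mday == 29
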
diff -r 3d0686d90f55 Lib/_weakrefset.py
--- a/Lib/_weakrefset.py
+++ b/Lib/_weakrefset.py
@@ -63,7 +63,7 @@
                     yield item
 
     def __len__(self):
-        return sum(x() is not None for x in self.data)
+        return len(self.data) - len(self._pending_removals)
 
     def __contains__(self, item):
         try:
@@ -114,36 +114,21 @@
     def update(self, other):
         if self._pending_removals:
             self._commit_removals()
-        if isinstance(other, self.__class__):
-            self.data.update(other.data)
-        else:
-            for element in other:
-                self.add(element)
+        for element in other:
+            self.add(element)
 
     def __ior__(self, other):
         self.update(other)
         return self
 
-    # Helper functions for simple delegating methods.
-    def _apply(self, other, method):
-        if not isinstance(other, self.__class__):
-            other = self.__class__(other)
-        newdata = method(other.data)
-        newset = self.__class__()
-        newset.data = newdata
+    def difference(self, other):
+        newset = self.copy()
+        newset.difference_update(other)
         return newset
-
-    def difference(self, other):
-        return self._apply(other, self.data.difference)
     __sub__ = difference
 
     def difference_update(self, other):
-        if self._pending_removals:
-            self._commit_removals()
-        if self is other:
-            self.data.clear()
-        else:
-            self.data.difference_update(ref(item) for item in other)
+        self.__isub__(other)
     def __isub__(self, other):
         if self._pending_removals:
             self._commit_removals()
@@ -154,13 +139,11 @@
         return self
 
     def intersection(self, other):
-        return self._apply(other, self.data.intersection)
+        return self.__class__(item for item in other if item in self)
     __and__ = intersection
 
     def intersection_update(self, other):
-        if self._pending_removals:
-            self._commit_removals()
-        self.data.intersection_update(ref(item) for item in other)
+        self.__iand__(other)
     def __iand__(self, other):
         if self._pending_removals:
             self._commit_removals()
@@ -169,17 +152,17 @@
 
     def issubset(self, other):
         return self.data.issubset(ref(item) for item in other)
-    __lt__ = issubset
+    __le__ = issubset
 
-    def __le__(self, other):
-        return self.data <= set(ref(item) for item in other)
+    def __lt__(self, other):
+        return self.data < set(ref(item) for item in other)
 
     def issuperset(self, other):
         return self.data.issuperset(ref(item) for item in other)
-    __gt__ = issuperset
+    __ge__ = issuperset
 
-    def __ge__(self, other):
-        return self.data >= set(ref(item) for item in other)
+    def __gt__(self, other):
+        return self.data > set(ref(item) for item in other)
 
     def __eq__(self, other):
         if not isinstance(other, self.__class__):
@@ -187,27 +170,24 @@
         return self.data == set(ref(item) for item in other)
 
     def symmetric_difference(self, other):
-        return self._apply(other, self.data.symmetric_difference)
+        newset = self.copy()
+        newset.symmetric_difference_update(other)
+        return newset
     __xor__ = symmetric_difference
 
     def symmetric_difference_update(self, other):
-        if self._pending_removals:
-            self._commit_removals()
-        if self is other:
-            self.data.clear()
-        else:
-            self.data.symmetric_difference_update(ref(item) for item in other)
+        self.__ixor__(other)
     def __ixor__(self, other):
         if self._pending_removals:
             self._commit_removals()
         if self is other:
             self.data.clear()
         else:
-            self.data.symmetric_difference_update(ref(item) for item in other)
+            self.data.symmetric_difference_update(ref(item, self._remove) for item in other)
         return self
 
     def union(self, other):
-        return self._apply(other, self.data.union)
+        return self.__class__(e for s in (self, other) for e in s)
     __or__ = union
 
     def isdisjoint(self, other):
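
An illustrative sketch of the comparison operators after the fix above, which
makes :class:`~weakref.WeakSet` ordering consistent with built-in sets::

   from weakref import WeakSet

   class C:
       pass

   a, b = C(), C()              # keep strong references so the sets stay populated
   small, big = WeakSet([a]), WeakSet([a, b])

   small <= big                 # True  (subset; now bound to __le__, not __lt__)
   small < small                # False (proper subset is strict)
   big >= small                 # True
   big > big                    # False
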
diff -r 3d0686d90f55 Lib/argparse.py
--- a/Lib/argparse.py
+++ b/Lib/argparse.py
@@ -736,10 +736,10 @@
 
         - default -- The value to be produced if the option is not specified.
 
-        - type -- The type which the command-line arguments should be converted
-            to, should be one of 'string', 'int', 'float', 'complex' or a
-            callable object that accepts a single string argument. If None,
-            'string' is assumed.
+        - type -- A callable that accepts a single string argument, and
+            returns the converted value.  The standard Python types str, int,
+            float, and complex are useful examples of such callables.  If None,
+            str is used.
 
         - choices -- A container of values that should be allowed. If not None,
             after a command-line argument has been converted to the appropriate
@@ -1976,7 +1976,7 @@
         for arg_string in arg_strings:
 
             # for regular arguments, just add them back into the list
-            if arg_string[0] not in self.fromfile_prefix_chars:
+            if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                 new_arg_strings.append(arg_string)
 
             # replace arguments referencing files with the file content
@@ -2186,9 +2186,12 @@
     # Value conversion methods
     # ========================
     def _get_values(self, action, arg_strings):
-        # for everything but PARSER args, strip out '--'
+        # for everything but PARSER, REMAINDER args, strip out first '--'
         if action.nargs not in [PARSER, REMAINDER]:
-            arg_strings = [s for s in arg_strings if s != '--']
+            try:
+                arg_strings.remove('--')
+            except ValueError:
+                pass
 
         # optional argument produces a default when not present
         if not arg_strings and action.nargs == OPTIONAL:
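
An illustrative example of the *type* behaviour as reworded above; any callable
taking a single string argument works::

   import argparse

   parser = argparse.ArgumentParser()
   parser.add_argument('--count', type=int)
   parser.add_argument('--ratio', type=float)
   args = parser.parse_args(['--count', '3', '--ratio', '0.5'])
   # args.count == 3, args.ratio == 0.5
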
diff -r 3d0686d90f55 Lib/asyncore.py
--- a/Lib/asyncore.py
+++ b/Lib/asyncore.py
@@ -225,6 +225,7 @@
     debug = False
     connected = False
     accepting = False
+    connecting = False
     closing = False
     addr = None
     ignore_log_types = frozenset(['warning'])
@@ -248,7 +249,7 @@
             try:
                 self.addr = sock.getpeername()
             except socket.error as err:
-                if err.args[0] == ENOTCONN:
+                if err.args[0] in (ENOTCONN, EINVAL):
                     # To handle the case where we got an unconnected
                     # socket.
                     self.connected = False
@@ -342,9 +343,11 @@
 
     def connect(self, address):
         self.connected = False
+        self.connecting = True
         err = self.socket.connect_ex(address)
         if err in (EINPROGRESS, EALREADY, EWOULDBLOCK) \
         or err == EINVAL and os.name in ('nt', 'ce'):
+            self.addr = address
             return
         if err in (0, EISCONN):
             self.addr = address
@@ -400,6 +403,7 @@
     def close(self):
         self.connected = False
         self.accepting = False
+        self.connecting = False
         self.del_channel()
         try:
             self.socket.close()
@@ -438,7 +442,8 @@
             # sockets that are connected
             self.handle_accept()
         elif not self.connected:
-            self.handle_connect_event()
+            if self.connecting:
+                self.handle_connect_event()
             self.handle_read()
         else:
             self.handle_read()
@@ -449,6 +454,7 @@
             raise socket.error(err, _strerror(err))
         self.handle_connect()
         self.connected = True
+        self.connecting = False
 
     def handle_write_event(self):
         if self.accepting:
@@ -457,12 +463,8 @@
             return
 
         if not self.connected:
-            #check for errors
-            err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
-            if err != 0:
-                raise socket.error(err, _strerror(err))
-
-            self.handle_connect_event()
+            if self.connecting:
+                self.handle_connect_event()
         self.handle_write()
 
     def handle_expt_event(self):
diff -r 3d0686d90f55 Lib/bdb.py
--- a/Lib/bdb.py
+++ b/Lib/bdb.py
@@ -22,6 +22,7 @@
         self.skip = set(skip) if skip else None
         self.breaks = {}
         self.fncache = {}
+        self.frame_returning = None
 
     def canonic(self, filename):
         if filename == "<" + filename[1:-1] + ">":
@@ -80,7 +81,11 @@
 
     def dispatch_return(self, frame, arg):
         if self.stop_here(frame) or frame == self.returnframe:
-            self.user_return(frame, arg)
+            try:
+                self.frame_returning = frame
+                self.user_return(frame, arg)
+            finally:
+                self.frame_returning = None
             if self.quitting: raise BdbQuit
         return self.trace_dispatch
 
@@ -186,6 +191,14 @@
 
     def set_step(self):
         """Stop after one line of code."""
+        # Issue #13183: pdb skips frames after hitting a breakpoint and running
+        # step commands.
+        # Restore the trace function in the caller (that may not have been set
+        # for performance reasons) when returning from the current frame.
+        if self.frame_returning:
+            caller_frame = self.frame_returning.f_back
+            if caller_frame and not caller_frame.f_trace:
+                caller_frame.f_trace = self.trace_dispatch
         self._set_stopinfo(None, None)
 
     def set_next(self, frame):
diff -r 3d0686d90f55 Lib/concurrent/futures/_base.py
--- a/Lib/concurrent/futures/_base.py
+++ b/Lib/concurrent/futures/_base.py
@@ -112,12 +112,14 @@
     def __init__(self, num_pending_calls, stop_on_exception):
         self.num_pending_calls = num_pending_calls
         self.stop_on_exception = stop_on_exception
+        self.lock = threading.Lock()
         super().__init__()
 
     def _decrement_pending_calls(self):
-        self.num_pending_calls -= 1
-        if not self.num_pending_calls:
-            self.event.set()
+        with self.lock:
+            self.num_pending_calls -= 1
+            if not self.num_pending_calls:
+                self.event.set()
 
     def add_result(self, future):
         super().add_result(future)
diff -r 3d0686d90f55 Lib/ctypes/test/test_bitfields.py
--- a/Lib/ctypes/test/test_bitfields.py
+++ b/Lib/ctypes/test/test_bitfields.py
@@ -240,5 +240,25 @@
             _anonymous_ = ["_"]
             _fields_ = [("_", X)]
 
+    @unittest.skipUnless(hasattr(ctypes, "c_uint32"), "c_uint32 is required")
+    def test_uint32(self):
+        class X(Structure):
+            _fields_ = [("a", c_uint32, 32)]
+        x = X()
+        x.a = 10
+        self.assertEqual(x.a, 10)
+        x.a = 0xFDCBA987
+        self.assertEqual(x.a, 0xFDCBA987)
+
+    @unittest.skipUnless(hasattr(ctypes, "c_uint64"), "c_uint64 is required")
+    def test_uint64(self):
+        class X(Structure):
+            _fields_ = [("a", c_uint64, 64)]
+        x = X()
+        x.a = 10
+        self.assertEqual(x.a, 10)
+        x.a = 0xFEDCBA9876543211
+        self.assertEqual(x.a, 0xFEDCBA9876543211)
+
 if __name__ == "__main__":
     unittest.main()
diff -r 3d0686d90f55 Lib/ctypes/test/test_numbers.py
--- a/Lib/ctypes/test/test_numbers.py
+++ b/Lib/ctypes/test/test_numbers.py
@@ -217,6 +217,16 @@
         # probably be changed:
         self.assertRaises(TypeError, c_int, c_long(42))
 
+    def test_float_overflow(self):
+        import sys
+        big_int = int(sys.float_info.max) * 2
+        for t in float_types + [c_longdouble]:
+            self.assertRaises(OverflowError, t, big_int)
+            if (hasattr(t, "__ctype_be__")):
+                self.assertRaises(OverflowError, t.__ctype_be__, big_int)
+            if (hasattr(t, "__ctype_le__")):
+                self.assertRaises(OverflowError, t.__ctype_le__, big_int)
+
 ##    def test_perf(self):
 ##        check_perf()
 
diff -r 3d0686d90f55 Lib/decimal.py
--- a/Lib/decimal.py
+++ b/Lib/decimal.py
@@ -1555,7 +1555,13 @@
 
     def __float__(self):
         """Float representation."""
-        return float(str(self))
+        if self._isnan():
+            if self.is_snan():
+                raise ValueError("Cannot convert signaling NaN to float")
+            s = "-nan" if self._sign else "nan"
+        else:
+            s = str(self)
+        return float(s)
 
     def __int__(self):
         """Converts self to an int, truncating if necessary."""
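
An illustrative sketch of the new :meth:`__float__` behaviour above::

   from decimal import Decimal

   float(Decimal('2.5'))     # 2.5
   float(Decimal('NaN'))     # nan (quiet NaNs still convert)
   float(Decimal('sNaN'))    # raises ValueError: Cannot convert signaling NaN to float
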
diff -r 3d0686d90f55 Lib/distutils/command/bdist_rpm.py
--- a/Lib/distutils/command/bdist_rpm.py
+++ b/Lib/distutils/command/bdist_rpm.py
@@ -3,7 +3,7 @@
 Implements the Distutils 'bdist_rpm' command (create RPM source and binary
 distributions)."""
 
-import sys, os
+import subprocess, sys, os
 from distutils.core import Command
 from distutils.debug import DEBUG
 from distutils.util import get_platform
@@ -190,7 +190,7 @@
             if self.fix_python:
                 self.python = sys.executable
             else:
-                self.python = "python"
+                self.python = "python3"
         elif self.fix_python:
             raise DistutilsOptionError(
                   "--python and --fix-python are mutually exclusive options")
@@ -320,6 +320,7 @@
             rpm_cmd.append('-bb')
         else:
             rpm_cmd.append('-ba')
+        rpm_cmd.extend(['--define', '__python %s' % self.python])
         if self.rpm3_mode:
             rpm_cmd.extend(['--define',
                              '_topdir %s' % os.path.abspath(self.rpm_base)])
@@ -405,6 +406,21 @@
             'Summary: ' + self.distribution.get_description(),
             ]
 
+        # Workaround for #14443 which affects some RPM based systems such as
+        # RHEL6 (and probably derivatives)
+        vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}')
+        # Generate a potential replacement value for __os_install_post (whilst
+        # normalizing the whitespace to simplify the test for whether the
+        # invocation of brp-python-bytecompile passes in __python):
+        vendor_hook = '\n'.join(['  %s \\' % line.strip()
+                                 for line in vendor_hook.splitlines()])
+        problem = "brp-python-bytecompile \\\n"
+        fixed = "brp-python-bytecompile %{__python} \\\n"
+        fixed_hook = vendor_hook.replace(problem, fixed)
+        if fixed_hook != vendor_hook:
+            spec_file.append('# Workaround for http://bugs.python.org/issue14443')
+            spec_file.append('%define __os_install_post ' + fixed_hook + '\n')
+
         # put locale summaries into spec file
         # XXX not supported for now (hard to put a dictionary
         # in a config file -- arg!)
diff -r 3d0686d90f55 Lib/distutils/command/upload.py
--- a/Lib/distutils/command/upload.py
+++ b/Lib/distutils/command/upload.py
@@ -125,7 +125,7 @@
 
         if self.sign:
             data['gpg_signature'] = (os.path.basename(filename) + ".asc",
-                                     open(filename+".asc").read())
+                                     open(filename+".asc", "rb").read())
 
         # set up the authentication
         user_pass = (self.username + ":" + self.password).encode('ascii')
diff -r 3d0686d90f55 Lib/distutils/tests/test_bdist_msi.py
--- a/Lib/distutils/tests/test_bdist_msi.py
+++ b/Lib/distutils/tests/test_bdist_msi.py
@@ -1,12 +1,11 @@
 """Tests for distutils.command.bdist_msi."""
+import sys
 import unittest
-import sys
-
 from test.support import run_unittest
-
 from distutils.tests import support
 
-@unittest.skipUnless(sys.platform=="win32", "These tests are only for win32")
+
+@unittest.skipUnless(sys.platform == 'win32', 'these tests require Windows')
 class BDistMSITestCase(support.TempdirManager,
                        support.LoggingSilencer,
                        unittest.TestCase):
@@ -14,10 +13,11 @@
     def test_minimal(self):
         # minimal test XXX need more tests
         from distutils.command.bdist_msi import bdist_msi
-        pkg_pth, dist = self.create_dist()
+        project_dir, dist = self.create_dist()
         cmd = bdist_msi(dist)
         cmd.ensure_finalized()
 
+
 def test_suite():
     return unittest.makeSuite(BDistMSITestCase)
 
diff -r 3d0686d90f55 Lib/distutils/tests/test_sdist.py
--- a/Lib/distutils/tests/test_sdist.py
+++ b/Lib/distutils/tests/test_sdist.py
@@ -6,6 +6,7 @@
 import zipfile
 from os.path import join
 from textwrap import dedent
+from test.support import captured_stdout, check_warnings, run_unittest
 
 try:
     import zlib
@@ -13,7 +14,6 @@
 except ImportError:
     ZLIB_SUPPORT = False
 
-from test.support import captured_stdout, check_warnings, run_unittest
 
 from distutils.command.sdist import sdist, show_formats
 from distutils.core import Distribution
@@ -326,6 +326,7 @@
         # filling data_files by pointing files in package_data
         dist.package_data = {'somecode': ['*.txt']}
         self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
+        cmd.formats = ['gztar']
         cmd.ensure_finalized()
         cmd.run()
 
diff -r 3d0686d90f55 Lib/doctest.py
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -2266,7 +2266,8 @@
         return "Doctest: " + self._dt_test.name
 
 class SkipDocTestCase(DocTestCase):
-    def __init__(self):
+    def __init__(self, module):
+        self.module = module
         DocTestCase.__init__(self, None)
 
     def setUp(self):
@@ -2276,7 +2277,10 @@
         pass
 
     def shortDescription(self):
-        return "Skipping tests from %s" % module.__name__
+        return "Skipping tests from %s" % self.module.__name__
+
+    __str__ = shortDescription
+
 
 def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
                  **options):
@@ -2324,7 +2328,7 @@
     if not tests and sys.flags.optimize >=2:
         # Skip doctests when running with -O2
         suite = unittest.TestSuite()
-        suite.addTest(SkipDocTestCase())
+        suite.addTest(SkipDocTestCase(module))
         return suite
     elif not tests:
         # Why do we want to do this? Because it reveals a bug that might
diff -r 3d0686d90f55 Lib/email/__init__.py
--- a/Lib/email/__init__.py
+++ b/Lib/email/__init__.py
@@ -11,6 +11,7 @@
     'charset',
     'encoders',
     'errors',
+    'feedparser',
     'generator',
     'header',
     'iterators',
diff -r 3d0686d90f55 Lib/email/_parseaddr.py
--- a/Lib/email/_parseaddr.py
+++ b/Lib/email/_parseaddr.py
@@ -13,7 +13,7 @@
     'quote',
     ]
 
-import time
+import time, calendar
 
 SPACE = ' '
 EMPTYSTRING = ''
@@ -152,13 +152,13 @@
 
 
 def mktime_tz(data):
-    """Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp."""
+    """Turn a 10-tuple as returned by parsedate_tz() into a POSIX timestamp."""
     if data[9] is None:
         # No zone info, so localtime is better assumption than GMT
         return time.mktime(data[:8] + (-1,))
     else:
-        t = time.mktime(data[:8] + (0,))
-        return t - data[9] - time.timezone
+        t = calendar.timegm(data)
+        return t - data[9]
 
 
 def quote(str):
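
With the switch to :func:`calendar.timegm` above, :func:`mktime_tz` no longer
depends on the local timezone or DST settings; an illustrative example::

   from email.utils import mktime_tz, parsedate_tz

   tup = parsedate_tz('Tue, 24 Apr 2012 17:00:00 -0400')
   mktime_tz(tup)            # 1335301200, the POSIX timestamp for 21:00 UTC
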
diff -r 3d0686d90f55 Lib/email/feedparser.py
--- a/Lib/email/feedparser.py
+++ b/Lib/email/feedparser.py
@@ -19,7 +19,7 @@
 object's .defects attribute.
 """
 
-__all__ = ['FeedParser']
+__all__ = ['FeedParser', 'BytesFeedParser']
 
 import re
 
diff -r 3d0686d90f55 Lib/email/generator.py
--- a/Lib/email/generator.py
+++ b/Lib/email/generator.py
@@ -4,7 +4,7 @@
 
 """Classes to generate plain text from a message object tree."""
 
-__all__ = ['Generator', 'DecodedGenerator']
+__all__ = ['Generator', 'DecodedGenerator', 'BytesGenerator']
 
 import re
 import sys
@@ -233,7 +233,11 @@
             msg.set_boundary(boundary)
         # If there's a preamble, write it out, with a trailing CRLF
         if msg.preamble is not None:
-            self.write(msg.preamble + self._NL)
+            if self._mangle_from_:
+                preamble = fcre.sub('>From ', msg.preamble)
+            else:
+                preamble = msg.preamble
+            self.write(preamble + self._NL)
         # dash-boundary transport-padding CRLF
         self.write('--' + boundary + self._NL)
         # body-part
@@ -251,7 +255,11 @@
         self.write(self._NL + '--' + boundary + '--')
         if msg.epilogue is not None:
             self.write(self._NL)
-            self.write(msg.epilogue)
+            if self._mangle_from_:
+                epilogue = fcre.sub('>From ', msg.epilogue)
+            else:
+                epilogue = msg.epilogue
+            self.write(epilogue)
 
     def _handle_multipart_signed(self, msg):
         # The contents of signed parts has to stay unmodified in order to keep
@@ -360,7 +368,7 @@
         for h, v in msg._headers:
             self.write('%s: ' % h)
             if isinstance(v, Header):
-                self.write(v.encode(maxlinelen=self._maxheaderlen)+NL)
+                self.write(v.encode(maxlinelen=self._maxheaderlen)+self._NL)
             elif _has_surrogates(v):
                 # If we have raw 8bit data in a byte string, we have no idea
                 # what the encoding is.  There is no safe way to split this
@@ -383,6 +391,8 @@
         if msg._payload is None:
             return
         if _has_surrogates(msg._payload):
+            if self._mangle_from_:
+                msg._payload = fcre.sub(">From ", msg._payload)
             self.write(msg._payload)
         else:
             super(BytesGenerator,self)._handle_text(msg)
diff -r 3d0686d90f55 Lib/email/header.py
--- a/Lib/email/header.py
+++ b/Lib/email/header.py
@@ -283,7 +283,12 @@
         # character set, otherwise an early error is thrown.
         output_charset = charset.output_codec or 'us-ascii'
         if output_charset != _charset.UNKNOWN8BIT:
-            s.encode(output_charset, errors)
+            try:
+                s.encode(output_charset, errors)
+            except UnicodeEncodeError:
+                if output_charset != 'us-ascii':
+                    raise
+                charset = UTF8
         self._chunks.append((s, charset))
 
     def encode(self, splitchars=';, \t', maxlinelen=None, linesep='\n'):
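
An illustrative sketch of the fallback added above: a non-ASCII chunk appended
with the default us-ascii charset is stored as utf-8 instead of raising
:exc:`UnicodeEncodeError`::

   from email.header import Header

   h = Header('É test')      # default charset is us-ascii
   h.encode()                # '=?utf-8?q?=C3=89_test?=' (compare the new test below)
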
diff -r 3d0686d90f55 Lib/email/message.py
--- a/Lib/email/message.py
+++ b/Lib/email/message.py
@@ -613,17 +613,15 @@
         the form (CHARSET, LANGUAGE, VALUE).  Note that both CHARSET and
         LANGUAGE can be None, in which case you should consider VALUE to be
         encoded in the us-ascii charset.  You can usually ignore LANGUAGE.
+        The parameter value (either the returned string, or the VALUE item in
+        the 3-tuple) is always unquoted, unless unquote is set to False.
 
-        Your application should be prepared to deal with 3-tuple return
-        values, and can convert the parameter to a Unicode string like so:
+        If your application doesn't care whether the parameter was RFC 2231
+        encoded, it can turn the return value into a string as follows:
 
             param = msg.get_param('foo')
-            if isinstance(param, tuple):
-                param = unicode(param[2], param[0] or 'us-ascii')
+            param = email.utils.collapse_rfc2231_value(param)
 
-        In any case, the parameter value (either the returned string, or the
-        VALUE item in the 3-tuple) is always unquoted, unless unquote is set
-        to False.
         """
         if header not in self:
             return failobj
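
An illustrative example of the docstring advice above; the header value is made
up for the purpose of the sketch::

   import email
   from email.utils import collapse_rfc2231_value

   msg = email.message_from_string(
       "Content-Type: text/plain; name*=utf-8''hello.txt\n\nbody\n")
   param = msg.get_param('name')     # may be a (CHARSET, LANGUAGE, VALUE) 3-tuple
   collapse_rfc2231_value(param)     # 'hello.txt'
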
diff -r 3d0686d90f55 Lib/email/parser.py
--- a/Lib/email/parser.py
+++ b/Lib/email/parser.py
@@ -4,7 +4,7 @@
 
 """A parser of RFC 2822 and MIME email messages."""
 
-__all__ = ['Parser', 'HeaderParser']
+__all__ = ['Parser', 'HeaderParser', 'BytesParser']
 
 import warnings
 from io import StringIO, TextIOWrapper
diff -r 3d0686d90f55 Lib/email/test/test_email.py
--- a/Lib/email/test/test_email.py
+++ b/Lib/email/test/test_email.py
@@ -20,7 +20,7 @@
 from email.charset import Charset
 from email.header import Header, decode_header, make_header
 from email.parser import Parser, HeaderParser
-from email.generator import Generator, DecodedGenerator
+from email.generator import Generator, DecodedGenerator, BytesGenerator
 from email.message import Message
 from email.mime.application import MIMEApplication
 from email.mime.audio import MIMEAudio
@@ -619,6 +619,19 @@
         msg['Dummy'] = 'dummy\nX-Injected-Header: test'
         self.assertRaises(errors.HeaderParseError, msg.as_string)
 
+    def test_unicode_header_defaults_to_utf8_encoding(self):
+        # Issue 14291
+        m = MIMEText('abc\n')
+        m['Subject'] = 'É test'
+        self.assertEqual(str(m),textwrap.dedent("""\
+            Content-Type: text/plain; charset="us-ascii"
+            MIME-Version: 1.0
+            Content-Transfer-Encoding: 7bit
+            Subject: =?utf-8?q?=C3=89_test?=
+
+            abc
+            """))
+
 # Test the email.encoders module
 class TestEncoders(unittest.TestCase):
 
@@ -1060,9 +1073,13 @@
                          'f\xfcr Offshore-Windkraftprojekte '
                          '<a-very-long-address@example.com>')
         msg['Reply-To'] = header_string
-        self.assertRaises(UnicodeEncodeError, msg.as_string)
+        eq(msg.as_string(maxheaderlen=78), """\
+Reply-To: =?utf-8?q?Britische_Regierung_gibt_gr=C3=BCnes_Licht_f=C3=BCr_Offs?=
+ =?utf-8?q?hore-Windkraftprojekte_=3Ca-very-long-address=40example=2Ecom=3E?=
+
+""")
         msg = Message()
-        msg['Reply-To'] = Header(header_string, 'utf-8',
+        msg['Reply-To'] = Header(header_string,
                                  header_name='Reply-To')
         eq(msg.as_string(maxheaderlen=78), """\
 Reply-To: =?utf-8?q?Britische_Regierung_gibt_gr=C3=BCnes_Licht_f=C3=BCr_Offs?=
@@ -1226,7 +1243,6 @@
              =?utf-8?q?_folding_white_space_works?=""")+'\n')
 
 
-
 # Test mangling of "From " lines in the body of a message
 class TestFromMangling(unittest.TestCase):
     def setUp(self):
@@ -1259,6 +1275,42 @@
 Blah blah blah
 """)
 
+    def test_mangle_from_in_preamble_and_epilog(self):
+        s = StringIO()
+        g = Generator(s, mangle_from_=True)
+        msg = email.message_from_string(textwrap.dedent("""\
+            From: foo@bar.com
+            Mime-Version: 1.0
+            Content-Type: multipart/mixed; boundary=XXX
+
+            From somewhere unknown
+
+            --XXX
+            Content-Type: text/plain
+
+            foo
+
+            --XXX--
+
+            From somewhere unknowable
+            """))
+        g.flatten(msg)
+        self.assertEqual(len([1 for x in s.getvalue().split('\n')
+                                  if x.startswith('>From ')]), 2)
+
+    def test_mangled_from_with_bad_bytes(self):
+        source = textwrap.dedent("""\
+            Content-Type: text/plain; charset="utf-8"
+            MIME-Version: 1.0
+            Content-Transfer-Encoding: 8bit
+            From: aaa@bbb.org
+
+        """).encode('utf-8')
+        msg = email.message_from_bytes(source + b'From R\xc3\xb6lli\n')
+        b = BytesIO()
+        g = BytesGenerator(b, mangle_from_=True)
+        g.flatten(msg)
+        self.assertEqual(b.getvalue(), source + b'>From R\xc3\xb6lli\n')
 
 
 # Test the basic MIMEAudio class
@@ -2502,14 +2554,11 @@
 
     def test__all__(self):
         module = __import__('email')
-        # Can't use sorted() here due to Python 2.3 compatibility
-        all = module.__all__[:]
-        all.sort()
-        self.assertEqual(all, [
-            'base64mime', 'charset', 'encoders', 'errors', 'generator',
-            'header', 'iterators', 'message', 'message_from_binary_file',
-            'message_from_bytes', 'message_from_file',
-            'message_from_string', 'mime', 'parser',
+        self.assertEqual(sorted(module.__all__), [
+            'base64mime', 'charset', 'encoders', 'errors', 'feedparser',
+            'generator', 'header', 'iterators', 'message',
+            'message_from_binary_file', 'message_from_bytes',
+            'message_from_file', 'message_from_string', 'mime', 'parser',
             'quoprimime', 'utils',
             ])
 
@@ -2572,6 +2621,12 @@
         eq(time.localtime(t)[:6], timetup[:6])
         eq(int(time.strftime('%Y', timetup[:9])), 2003)
 
+    def test_mktime_tz(self):
+        self.assertEqual(utils.mktime_tz((1970, 1, 1, 0, 0, 0,
+                                          -1, -1, -1, 0)), 0)
+        self.assertEqual(utils.mktime_tz((1970, 1, 1, 0, 0, 0,
+                                          -1, -1, -1, 1234)), -1234)
+
     def test_parsedate_y2k(self):
         """Test for parsing a date with a two-digit year.
 
@@ -3424,6 +3479,30 @@
         g.flatten(msg)
         self.assertEqual(s.getvalue(), source)
 
+    def test_bytes_generator_b_encoding_linesep(self):
+        # Issue 14062: b encoding was tacking on an extra \n.
+        m = Message()
+        # This has enough non-ascii that it should always end up b encoded.
+        m['Subject'] = Header('žluťoučký kůň')
+        s = BytesIO()
+        g = email.generator.BytesGenerator(s)
+        g.flatten(m, linesep='\r\n')
+        self.assertEqual(
+            s.getvalue(),
+            b'Subject: =?utf-8?b?xb5sdcWlb3XEjWvDvSBrxa/FiA==?=\r\n\r\n')
+
+    def test_generator_b_encoding_linesep(self):
+        # Since this broke in ByteGenerator, test Generator for completeness.
+        m = Message()
+        # This has enough non-ascii that it should always end up b encoded.
+        m['Subject'] = Header('žluťoučký kůň')
+        s = StringIO()
+        g = email.generator.Generator(s)
+        g.flatten(m, linesep='\r\n')
+        self.assertEqual(
+            s.getvalue(),
+            'Subject: =?utf-8?b?xb5sdcWlb3XEjWvDvSBrxa/FiA==?=\r\n\r\n')
+
     maxDiff = None
 
 
diff -r 3d0686d90f55 Lib/gzip.py
--- a/Lib/gzip.py
+++ b/Lib/gzip.py
@@ -159,9 +159,8 @@
         if fileobj is None:
             fileobj = self.myfileobj = builtins.open(filename, mode or 'rb')
         if filename is None:
-            if hasattr(fileobj, 'name') and isinstance(fileobj.name, str):
-                filename = fileobj.name
-            else:
+            filename = getattr(fileobj, 'name', '')
+            if not isinstance(filename, (str, bytes)):
                 filename = ''
         if mode is None:
             if hasattr(fileobj, 'mode'): mode = fileobj.mode
@@ -236,7 +235,8 @@
             # RFC 1952 requires the FNAME field to be Latin-1. Do not
             # include filenames that cannot be represented that way.
             fname = os.path.basename(self.name)
-            fname = fname.encode('latin-1')
+            if not isinstance(fname, bytes):
+                fname = fname.encode('latin-1')
             if fname.endswith(b'.gz'):
                 fname = fname[:-3]
         except UnicodeEncodeError:
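
Illustration (not part of the patch), assuming a file object opened with a bytes path: GzipFile now also accepts a bytes .name, and the latin-1 encode step is skipped when the name is already bytes.

    import gzip

    raw = open(b'example.gz', 'wb')        # hypothetical bytes path; raw.name is bytes
    with gzip.GzipFile(fileobj=raw, mode='wb') as gz:
        gz.write(b'payload')               # FNAME field is taken from the bytes name as-is
    raw.close()
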
diff -r 3d0686d90f55 Lib/hashlib.py
--- a/Lib/hashlib.py
+++ b/Lib/hashlib.py
@@ -88,7 +88,7 @@
     except ImportError:
         pass  # no extension module, this hash is unsupported.
 
-    raise ValueError('unsupported hash type %s' % name)
+    raise ValueError('unsupported hash type ' + name)
 
 
 def __get_openssl_constructor(name):
diff -r 3d0686d90f55 Lib/html/parser.py
--- a/Lib/html/parser.py
+++ b/Lib/html/parser.py
@@ -22,7 +22,7 @@
 starttagopen = re.compile('<[a-zA-Z]')
 piclose = re.compile('>')
 commentclose = re.compile(r'--\s*>')
-tagfind = re.compile('[a-zA-Z][-.a-zA-Z0-9:_]*')
+tagfind = re.compile('([a-zA-Z][-.a-zA-Z0-9:_]*)(?:\s|/(?!>))*')
 # see http://www.w3.org/TR/html5/tokenization.html#tag-open-state
 # and http://www.w3.org/TR/html5/tokenization.html#tag-name-state
 tagfind_tolerant = re.compile('[a-zA-Z][^\t\n\r\f />\x00]*')
@@ -36,7 +36,7 @@
     r'\s*([a-zA-Z_][-.:a-zA-Z_0-9]*)(\s*=\s*'
     r'(\'[^\']*\'|"[^"]*"|[^\s"\'=<>`]*))?')
 attrfind_tolerant = re.compile(
-    r'[\s/]*((?<=[\'"\s/])[^\s/>][^\s/=>]*)(\s*=+\s*'
+    r'((?<=[\'"\s/])[^\s/>][^\s/=>]*)(\s*=+\s*'
     r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?(?:\s|/(?!>))*')
 locatestarttagend = re.compile(r"""
   <[a-zA-Z][-.a-zA-Z0-9:_]*          # tag name
@@ -327,7 +327,7 @@
         match = tagfind.match(rawdata, i+1)
         assert match, 'unexpected call to parse_starttag()'
         k = match.end()
-        self.lasttag = tag = rawdata[i+1:k].lower()
+        self.lasttag = tag = match.group(1).lower()
         while k < endpos:
             if self.strict:
                 m = attrfind.match(rawdata, k)
diff -r 3d0686d90f55 Lib/http/client.py
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -563,7 +563,7 @@
                 # a vanishingly small number of sites EOF without
                 # sending the trailer
                 break
-            if line == b"\r\n":
+            if line in (b'\r\n', b'\n', b''):
                 break
 
         # we read everything; close the "file"
@@ -715,7 +715,10 @@
             line = response.fp.readline(_MAXLINE + 1)
             if len(line) > _MAXLINE:
                 raise LineTooLong("header line")
-            if line == b'\r\n':
+            if not line:
+                # for sites which EOF without sending a trailer
+                break
+            if line in (b'\r\n', b'\n', b''):
                 break
 
     def connect(self):
@@ -994,7 +997,7 @@
 
         self.putrequest(method, url, **skips)
 
-        if body and ('content-length' not in header_names):
+        if body is not None and ('content-length' not in header_names):
             self._set_content_length(body)
         for hdr, value in headers.items():
             self.putheader(hdr, value)
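
Illustration (not part of the patch): with the `body is not None` test above, an explicitly empty request body is no longer treated like "no body", so a Content-Length: 0 header is now generated automatically. The host name below is hypothetical.

    import http.client

    conn = http.client.HTTPConnection('www.example.com')
    conn.request('POST', '/submit', body=b'')   # now sends Content-Length: 0
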
diff -r 3d0686d90f55 Lib/http/cookies.py
--- a/Lib/http/cookies.py
+++ b/Lib/http/cookies.py
@@ -301,7 +301,7 @@
     from time import gmtime, time
     now = time()
     year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future)
-    return "%s, %02d-%3s-%4d %02d:%02d:%02d GMT" % \
+    return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % \
            (weekdayname[wd], day, monthname[month], year, hh, mm, ss)
 
 
@@ -439,7 +439,7 @@
     (?P<val>                       # Start of group 'val'
     "(?:[^\\"]|\\.)*"                # Any doublequoted string
     |                                # or
-    \w{3},\s[\w\d-]{9,11}\s[\d:]{8}\sGMT  # Special case for "expires" attr
+    \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT  # Special case for "expires" attr
     |                                # or
     """ + _LegalCharsPatt + r"""*    # Any word or empty string
     )                              # End of group 'val'
diff -r 3d0686d90f55 Lib/http/server.py
--- a/Lib/http/server.py
+++ b/Lib/http/server.py
@@ -508,13 +508,13 @@
         specified as subsequent arguments (it's just like
         printf!).
 
-        The client host and current date/time are prefixed to
+        The client IP address and current date/time are prefixed to
         every message.
 
         """
 
         sys.stderr.write("%s - - [%s] %s\n" %
-                         (self.address_string(),
+                         (self.client_address[0],
                           self.log_date_time_string(),
                           format%args))
 
@@ -825,44 +825,47 @@
 
 # Utilities for CGIHTTPRequestHandler
 
-# TODO(gregory.p.smith): Move this into an appropriate library.
-def _url_collapse_path_split(path):
+def _url_collapse_path(path):
     """
     Given a URL path, remove extra '/'s and '.' path elements and collapse
-    any '..' references.
+    any '..' references, returning the collapsed path.
 
     Implements something akin to RFC-2396 5.2 step 6 to parse relative paths.
+    The utility of this function is limited to the is_cgi method and helps
+    prevent some security attacks.
 
     Returns: A tuple of (head, tail) where tail is everything after the final /
     and head is everything before it.  Head will always start with a '/' and,
     if it contains anything else, never have a trailing '/'.
 
     Raises: IndexError if too many '..' occur within the path.
+
     """
     # Similar to os.path.split(os.path.normpath(path)) but specific to URL
     # path semantics rather than local operating system semantics.
-    path_parts = []
-    for part in path.split('/'):
-        if part == '.':
-            path_parts.append('')
-        else:
-            path_parts.append(part)
-    # Filter out blank non trailing parts before consuming the '..'.
-    path_parts = [part for part in path_parts[:-1] if part] + path_parts[-1:]
+    path_parts = path.split('/')
+    head_parts = []
+    for part in path_parts[:-1]:
+        if part == '..':
+            head_parts.pop() # IndexError if more '..' than prior parts
+        elif part and part != '.':
+            head_parts.append( part )
     if path_parts:
         tail_part = path_parts.pop()
+        if tail_part:
+            if tail_part == '..':
+                head_parts.pop()
+                tail_part = ''
+            elif tail_part == '.':
+                tail_part = ''
     else:
         tail_part = ''
-    head_parts = []
-    for part in path_parts:
-        if part == '..':
-            head_parts.pop()
-        else:
-            head_parts.append(part)
-    if tail_part and tail_part == '..':
-        head_parts.pop()
-        tail_part = ''
-    return ('/' + '/'.join(head_parts), tail_part)
+
+    splitpath = ('/' + '/'.join(head_parts), tail_part)
+    collapsed_path = "/".join(splitpath)
+
+    return collapsed_path
+
 
 
 nobody = None
@@ -943,13 +946,15 @@
         (and the next character is a '/' or the end of the string).
 
         """
-
-        splitpath = _url_collapse_path_split(self.path)
-        if splitpath[0] in self.cgi_directories:
-            self.cgi_info = splitpath
+        collapsed_path = _url_collapse_path(self.path)
+        dir_sep = collapsed_path.find('/', 1)
+        head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:]
+        if head in self.cgi_directories:
+            self.cgi_info = head, tail
             return True
         return False
 
+
     cgi_directories = ['/cgi-bin', '/htbin']
 
     def is_executable(self, path):
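
Illustration (not part of the patch) of what the rewritten _url_collapse_path helper returns and how is_cgi splits the result; the sample paths are hypothetical.

    # _url_collapse_path('/cgi-bin/../foo/./bar')  ->  '/foo/bar'
    # _url_collapse_path('//cgi-bin/script.py')    ->  '/cgi-bin/script.py'

    # is_cgi() then splits the collapsed path at the first '/' after the
    # leading one:
    collapsed = '/cgi-bin/script.py'
    dir_sep = collapsed.find('/', 1)
    head, tail = collapsed[:dir_sep], collapsed[dir_sep + 1:]
    assert (head, tail) == ('/cgi-bin', 'script.py')
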
diff -r 3d0686d90f55 Lib/idlelib/AutoComplete.py
--- a/Lib/idlelib/AutoComplete.py
+++ b/Lib/idlelib/AutoComplete.py
@@ -124,19 +124,26 @@
         curline = self.text.get("insert linestart", "insert")
         i = j = len(curline)
         if hp.is_in_string() and (not mode or mode==COMPLETE_FILES):
+            # Find the beginning of the string
+            # fetch_completions will look at the file system to determine whether the
+            # string value constitutes an actual file name
+            # XXX could consider raw strings here and unescape the string value if it's
+            # not raw.
             self._remove_autocomplete_window()
             mode = COMPLETE_FILES
-            while i and curline[i-1] in FILENAME_CHARS:
+            # Find last separator or string start
+            while i and curline[i-1] not in "'\"" + SEPS:
                 i -= 1
             comp_start = curline[i:j]
             j = i
-            while i and curline[i-1] in FILENAME_CHARS + SEPS:
+            # Find string start
+            while i and curline[i-1] not in "'\"":
                 i -= 1
             comp_what = curline[i:j]
         elif hp.is_in_code() and (not mode or mode==COMPLETE_ATTRIBUTES):
             self._remove_autocomplete_window()
             mode = COMPLETE_ATTRIBUTES
-            while i and curline[i-1] in ID_CHARS:
+            while i and (curline[i-1] in ID_CHARS or ord(curline[i-1]) > 127):
                 i -= 1
             comp_start = curline[i:j]
             if i and curline[i-1] == '.':
diff -r 3d0686d90f55 Lib/idlelib/AutoCompleteWindow.py
--- a/Lib/idlelib/AutoCompleteWindow.py
+++ b/Lib/idlelib/AutoCompleteWindow.py
@@ -354,6 +354,15 @@
             # A modifier key, so ignore
             return
 
+        elif event.char and event.char >= ' ':
+            # Regular character with a non-length-1 keycode
+            self._change_start(self.start + event.char)
+            self.lasttypedstart = self.start
+            self.listbox.select_clear(0, int(self.listbox.curselection()[0]))
+            self.listbox.select_set(self._binary_search(self.start))
+            self._selection_changed()
+            return "break"
+
         else:
             # Unknown event, close the window and let it through.
             self.hide_window()
diff -r 3d0686d90f55 Lib/idlelib/CallTipWindow.py
--- a/Lib/idlelib/CallTipWindow.py
+++ b/Lib/idlelib/CallTipWindow.py
@@ -22,6 +22,7 @@
         self.parenline = self.parencol = None
         self.lastline = None
         self.hideid = self.checkhideid = None
+        self.checkhide_after_id = None
 
     def position_window(self):
         """Check if needs to reposition the window, and if so - do it."""
@@ -102,7 +103,10 @@
             self.hidetip()
         else:
             self.position_window()
-            self.widget.after(CHECKHIDE_TIME, self.checkhide_event)
+            if self.checkhide_after_id is not None:
+                self.widget.after_cancel(self.checkhide_after_id)
+            self.checkhide_after_id = \
+                self.widget.after(CHECKHIDE_TIME, self.checkhide_event)
 
     def hide_event(self, event):
         if not self.tipwindow:
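
Illustration (not part of the patch): the generic Tk after()/after_cancel() idiom used above, which keeps at most one pending checkhide timer. The names below (root, poll, schedule_poll) are made up for the sketch.

    import tkinter as tk

    root = tk.Tk()
    pending = None

    def poll():
        print('poll fired')

    def schedule_poll():
        global pending
        if pending is not None:
            root.after_cancel(pending)   # drop the previously queued call
        pending = root.after(100, poll)  # keep exactly one pending timer
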
diff -r 3d0686d90f55 Lib/idlelib/CallTips.py
--- a/Lib/idlelib/CallTips.py
+++ b/Lib/idlelib/CallTips.py
@@ -67,18 +67,18 @@
         if not sur_paren:
             return
         hp.set_index(sur_paren[0])
-        name = hp.get_expression()
-        if not name:
+        expression  = hp.get_expression()
+        if not expression:
             return
-        if not evalfuncs and (name.find('(') != -1):
+        if not evalfuncs and (expression.find('(') != -1):
             return
-        argspec = self.fetch_tip(name)
+        argspec = self.fetch_tip(expression)
         if not argspec:
             return
         self.active_calltip = self._calltip_window()
         self.active_calltip.showtip(argspec, sur_paren[0], sur_paren[1])
 
-    def fetch_tip(self, name):
+    def fetch_tip(self, expression):
         """Return the argument list and docstring of a function or class.
 
         If there is a Python subprocess, get the calltip there.  Otherwise,
@@ -94,53 +94,59 @@
         """
         try:
             rpcclt = self.editwin.flist.pyshell.interp.rpcclt
-        except:
+        except AttributeError:
             rpcclt = None
         if rpcclt:
             return rpcclt.remotecall("exec", "get_the_calltip",
-                                     (name,), {})
+                                     (expression,), {})
         else:
-            entity = self.get_entity(name)
-            return get_argspec(entity)
+            return get_argspec(get_entity(expression))
 
-    def get_entity(self, name):
-        "Lookup name in a namespace spanning sys.modules and __main.dict__."
-        if name:
-            namespace = sys.modules.copy()
-            namespace.update(__main__.__dict__)
-            try:
-                return eval(name, namespace)
-            except (NameError, AttributeError):
-                return None
+def get_entity(expression):
+    """Return the object corresponding to expression evaluated
+    in a namespace spanning sys.modules and __main__.__dict__.
+    """
+    if expression:
+        namespace = sys.modules.copy()
+        namespace.update(__main__.__dict__)
+        try:
+            return eval(expression, namespace)
+        except BaseException:
+            # An uncaught exception closes idle, and eval can raise any
+            # exception, especially if user classes are involved.
+            return None
 
-def _find_constructor(class_ob):
-    "Find the nearest __init__() in the class tree."
-    try:
-        return class_ob.__init__.__func__
-    except AttributeError:
-        for base in class_ob.__bases__:
-            init = _find_constructor(base)
-            if init:
-                return init
-        return None
+# The following are used in both get_argspec and tests
+_first_param = re.compile('(?<=\()\w*\,?\s*')
+_default_callable_argspec = "No docstring, see docs."
 
 def get_argspec(ob):
-    """Get a string describing the arguments for the given object."""
+    '''Return a string describing the arguments and return of a callable object.
+
+    For Python-coded functions and methods, the first line is introspected.
+    Delete 'self' parameter for classes (.__init__) and bound methods.
+    The last line is the first line of the doc string.  For builtins, this typically
+    includes the arguments in addition to the return value.
+
+    '''
     argspec = ""
-    if ob is not None:
+    if hasattr(ob, '__call__'):
         if isinstance(ob, type):
-            fob = _find_constructor(ob)
-            if fob is None:
-                fob = lambda: None
-        elif isinstance(ob, types.MethodType):
-            fob = ob.__func__
+            fob = getattr(ob, '__init__', None)
+        elif isinstance(ob.__call__, types.MethodType):
+            fob = ob.__call__
         else:
             fob = ob
-        if isinstance(fob, (types.FunctionType, types.LambdaType)):
+        if isinstance(fob, (types.FunctionType, types.MethodType)):
             argspec = inspect.formatargspec(*inspect.getfullargspec(fob))
-            pat = re.compile('self\,?\s*')
-            argspec = pat.sub("", argspec)
-        doc = getattr(ob, "__doc__", "")
+            if (isinstance(ob, (type, types.MethodType)) or
+                    isinstance(ob.__call__, types.MethodType)):
+                argspec = _first_param.sub("", argspec)
+
+        if isinstance(ob.__call__, types.MethodType):
+            doc = ob.__call__.__doc__
+        else:
+            doc = getattr(ob, "__doc__", "")
         if doc:
             doc = doc.lstrip()
             pos = doc.find("\n")
@@ -149,55 +155,113 @@
             if argspec:
                 argspec += "\n"
             argspec += doc[:pos]
+        if not argspec:
+            argspec = _default_callable_argspec
     return argspec
 
 #################################################
 #
-# Test code
-#
+# Test code exercising CallTips.fetch_tip, get_entity, and get_argspec
+
 def main():
+    # Putting expected in docstrings results in doubled tips for test
     def t1(): "()"
     def t2(a, b=None): "(a, b=None)"
     def t3(a, *args): "(a, *args)"
     def t4(*args): "(*args)"
-    def t5(a, *args): "(a, *args)"
-    def t6(a, b=None, *args, **kw): "(a, b=None, *args, **kw)"
+    def t5(a, b=None, *args, **kw): "(a, b=None, *args, **kw)"
 
     class TC(object):
         "(ai=None, *b)"
-        def __init__(self, ai=None, *b): "(ai=None, *b)"
-        def t1(self): "()"
-        def t2(self, ai, b=None): "(ai, b=None)"
-        def t3(self, ai, *args): "(ai, *args)"
-        def t4(self, *args): "(*args)"
-        def t5(self, ai, *args): "(ai, *args)"
-        def t6(self, ai, b=None, *args, **kw): "(ai, b=None, *args, **kw)"
-
-    __main__.__dict__.update(locals())
-
-    def test(tests):
-        ct = CallTips()
-        failed=[]
-        for t in tests:
-            expected = t.__doc__ + "\n" + t.__doc__
-            name = t.__name__
-            # exercise fetch_tip(), not just get_argspec()
-            try:
-                qualified_name = "%s.%s" % (t.__self__.__class__.__name__, name)
-            except AttributeError:
-                qualified_name = name
-            argspec = ct.fetch_tip(qualified_name)
-            if argspec != expected:
-                failed.append(t)
-                fmt = "%s - expected %s, but got %s"
-                print(fmt % (t.__name__, expected, get_argspec(t)))
-        print("%d of %d tests failed" % (len(failed), len(tests)))
+        def __init__(self, ai=None, *b): "(self, ai=None, *b)"
+        def t1(self): "(self)"
+        def t2(self, ai, b=None): "(self, ai, b=None)"
+        def t3(self, ai, *args): "(self, ai, *args)"
+        def t4(self, *args): "(self, *args)"
+        def t5(self, ai, b=None, *args, **kw): "(self, ai, b=None, *args, **kw)"
+        def t6(no, self): "(no, self)"
+        @classmethod
+        def cm(cls, a): "(cls, a)"
+        @staticmethod
+        def sm(b): "(b)"
+        def __call__(self, ci): "(self, ci)"
 
     tc = TC()
-    tests = (t1, t2, t3, t4, t5, t6,
-             TC, tc.t1, tc.t2, tc.t3, tc.t4, tc.t5, tc.t6)
 
-    test(tests)
+    # Python classes that inherit builtin methods
+    class Int(int):  "Int(x[, base]) -> integer"
+    class List(list): "List() -> new empty list"
+    # Simulate builtin with no docstring for default argspec test
+    class SB:  __call__ = None
+
+    __main__.__dict__.update(locals())  # required for get_entity eval()
+
+    num_tests = num_fail = 0
+    tip = CallTips().fetch_tip
+
+    def test(expression, expected):
+        nonlocal num_tests, num_fail
+        num_tests += 1
+        argspec = tip(expression)
+        if argspec != expected:
+            num_fail += 1
+            fmt = "%s - expected\n%r\n - but got\n%r"
+            print(fmt % (expression, expected, argspec))
+
+    def test_builtins():
+        # if first line of a possibly multiline compiled docstring changes,
+        # must change corresponding test string
+        test('int',  "int(x[, base]) -> integer")
+        test('Int',  Int.__doc__)
+        test('types.MethodType', "method(function, instance)")
+        test('list', "list() -> new empty list")
+        test('List', List.__doc__)
+        test('list.__new__',
+               'T.__new__(S, ...) -> a new object with type S, a subtype of T')
+        test('list.__init__',
+               'x.__init__(...) initializes x; see help(type(x)) for signature')
+        append_doc =  "L.append(object) -> None -- append object to end"
+        test('list.append', append_doc)
+        test('[].append', append_doc)
+        test('List.append', append_doc)
+        test('SB()', _default_callable_argspec)
+
+    def test_funcs():
+        for func  in (t1, t2, t3, t4, t5, TC,):
+            fdoc = func.__doc__
+            test(func.__name__, fdoc + "\n" + fdoc)
+        for func in (TC.t1, TC.t2, TC.t3, TC.t4, TC.t5, TC.t6, TC.sm,
+                     TC.__call__):
+            fdoc = func.__doc__
+            test('TC.'+func.__name__, fdoc + "\n" + fdoc)
+        fdoc = TC.cm.__func__.__doc__
+        test('TC.cm.__func__', fdoc + "\n" + fdoc)
+
+    def test_methods():
+        # test that first parameter is correctly removed from argspec
+        # using _first_param re to calculate expected masks re errors
+        for meth, mdoc  in ((tc.t1, "()"), (tc.t4, "(*args)"), (tc.t6, "(self)"),
+                            (TC.cm, "(a)"),):
+            test('tc.'+meth.__name__, mdoc + "\n" + meth.__doc__)
+        test('tc', "(ci)" + "\n" + tc.__call__.__doc__)
+        # directly test that re works to delete unicode parameter name
+        uni = "(A\u0391\u0410\u05d0\u0627\u0905\u1e00\u3042, a)"  # various As
+        assert _first_param.sub('', uni) == '(a)'
+
+    def test_non_callables():
+        # expression evaluates, but not to a callable
+        for expr in ('0', '0.0', 'num_tests', b'num_tests', '[]', '{}'):
+            test(expr, '')
+        # expression does not evaluate, but raises an exception
+        for expr in ('1a', 'xyx', 'num_tests.xyz', '[int][1]', '{0:int}[1]'):
+            test(expr, '')
+
+    test_builtins()
+    test_funcs()
+    test_non_callables()
+    test_methods()
+
+    print("%d of %d tests failed" % (num_fail, num_tests))
 
 if __name__ == '__main__':
     main()
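
Illustration (not part of the patch), mirroring the expectations encoded in main() above: for a Python-coded function whose docstring happens to equal its signature, get_argspec returns the introspected signature plus the first line of the docstring.

    from idlelib.CallTips import get_argspec

    def t2(a, b=None): "(a, b=None)"

    print(get_argspec(t2))
    # -> '(a, b=None)\n(a, b=None)'   (signature line, then first docstring line)
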
diff -r 3d0686d90f55 Lib/idlelib/EditorWindow.py
--- a/Lib/idlelib/EditorWindow.py
+++ b/Lib/idlelib/EditorWindow.py
@@ -860,7 +860,7 @@
         # for each edit window instance, construct the recent files menu
         for instance in self.top.instance_dict:
             menu = instance.recent_files_menu
-            menu.delete(1, END)  # clear, and rebuild:
+            menu.delete(0, END)  # clear, and rebuild:
             for i, file_name in enumerate(rf_list):
                 file_name = file_name.rstrip()  # zap \n
                 # make unicode string to display non-ASCII chars correctly
diff -r 3d0686d90f55 Lib/idlelib/GrepDialog.py
--- a/Lib/idlelib/GrepDialog.py
+++ b/Lib/idlelib/GrepDialog.py
@@ -81,7 +81,7 @@
         hits = 0
         for fn in list:
             try:
-                f = open(fn)
+                f = open(fn, errors='replace')
             except IOError as msg:
                 print(msg)
                 continue
diff -r 3d0686d90f55 Lib/idlelib/IOBinding.py
--- a/Lib/idlelib/IOBinding.py
+++ b/Lib/idlelib/IOBinding.py
@@ -156,29 +156,33 @@
                 self.filename_change_hook()
 
     def open(self, event=None, editFile=None):
-        if self.editwin.flist:
+        flist = self.editwin.flist
+        # Save in case the parent window is closed (i.e., during askopenfile()).
+        if flist:
             if not editFile:
                 filename = self.askopenfile()
             else:
                 filename=editFile
             if filename:
-                # If the current window has no filename and hasn't been
-                # modified, we replace its contents (no loss).  Otherwise
-                # we open a new window.  But we won't replace the
-                # shell window (which has an interp(reter) attribute), which
-                # gets set to "not modified" at every new prompt.
-                try:
-                    interp = self.editwin.interp
-                except AttributeError:
-                    interp = None
-                if not self.filename and self.get_saved() and not interp:
-                    self.editwin.flist.open(filename, self.loadfile)
+                # If editFile is valid and already open, flist.open will
+                # shift focus to its existing window.
+                # If the current window exists and is a fresh unnamed,
+                # unmodified editor window (not an interpreter shell),
+                # pass self.loadfile to flist.open so it will load the file
+                # in the current window (if the file is not already open)
+                # instead of a new window.
+                if (self.editwin and
+                        not getattr(self.editwin, 'interp', None) and
+                        not self.filename and
+                        self.get_saved()):
+                    flist.open(filename, self.loadfile)
                 else:
-                    self.editwin.flist.open(filename)
+                    flist.open(filename)
             else:
-                self.text.focus_set()
+                if self.text:
+                    self.text.focus_set()
             return "break"
-        #
+
         # Code for use outside IDLE:
         if self.get_saved():
             reply = self.maybesave()
diff -r 3d0686d90f55 Lib/idlelib/NEWS.txt
--- a/Lib/idlelib/NEWS.txt
+++ b/Lib/idlelib/NEWS.txt
@@ -1,3 +1,39 @@
+What's New in IDLE 3.2.4?
+=========================
+
+- Issue #7163: Propagate return value of sys.stdout.write.
+
+- Issue #15318: Prevent writing to sys.stdin.
+
+- Issue #13532, #15319: Check that arguments to sys.stdout.write are strings.
+
+- Issue #12510: Attempt to get certain tool tips no longer crashes IDLE.
+  Erroneous tool tips have been corrected. Default added for callables.
+
+- Issue #10365: File open dialog now works instead of crashing even when
+  parent window is closed while dialog is open.
+
+- Issue #14876: Use user-selected font for highlight configuration.
+
+- Issue #14937: Perform auto-completion of filenames in strings even for
+  non-ASCII filenames. Likewise for identifiers.
+
+- Issue #14018: Update checks for unstable system Tcl/Tk versions on OS X
+  to include versions shipped with OS X 10.7 and 10.8 in addition to 10.6.
+
+
+What's New in IDLE 3.2.3?
+=========================
+
+- Issue #14409: IDLE now properly executes commands in the Shell window
+  when it cannot read the normal config files on startup and
+  has to use the built-in default key bindings.
+  There was previously a bug in one of the defaults.
+
+- Issue #3573: IDLE hangs when passing invalid command line args
+  (directory(ies) instead of file(s)).
+
+
 What's New in IDLE 3.2.1?
 =========================
 
diff -r 3d0686d90f55 Lib/idlelib/OutputWindow.py
--- a/Lib/idlelib/OutputWindow.py
+++ b/Lib/idlelib/OutputWindow.py
@@ -40,6 +40,7 @@
         self.text.insert(mark, s, tags)
         self.text.see(mark)
         self.text.update()
+        return len(s)
 
     def writelines(self, lines):
         for line in lines:
diff -r 3d0686d90f55 Lib/idlelib/PyShell.py
--- a/Lib/idlelib/PyShell.py
+++ b/Lib/idlelib/PyShell.py
@@ -12,6 +12,7 @@
 import tokenize
 import traceback
 import types
+import io
 
 import linecache
 from code import InteractiveInterpreter
@@ -247,8 +248,8 @@
     def ranges_to_linenumbers(self, ranges):
         lines = []
         for index in range(0, len(ranges), 2):
-            lineno = int(float(ranges[index]))
-            end = int(float(ranges[index+1]))
+            lineno = int(float(ranges[index].string))
+            end = int(float(ranges[index+1].string))
             while lineno < end:
                 lines.append(lineno)
                 lineno += 1
@@ -309,6 +310,11 @@
             "console": idleConf.GetHighlight(theme, "console"),
         })
 
+    def removecolors(self):
+        # Don't remove shell color tags before "iomark"
+        for tag in self.tagdefs:
+            self.tag_remove(tag, "iomark", "end")
+
 class ModifiedUndoDelegator(UndoDelegator):
     "Extend base class: forbid insert/delete before the I/O mark"
 
@@ -405,6 +411,9 @@
         except socket.timeout as err:
             self.display_no_subprocess_error()
             return None
+        # Can't register self.tkconsole.stdin, since run.py wants to
+        # call non-TextIO methods on it (such as getvar)
+        # XXX should be renamed to "console"
         self.rpcclt.register("stdin", self.tkconsole)
         self.rpcclt.register("stdout", self.tkconsole.stdout)
         self.rpcclt.register("stderr", self.tkconsole.stderr)
@@ -751,7 +760,7 @@
 
     def write(self, s):
         "Override base class method"
-        self.tkconsole.stderr.write(s)
+        return self.tkconsole.stderr.write(s)
 
     def display_port_binding_error(self):
         tkMessageBox.showerror(
@@ -845,13 +854,14 @@
         self.save_stderr = sys.stderr
         self.save_stdin = sys.stdin
         from idlelib import IOBinding
+        self.stdin = PseudoInputFile(self)
         self.stdout = PseudoFile(self, "stdout", IOBinding.encoding)
         self.stderr = PseudoFile(self, "stderr", IOBinding.encoding)
         self.console = PseudoFile(self, "console", IOBinding.encoding)
         if not use_subprocess:
             sys.stdout = self.stdout
             sys.stderr = self.stderr
-            sys.stdin = self
+            sys.stdin = self.stdin
         try:
             # page help() text to shell.
             import pydoc # import must be done here to capture i/o rebinding.
@@ -1219,7 +1229,7 @@
     def write(self, s, tags=()):
         try:
             self.text.mark_gravity("iomark", "right")
-            OutputWindow.write(self, s, tags, "iomark")
+            count = OutputWindow.write(self, s, tags, "iomark")
             self.text.mark_gravity("iomark", "left")
         except:
             raise ###pass  # ### 11Aug07 KBK if we are expecting exceptions
@@ -1228,6 +1238,7 @@
             self.canceled = 0
             if not use_subprocess:
                 raise KeyboardInterrupt
+        return count
 
 class PseudoFile(object):
 
@@ -1237,7 +1248,9 @@
         self.encoding = encoding
 
     def write(self, s):
-        self.shell.write(s, self.tags)
+        if not isinstance(s, str):
+            raise TypeError('must be str, not ' + type(s).__name__)
+        return self.shell.write(s, self.tags)
 
     def writelines(self, lines):
         for line in lines:
@@ -1249,6 +1262,15 @@
     def isatty(self):
         return True
 
+class PseudoInputFile(object):
+    def __init__(self, shell):
+        self.readline = shell.readline
+        self.isatty = shell.isatty
+
+    def write(self, s):
+        raise io.UnsupportedOperation("not writable")
+    writelines = write
+
 
 usage_msg = """\
 
@@ -1389,8 +1411,10 @@
 
     if enable_edit:
         if not (cmd or script):
-            for filename in args:
-                flist.open(filename)
+            for filename in args[:]:
+                if flist.open(filename) is None:
+                    # filename is actually a directory; ignore it
+                    args.remove(filename)
             if not args:
                 flist.new()
     if enable_shell:
@@ -1433,7 +1457,8 @@
     if tkversionwarning:
         shell.interp.runcommand(''.join(("print('", tkversionwarning, "')")))
 
-    root.mainloop()
+    while flist.inversedict:  # keep IDLE running while files are open.
+        root.mainloop()
     root.destroy()
 
 if __name__ == "__main__":
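
Illustration (not part of the patch) of the shell I/O behaviour these PyShell changes produce; the calls below only behave this way inside IDLE, where sys.stdout/sys.stderr are PseudoFile objects and sys.stdin is a PseudoInputFile.

    #   sys.stdout.write('hi\n')    # returns 3 (the character count is propagated)
    #   sys.stdout.write(b'hi\n')   # raises TypeError('must be str, not bytes')
    #   sys.stdin.write('x')        # raises io.UnsupportedOperation('not writable')
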
diff -r 3d0686d90f55 Lib/idlelib/ReplaceDialog.py
--- a/Lib/idlelib/ReplaceDialog.py
+++ b/Lib/idlelib/ReplaceDialog.py
@@ -2,6 +2,8 @@
 
 from idlelib import SearchEngine
 from idlelib.SearchDialogBase import SearchDialogBase
+import re
+
 
 def replace(text):
     root = text._root()
@@ -11,6 +13,7 @@
     dialog = engine._replacedialog
     dialog.open(text)
 
+
 class ReplaceDialog(SearchDialogBase):
 
     title = "Replace Dialog"
@@ -55,8 +58,23 @@
 
     def default_command(self, event=None):
         if self.do_find(self.ok):
-            self.do_replace()
-            self.do_find(0)
+            if self.do_replace():   # Only find next match if replace succeeded.
+                                    # A bad re can cause it to fail.
+                self.do_find(0)
+
+    def _replace_expand(self, m, repl):
+        """ Helper function for expanding a regular expression
+            in the replace field, if needed. """
+        if self.engine.isre():
+            try:
+                new = m.expand(repl)
+            except re.error:
+                self.engine.report_error(repl, 'Invalid Replace Expression')
+                new = None
+        else:
+            new = repl
+
+        return new
 
     def replace_all(self, event=None):
         prog = self.engine.getprog()
@@ -86,7 +104,9 @@
             line, m = res
             chars = text.get("%d.0" % line, "%d.0" % (line+1))
             orig = m.group()
-            new = m.expand(repl)
+            new = self._replace_expand(m, repl)
+            if new is None:
+                break
             i, j = m.span()
             first = "%d.%d" % (line, i)
             last = "%d.%d" % (line, j)
@@ -103,7 +123,6 @@
         text.undo_block_stop()
         if first and last:
             self.show_hit(first, last)
-        self.close()
 
     def do_find(self, ok=0):
         if not self.engine.getprog():
@@ -138,7 +157,9 @@
         m = prog.match(chars, col)
         if not prog:
             return False
-        new = m.expand(self.replvar.get())
+        new = self._replace_expand(m, self.replvar.get())
+        if new is None:
+            return False
         text.mark_set("insert", first)
         text.undo_block_start()
         if m.group():
diff -r 3d0686d90f55 Lib/idlelib/configDialog.py
--- a/Lib/idlelib/configDialog.py
+++ b/Lib/idlelib/configDialog.py
@@ -187,7 +187,7 @@
                               text=' Highlighting Theme ')
         #frameCustom
         self.textHighlightSample=Text(frameCustom,relief=SOLID,borderwidth=1,
-            font=('courier',12,''),cursor='hand2',width=21,height=10,
+            font=('courier',12,''),cursor='hand2',width=21,height=11,
             takefocus=FALSE,highlightthickness=0,wrap=NONE)
         text=self.textHighlightSample
         text.bind('<Double-Button-1>',lambda e: 'break')
@@ -821,8 +821,10 @@
             fontWeight=tkFont.BOLD
         else:
             fontWeight=tkFont.NORMAL
-        self.editFont.config(size=self.fontSize.get(),
+        size=self.fontSize.get()
+        self.editFont.config(size=size,
                 weight=fontWeight,family=fontName)
+        self.textHighlightSample.configure(font=(fontName, size, fontWeight))
 
     def SetHighlightTarget(self):
         if self.highlightTarget.get()=='Cursor': #bg not possible
diff -r 3d0686d90f55 Lib/idlelib/configHandler.py
--- a/Lib/idlelib/configHandler.py
+++ b/Lib/idlelib/configHandler.py
@@ -595,7 +595,7 @@
             '<<replace>>': ['<Control-h>'],
             '<<goto-line>>': ['<Alt-g>'],
             '<<smart-backspace>>': ['<Key-BackSpace>'],
-            '<<newline-and-indent>>': ['<Key-Return> <Key-KP_Enter>'],
+            '<<newline-and-indent>>': ['<Key-Return>', '<Key-KP_Enter>'],
             '<<smart-indent>>': ['<Key-Tab>'],
             '<<indent-region>>': ['<Control-Key-bracketright>'],
             '<<dedent-region>>': ['<Control-Key-bracketleft>'],
diff -r 3d0686d90f55 Lib/idlelib/macosxSupport.py
--- a/Lib/idlelib/macosxSupport.py
+++ b/Lib/idlelib/macosxSupport.py
@@ -37,17 +37,21 @@
 def tkVersionWarning(root):
     """
     Returns a string warning message if the Tk version in use appears to
-    be one known to cause problems with IDLE.  The Apple Cocoa-based Tk 8.5
-    that was shipped with Mac OS X 10.6.
+    be one known to cause problems with IDLE.
+    1. Apple Cocoa-based Tk 8.5.7 shipped with Mac OS X 10.6 is unusable.
+    2. Apple Cocoa-based Tk 8.5.9 in OS X 10.7 and 10.8 is better but
+        can still crash unexpectedly.
     """
 
     if (runningAsOSXApp() and
-            ('AppKit' in root.tk.call('winfo', 'server', '.')) and
-            (root.tk.call('info', 'patchlevel') == '8.5.7') ):
-        return (r"WARNING: The version of Tcl/Tk (8.5.7) in use may"
+            ('AppKit' in root.tk.call('winfo', 'server', '.')) ):
+        patchlevel = root.tk.call('info', 'patchlevel')
+        if patchlevel not in ('8.5.7', '8.5.9'):
+            return False
+        return (r"WARNING: The version of Tcl/Tk ({0}) in use may"
                 r" be unstable.\n"
                 r"Visit http://www.python.org/download/mac/tcltk/"
-                r" for current information.")
+                r" for current information.".format(patchlevel))
     else:
         return False
 
diff -r 3d0686d90f55 Lib/idlelib/run.py
--- a/Lib/idlelib/run.py
+++ b/Lib/idlelib/run.py
@@ -1,4 +1,5 @@
 import sys
+import io
 import linecache
 import time
 import socket
@@ -244,6 +245,45 @@
             quitting = True
             thread.interrupt_main()
 
+class _RPCFile(io.TextIOBase):
+    """Wrapper class for the RPC proxy to typecheck arguments
+    that may not support pickling. The base class is there only
+    to support type tests; all implementations come from the remote
+    object."""
+
+    def __init__(self, rpc):
+        super.__setattr__(self, 'rpc', rpc)
+
+    def __getattribute__(self, name):
+        # When accessing the 'rpc' attribute, or 'write'/'writelines', use ours
+        if name in ('rpc', 'write', 'writelines'):
+            return io.TextIOBase.__getattribute__(self, name)
+        # Otherwise, look only into the remote object
+        return getattr(self.rpc, name)
+
+    def __setattr__(self, name, value):
+        return setattr(self.rpc, name, value)
+
+    @staticmethod
+    def _ensure_string(func):
+        def f(self, s):
+            if not isinstance(s, str):
+                raise TypeError('must be str, not ' + type(s).__name__)
+            return func(self, s)
+        return f
+
+class _RPCOutputFile(_RPCFile):
+    @_RPCFile._ensure_string
+    def write(self, s):
+        if not isinstance(s, str):
+            raise TypeError('must be str, not ' + type(s).__name__)
+        return self.rpc.write(s)
+
+class _RPCInputFile(_RPCFile):
+    @_RPCFile._ensure_string
+    def write(self, s):
+        raise io.UnsupportedOperation("not writable")
+    writelines = write
 
 class MyHandler(rpc.RPCHandler):
 
@@ -251,9 +291,10 @@
         """Override base method"""
         executive = Executive(self)
         self.register("exec", executive)
-        sys.stdin = self.console = self.get_remote_proxy("stdin")
-        sys.stdout = self.get_remote_proxy("stdout")
-        sys.stderr = self.get_remote_proxy("stderr")
+        self.console = self.get_remote_proxy("stdin")
+        sys.stdin = _RPCInputFile(self.console)
+        sys.stdout = _RPCOutputFile(self.get_remote_proxy("stdout"))
+        sys.stderr = _RPCOutputFile(self.get_remote_proxy("stderr"))
         # page help() text to shell.
         import pydoc # import must be done here to capture i/o binding
         pydoc.pager = pydoc.plainpager
diff -r 3d0686d90f55 Lib/idlelib/tabbedpages.py
--- a/Lib/idlelib/tabbedpages.py
+++ b/Lib/idlelib/tabbedpages.py
@@ -78,7 +78,7 @@
     def remove_tab(self, tab_name):
         """Remove the tab named <tab_name>"""
         if not tab_name in self._tab_names:
-            raise KeyError("No such Tab: '%s" % page_name)
+            raise KeyError("No such Tab: '%s" % tab_name)
 
         self._tab_names.remove(tab_name)
         self._arrange_tabs()
@@ -88,7 +88,7 @@
         if tab_name == self._selected_tab:
             return
         if tab_name is not None and tab_name not in self._tabs:
-            raise KeyError("No such Tab: '%s" % page_name)
+            raise KeyError("No such Tab: '%s" % tab_name)
 
         # deselect the current selected tab
         if self._selected_tab is not None:
diff -r 3d0686d90f55 Lib/imaplib.py
--- a/Lib/imaplib.py
+++ b/Lib/imaplib.py
@@ -22,7 +22,7 @@
 
 __version__ = "2.58"
 
-import binascii, errno, random, re, socket, subprocess, sys, time
+import binascii, errno, random, re, socket, subprocess, sys, time, calendar
 
 try:
     import ssl
@@ -1340,19 +1340,9 @@
         zone = -zone
 
     tt = (year, mon, day, hour, min, sec, -1, -1, -1)
+    utc = calendar.timegm(tt) - zone
 
-    utc = time.mktime(tt)
-
-    # Following is necessary because the time module has no 'mkgmtime'.
-    # 'mktime' assumes arg in local timezone, so adds timezone/altzone.
-
-    lt = time.localtime(utc)
-    if time.daylight and lt[-1]:
-        zone = zone + time.altzone
-    else:
-        zone = zone + time.timezone
-
-    return time.localtime(utc - zone)
+    return time.localtime(utc)
 
 
 
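
Illustration (not part of the patch): the conversion imaplib.Internaldate2tuple now relies on. calendar.timegm interprets the parsed time tuple as UTC, so subtracting the zone offset yields the epoch value directly, with no localtime/DST guesswork.

    import calendar, time

    # e.g. an INTERNALDATE of "01-Jan-1970 01:00:00 +0100":
    tt = (1970, 1, 1, 1, 0, 0, -1, -1, -1)
    zone = 3600                        # +0100 expressed in seconds

    utc = calendar.timegm(tt) - zone   # 0, independent of the local timezone
    print(time.gmtime(utc))            # 1970-01-01 00:00:00 UTC
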
diff -r 3d0686d90f55 Lib/io.py
--- a/Lib/io.py
+++ b/Lib/io.py
@@ -34,15 +34,6 @@
 """
 # New I/O library conforming to PEP 3116.
 
-# XXX edge cases when switching between reading/writing
-# XXX need to support 1 meaning line-buffered
-# XXX whenever an argument is None, use the default value
-# XXX read/write ops should check readable/writable
-# XXX buffered readinto should work with arbitrary buffer objects
-# XXX use incremental encoder for text output, at least for UTF-16 and UTF-8-SIG
-# XXX check writable, readable and seekable in appropriate places
-
-
 __author__ = ("Guido van Rossum <guido@python.org>, "
               "Mike Verdone <mike.verdone@gmail.com>, "
               "Mark Russell <mark.russell@zen.co.uk>, "
diff -r 3d0686d90f55 Lib/json/__init__.py
--- a/Lib/json/__init__.py
+++ b/Lib/json/__init__.py
@@ -97,7 +97,7 @@
         "json": "obj"
     }
     $ echo '{ 1.2:3.4}' | python -m json.tool
-    Expecting property name: line 1 column 2 (char 2)
+    Expecting property name enclosed in double quotes: line 1 column 2 (char 2)
 """
 __version__ = '2.0.9'
 __all__ = [
diff -r 3d0686d90f55 Lib/json/decoder.py
--- a/Lib/json/decoder.py
+++ b/Lib/json/decoder.py
@@ -173,7 +173,8 @@
                 pairs = object_hook(pairs)
             return pairs, end + 1
         elif nextchar != '"':
-            raise ValueError(errmsg("Expecting property name", s, end))
+            raise ValueError(errmsg(
+                "Expecting property name enclosed in double quotes", s, end))
     end += 1
     while True:
         key, end = scanstring(s, end, strict)
@@ -183,7 +184,7 @@
         if s[end:end + 1] != ':':
             end = _w(s, end).end()
             if s[end:end + 1] != ':':
-                raise ValueError(errmsg("Expecting : delimiter", s, end))
+                raise ValueError(errmsg("Expecting ':' delimiter", s, end))
         end += 1
 
         try:
@@ -211,12 +212,13 @@
         if nextchar == '}':
             break
         elif nextchar != ',':
-            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
+            raise ValueError(errmsg("Expecting ',' delimiter", s, end - 1))
         end = _w(s, end).end()
         nextchar = s[end:end + 1]
         end += 1
         if nextchar != '"':
-            raise ValueError(errmsg("Expecting property name", s, end - 1))
+            raise ValueError(errmsg(
+                "Expecting property name enclosed in double quotes", s, end - 1))
     if object_pairs_hook is not None:
         result = object_pairs_hook(pairs)
         return result, end
@@ -250,7 +252,7 @@
         if nextchar == ']':
             break
         elif nextchar != ',':
-            raise ValueError(errmsg("Expecting , delimiter", s, end))
+            raise ValueError(errmsg("Expecting ',' delimiter", s, end))
         try:
             if s[end] in _ws:
                 end += 1
diff -r 3d0686d90f55 Lib/json/encoder.py
--- a/Lib/json/encoder.py
+++ b/Lib/json/encoder.py
@@ -27,8 +27,7 @@
     ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
     #ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
 
-# Assume this produces an infinity on all machines (probably not guaranteed)
-INFINITY = float('1e66666')
+INFINITY = float('inf')
 FLOAT_REPR = repr
 
 def encode_basestring(s):
diff -r 3d0686d90f55 Lib/json/tool.py
--- a/Lib/json/tool.py
+++ b/Lib/json/tool.py
@@ -7,7 +7,7 @@
         "json": "obj"
     }
     $ echo '{ 1.2:3.4}' | python -m json.tool
-    Expecting property name: line 1 column 2 (char 2)
+    Expecting property name enclosed in double quotes: line 1 column 2 (char 2)
 
 """
 import sys
diff -r 3d0686d90f55 Lib/lib2to3/tests/test_parser.py
--- a/Lib/lib2to3/tests/test_parser.py
+++ b/Lib/lib2to3/tests/test_parser.py
@@ -11,10 +11,14 @@
 # Testing imports
 from . import support
 from .support import driver, test_dir
+from test.support import verbose
 
 # Python imports
 import os
+import sys
 import unittest
+import warnings
+import subprocess
 
 # Local imports
 from lib2to3.pgen2 import tokenize
@@ -171,10 +175,12 @@
             try:
                 tree = driver.parse_string(source)
             except ParseError as err:
-                print('ParseError on file', filepath, err)
+                if verbose > 0:
+                    warnings.warn('ParseError on file %s (%s)' % (filepath, err))
                 continue
             new = str(tree)
-            if diff(filepath, new):
+            x = diff(filepath, new)
+            if x:
                 self.fail("Idempotency failed: %s" % filepath)
 
     def test_extended_unpacking(self):
@@ -183,6 +189,7 @@
         driver.parse_string("(z, *y, w) = m\n")
         driver.parse_string("for *z, m in d: pass\n")
 
+
 class TestLiterals(GrammarTest):
 
     def validate(self, s):
@@ -221,7 +228,7 @@
         with open('@', 'w') as f:
             f.write(str(result))
         fn = fn.replace('"', '\\"')
-        return os.system('diff -u "%s" @' % fn)
+        return subprocess.call(['diff', '-u', fn, '@'], stdout=(subprocess.DEVNULL if verbose < 1 else None))
     finally:
         try:
             os.remove("@")
diff -r 3d0686d90f55 Lib/logging/__init__.py
--- a/Lib/logging/__init__.py
+++ b/Lib/logging/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved.
+# Copyright 2001-2012 by Vinay Sajip. All Rights Reserved.
 #
 # Permission to use, copy, modify, and distribute this software and its
 # documentation for any purpose and without fee is hereby granted,
@@ -16,9 +16,9 @@
 
 """
 Logging package for Python. Based on PEP 282 and comments thereto in
-comp.lang.python, and influenced by Apache's log4j system.
+comp.lang.python.
 
-Copyright (C) 2001-2011 Vinay Sajip. All Rights Reserved.
+Copyright (C) 2001-2012 Vinay Sajip. All Rights Reserved.
 
 To use, simply 'import logging' and log away!
 """
@@ -917,8 +917,12 @@
         """
         Flushes the stream.
         """
-        if self.stream and hasattr(self.stream, "flush"):
-            self.stream.flush()
+        self.acquire()
+        try:
+            if self.stream and hasattr(self.stream, "flush"):
+                self.stream.flush()
+        finally:
+            self.release()
 
     def emit(self, record):
         """
@@ -969,12 +973,16 @@
         """
         Closes the stream.
         """
-        if self.stream:
-            self.flush()
-            if hasattr(self.stream, "close"):
-                self.stream.close()
-            StreamHandler.close(self)
-            self.stream = None
+        self.acquire()
+        try:
+            if self.stream:
+                self.flush()
+                if hasattr(self.stream, "close"):
+                    self.stream.close()
+                StreamHandler.close(self)
+                self.stream = None
+        finally:
+            self.release()
 
     def _open(self):
         """
diff -r 3d0686d90f55 Lib/logging/handlers.py
--- a/Lib/logging/handlers.py
+++ b/Lib/logging/handlers.py
@@ -1,4 +1,4 @@
-# Copyright 2001-2010 by Vinay Sajip. All Rights Reserved.
+# Copyright 2001-2012 by Vinay Sajip. All Rights Reserved.
 #
 # Permission to use, copy, modify, and distribute this software and its
 # documentation for any purpose and without fee is hereby granted,
@@ -16,15 +16,14 @@
 
 """
 Additional handlers for the logging package for Python. The core package is
-based on PEP 282 and comments thereto in comp.lang.python, and influenced by
-Apache's log4j system.
+based on PEP 282 and comments thereto in comp.lang.python.
 
-Copyright (C) 2001-2010 Vinay Sajip. All Rights Reserved.
+Copyright (C) 2001-2012 Vinay Sajip. All Rights Reserved.
 
 To use, simply 'import logging.handlers' and log away!
 """
 
-import logging, socket, os, pickle, struct, time, re
+import errno, logging, socket, os, pickle, struct, time, re
 from stat import ST_DEV, ST_INO, ST_MTIME
 import queue
 try:
@@ -271,9 +270,10 @@
                         dstAtRollover = time.localtime(newRolloverAt)[-1]
                         if dstNow != dstAtRollover:
                             if not dstNow:  # DST kicks in before next rollover, so we need to deduct an hour
-                                newRolloverAt = newRolloverAt - 3600
+                                addend = -3600
                             else:           # DST bows out before next rollover, so we need to add an hour
-                                newRolloverAt = newRolloverAt + 3600
+                                addend = 3600
+                            newRolloverAt += addend
                     result = newRolloverAt
         return result
 
@@ -324,11 +324,20 @@
             self.stream.close()
             self.stream = None
         # get the time that this sequence started at and make it a TimeTuple
+        currentTime = int(time.time())
+        dstNow = time.localtime(currentTime)[-1]
         t = self.rolloverAt - self.interval
         if self.utc:
             timeTuple = time.gmtime(t)
         else:
             timeTuple = time.localtime(t)
+            dstThen = timeTuple[-1]
+            if dstNow != dstThen:
+                if dstNow:
+                    addend = 3600
+                else:
+                    addend = -3600
+                timeTuple = time.localtime(t + addend)
         dfn = self.baseFilename + "." + time.strftime(self.suffix, timeTuple)
         if os.path.exists(dfn):
             os.remove(dfn)
@@ -338,19 +347,18 @@
                 os.remove(s)
         self.mode = 'w'
         self.stream = self._open()
-        currentTime = int(time.time())
         newRolloverAt = self.computeRollover(currentTime)
         while newRolloverAt <= currentTime:
             newRolloverAt = newRolloverAt + self.interval
         #If DST changes and midnight or weekly rollover, adjust for this.
         if (self.when == 'MIDNIGHT' or self.when.startswith('W')) and not self.utc:
-            dstNow = time.localtime(currentTime)[-1]
             dstAtRollover = time.localtime(newRolloverAt)[-1]
             if dstNow != dstAtRollover:
                 if not dstNow:  # DST kicks in before next rollover, so we need to deduct an hour
-                    newRolloverAt = newRolloverAt - 3600
+                    addend = -3600
                 else:           # DST bows out before next rollover, so we need to add an hour
-                    newRolloverAt = newRolloverAt + 3600
+                    addend = 3600
+                newRolloverAt += addend
         self.rolloverAt = newRolloverAt
 
 class WatchedFileHandler(logging.FileHandler):
@@ -375,11 +383,13 @@
     """
     def __init__(self, filename, mode='a', encoding=None, delay=0):
         logging.FileHandler.__init__(self, filename, mode, encoding, delay)
-        if not os.path.exists(self.baseFilename):
-            self.dev, self.ino = -1, -1
-        else:
-            stat = os.stat(self.baseFilename)
-            self.dev, self.ino = stat[ST_DEV], stat[ST_INO]
+        self.dev, self.ino = -1, -1
+        self._statstream()
+
+    def _statstream(self):
+        if self.stream:
+            sres = os.fstat(self.stream.fileno())
+            self.dev, self.ino = sres[ST_DEV], sres[ST_INO]
 
     def emit(self, record):
         """
@@ -389,21 +399,30 @@
         has, close the old stream and reopen the file to get the
         current stream.
         """
-        if not os.path.exists(self.baseFilename):
-            stat = None
-            changed = 1
-        else:
-            stat = os.stat(self.baseFilename)
-            changed = (stat[ST_DEV] != self.dev) or (stat[ST_INO] != self.ino)
-        if changed and self.stream is not None:
-            self.stream.flush()
-            self.stream.close()
-            self.stream = self._open()
-            if stat is None:
-                stat = os.stat(self.baseFilename)
-            self.dev, self.ino = stat[ST_DEV], stat[ST_INO]
+        # Reduce the chance of race conditions by stat'ing by path only
+        # once and then fstat'ing our new fd if we opened a new log stream.
+        # See issue #14632: Thanks to John Mulligan for the problem report
+        # and patch.
+        try:
+            # stat the file by path, checking for existence
+            sres = os.stat(self.baseFilename)
+        except OSError as err:
+            if err.errno == errno.ENOENT:
+                sres = None
+            else:
+                raise
+        # compare file system stat with that of our stream file handle
+        if not sres or sres[ST_DEV] != self.dev or sres[ST_INO] != self.ino:
+            if self.stream is not None:
+                # we have an open file handle, clean it up
+                self.stream.flush()
+                self.stream.close()
+                # open a new file handle and get new stat info from that fd
+                self.stream = self._open()
+                self._statstream()
         logging.FileHandler.emit(self, record)
 
+
 class SocketHandler(logging.Handler):
     """
     A handler class which writes logging records, in pickle format, to
@@ -511,11 +530,16 @@
         """
         ei = record.exc_info
         if ei:
-            dummy = self.format(record) # just to get traceback text into record.exc_text
-            record.exc_info = None  # to avoid Unpickleable error
-        s = pickle.dumps(record.__dict__, 1)
-        if ei:
-            record.exc_info = ei  # for next handler
+            # just to get traceback text into record.exc_text ...
+            dummy = self.format(record)
+        # See issue #14436: If msg or args are objects, they may not be
+        # available on the receiving end. So we convert the msg % args
+        # to a string, save it as msg and zap the args.
+        d = dict(record.__dict__)
+        d['msg'] = record.getMessage()
+        d['args'] = None
+        d['exc_info'] = None
+        s = pickle.dumps(d, 1)
         slen = struct.pack(">L", len(s))
         return slen + s
 
@@ -554,10 +578,14 @@
         """
         Closes the socket.
         """
-        if self.sock:
-            self.sock.close()
-            self.sock = None
-        logging.Handler.close(self)
+        self.acquire()
+        try:
+            if self.sock:
+                self.sock.close()
+                self.sock = None
+            logging.Handler.close(self)
+        finally:
+            self.release()
 
 class DatagramHandler(SocketHandler):
     """
@@ -733,7 +761,11 @@
         except socket.error:
             self.socket.close()
             self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-            self.socket.connect(address)
+            try:
+                self.socket.connect(address)
+            except socket.error:
+                self.socket.close()
+                raise
 
     def encodePriority(self, facility, priority):
         """
@@ -752,9 +784,13 @@
         """
         Closes the socket.
         """
-        if self.unixsocket:
-            self.socket.close()
-        logging.Handler.close(self)
+        self.acquire()
+        try:
+            if self.unixsocket:
+                self.socket.close()
+            logging.Handler.close(self)
+        finally:
+            self.release()
 
     def mapPriority(self, levelName):
         """
@@ -787,8 +823,6 @@
         prio = prio.encode('utf-8')
         # Message is a string. Convert to bytes as required by RFC 5424
         msg = msg.encode('utf-8')
-        if codecs:
-            msg = codecs.BOM_UTF8 + msg
         msg = prio + msg
         try:
             if self.unixsocket:
@@ -841,6 +875,7 @@
         self.toaddrs = toaddrs
         self.subject = subject
         self.secure = secure
+        self._timeout = 5.0
 
     def getSubject(self, record):
         """
@@ -863,7 +898,7 @@
             port = self.mailport
             if not port:
                 port = smtplib.SMTP_PORT
-            smtp = smtplib.SMTP(self.mailhost, port)
+            smtp = smtplib.SMTP(self.mailhost, port, timeout=self._timeout)
             msg = self.format(record)
             msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % (
                             self.fromaddr,
@@ -1095,7 +1130,11 @@
 
         This version just zaps the buffer to empty.
         """
-        self.buffer = []
+        self.acquire()
+        try:
+            self.buffer = []
+        finally:
+            self.release()
 
     def close(self):
         """
@@ -1145,18 +1184,26 @@
 
         The record buffer is also cleared by this operation.
         """
-        if self.target:
-            for record in self.buffer:
-                self.target.handle(record)
-            self.buffer = []
+        self.acquire()
+        try:
+            if self.target:
+                for record in self.buffer:
+                    self.target.handle(record)
+                self.buffer = []
+        finally:
+            self.release()
 
     def close(self):
         """
         Flush, set the target to None and lose the buffer.
         """
         self.flush()
-        self.target = None
-        BufferingHandler.close(self)
+        self.acquire()
+        try:
+            self.target = None
+            BufferingHandler.close(self)
+        finally:
+            self.release()
 
 
 class QueueHandler(logging.Handler):
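
# Illustrative sketch (not part of the patch) of the device/inode comparison
# the reworked WatchedFileHandler.emit() relies on: stat the path once, and
# only reopen (then fstat the new descriptor) when the log file was moved or
# removed, e.g. by an external log-rotation tool.
import errno, os

def file_was_replaced(path, known_dev, known_ino):
    try:
        sres = os.stat(path)
    except OSError as err:
        if err.errno == errno.ENOENT:
            return True                 # file is gone; a new one must be opened
        raise
    return sres.st_dev != known_dev or sres.st_ino != known_ino
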
diff -r 3d0686d90f55 Lib/mailbox.py
--- a/Lib/mailbox.py
+++ b/Lib/mailbox.py
@@ -587,16 +587,19 @@
         self._file = f
         self._toc = None
         self._next_key = 0
-        self._pending = False   # No changes require rewriting the file.
+        self._pending = False       # No changes require rewriting the file.
+        self._pending_sync = False  # No need to sync the file
         self._locked = False
-        self._file_length = None        # Used to record mailbox size
+        self._file_length = None    # Used to record mailbox size
 
     def add(self, message):
         """Add message and return assigned key."""
         self._lookup()
         self._toc[self._next_key] = self._append_message(message)
         self._next_key += 1
-        self._pending = True
+        # _append_message appends the message to the mailbox file. We
+        # don't need a full rewrite + rename, sync is enough.
+        self._pending_sync = True
         return self._next_key - 1
 
     def remove(self, key):
@@ -642,6 +645,11 @@
     def flush(self):
         """Write any pending changes to disk."""
         if not self._pending:
+            if self._pending_sync:
+                # Messages have only been added, so syncing the file
+                # is enough.
+                _sync_flush(self._file)
+                self._pending_sync = False
             return
 
         # In order to be writing anything out at all, self._toc must
@@ -675,6 +683,7 @@
                     new_file.write(buffer)
                 new_toc[key] = (new_start, new_file.tell())
                 self._post_message_hook(new_file)
+            self._file_length = new_file.tell()
         except:
             new_file.close()
             os.remove(new_file.name)
@@ -682,6 +691,9 @@
         _sync_close(new_file)
         # self._file is about to get replaced, so no need to sync.
         self._file.close()
+        # Make sure the new file's mode is the same as the old file's
+        mode = os.stat(self._path).st_mode
+        os.chmod(new_file.name, mode)
         try:
             os.rename(new_file.name, self._path)
         except OSError as e:
@@ -694,6 +706,7 @@
         self._file = open(self._path, 'rb+')
         self._toc = new_toc
         self._pending = False
+        self._pending_sync = False
         if self._locked:
             _lock_file(self._file, dotlock=False)
 
@@ -730,6 +743,12 @@
         """Append message to mailbox and return (start, stop) offsets."""
         self._file.seek(0, 2)
         before = self._file.tell()
+        if len(self._toc) == 0 and not self._pending:
+            # This is the first message, and the _pre_mailbox_hook
+            # hasn't yet been called. If self._pending is True,
+            # messages have been removed, so _pre_mailbox_hook must
+            # have been called already.
+            self._pre_mailbox_hook(self._file)
         try:
             self._pre_message_hook(self._file)
             offsets = self._install_message(message)
@@ -1424,17 +1443,24 @@
                     line = line[:-1] + b'\n'
                 self._file.write(line.replace(b'\n', linesep))
                 if line == b'\n' or not line:
-                    self._file.write(b'*** EOOH ***' + linesep)
                     if first_pass:
                         first_pass = False
+                        self._file.write(b'*** EOOH ***' + linesep)
                         message.seek(original_pos)
                     else:
                         break
             while True:
-                buffer = message.read(4096)     # Buffer size is arbitrary.
-                if not buffer:
+                line = message.readline()
+                if not line:
                     break
-                self._file.write(buffer.replace(b'\n', linesep))
+                # Universal newline support.
+                if line.endswith(b'\r\n'):
+                    line = line[:-2] + linesep
+                elif line.endswith(b'\r'):
+                    line = line[:-1] + linesep
+                elif line.endswith(b'\n'):
+                    line = line[:-1] + linesep
+                self._file.write(line)
         else:
             raise TypeError('Invalid message type: %s' % type(message))
         stop = self._file.tell()
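
# Illustrative sketch (not part of the patch) of the permission-preserving
# rewrite used above: the temporary file receives the old mailbox's mode
# before the atomic os.rename(), so flushing changes no longer resets the
# file's permissions.
import os, tempfile

def atomic_rewrite(path, data):
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or '.')
    with os.fdopen(fd, 'wb') as f:
        f.write(data)
        f.flush()
        os.fsync(f.fileno())
    os.chmod(tmp, os.stat(path).st_mode)   # keep the original permissions
    os.rename(tmp, path)                   # atomic replacement on POSIX
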
diff -r 3d0686d90f55 Lib/mimetypes.py
--- a/Lib/mimetypes.py
+++ b/Lib/mimetypes.py
@@ -430,7 +430,7 @@
         '.jpe'    : 'image/jpeg',
         '.jpeg'   : 'image/jpeg',
         '.jpg'    : 'image/jpeg',
-        '.js'     : 'application/x-javascript',
+        '.js'     : 'application/javascript',
         '.ksh'    : 'text/plain',
         '.latex'  : 'application/x-latex',
         '.m1v'    : 'video/mpeg',
diff -r 3d0686d90f55 Lib/multiprocessing/connection.py
--- a/Lib/multiprocessing/connection.py
+++ b/Lib/multiprocessing/connection.py
@@ -94,6 +94,17 @@
     else:
         raise ValueError('unrecognized family')
 
+def _validate_family(family):
+    '''
+    Checks if the family is valid for the current environment.
+    '''
+    if sys.platform != 'win32' and family == 'AF_PIPE':
+        raise ValueError('Family %s is not recognized.' % family)
+
+    if sys.platform == 'win32' and family == 'AF_UNIX':
+        # double check
+        if not hasattr(socket, family):
+            raise ValueError('Family %s is not recognized.' % family)
 
 def address_type(address):
     '''
@@ -126,6 +137,7 @@
                  or default_family
         address = address or arbitrary_address(family)
 
+        _validate_family(family)
         if family == 'AF_PIPE':
             self._listener = PipeListener(address, backlog)
         else:
@@ -163,6 +175,7 @@
     Returns a connection to the address of a `Listener`
     '''
     family = family or address_type(address)
+    _validate_family(family)
     if family == 'AF_PIPE':
         c = PipeClient(address)
     else:
@@ -186,6 +199,8 @@
         '''
         if duplex:
             s1, s2 = socket.socketpair()
+            s1.setblocking(True)
+            s2.setblocking(True)
             c1 = _multiprocessing.Connection(os.dup(s1.fileno()))
             c2 = _multiprocessing.Connection(os.dup(s2.fileno()))
             s1.close()
@@ -251,6 +266,7 @@
         self._socket = socket.socket(getattr(socket, family))
         try:
             self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+            self._socket.setblocking(True)
             self._socket.bind(address)
             self._socket.listen(backlog)
             self._address = self._socket.getsockname()
@@ -269,6 +285,7 @@
 
     def accept(self):
         s, self._last_accepted = self._socket.accept()
+        s.setblocking(True)
         fd = duplicate(s.fileno())
         conn = _multiprocessing.Connection(fd)
         s.close()
@@ -286,6 +303,7 @@
     '''
     family = address_type(address)
     with socket.socket( getattr(socket, family) ) as s:
+        s.setblocking(True)
         t = _init_timeout()
 
         while 1:
@@ -347,7 +365,10 @@
             try:
                 win32.ConnectNamedPipe(handle, win32.NULL)
             except WindowsError as e:
-                if e.args[0] != win32.ERROR_PIPE_CONNECTED:
+                # ERROR_NO_DATA can occur if a client has already connected,
+                # written data and then disconnected -- see Issue 14725.
+                if e.args[0] not in (win32.ERROR_PIPE_CONNECTED,
+                                     win32.ERROR_NO_DATA):
                     raise
             return _multiprocessing.PipeConnection(handle)
 
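# Illustrative usage (assumes this patch): asking for an address family that
# cannot work on the current platform now fails early with a ValueError
# instead of a confusing NameError later on.
import sys
from multiprocessing.connection import Client

if sys.platform != 'win32':
    try:
        Client(r'\\.\pipe\example', family='AF_PIPE')
    except ValueError as exc:
        print(exc)   # Family AF_PIPE is not recognized.
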
diff -r 3d0686d90f55 Lib/multiprocessing/dummy/__init__.py
--- a/Lib/multiprocessing/dummy/__init__.py
+++ b/Lib/multiprocessing/dummy/__init__.py
@@ -70,7 +70,8 @@
     def start(self):
         assert self._parent is current_process()
         self._start_called = True
-        self._parent._children[self] = None
+        if hasattr(self._parent, '_children'):
+            self._parent._children[self] = None
         threading.Thread.start(self)
 
     @property
diff -r 3d0686d90f55 Lib/multiprocessing/forking.py
--- a/Lib/multiprocessing/forking.py
+++ b/Lib/multiprocessing/forking.py
@@ -331,7 +331,7 @@
         '''
         Returns prefix of command line used for spawning a child process
         '''
-        if process.current_process()._identity==() and is_forking(sys.argv):
+        if getattr(process.current_process(), '_inheriting', False):
             raise RuntimeError('''
             Attempt to start a new process before the current process
             has finished its bootstrapping phase.
diff -r 3d0686d90f55 Lib/multiprocessing/pool.py
--- a/Lib/multiprocessing/pool.py
+++ b/Lib/multiprocessing/pool.py
@@ -493,7 +493,8 @@
         # We must wait for the worker handler to exit before terminating
         # workers because we don't want workers to be restarted behind our back.
         debug('joining worker handler')
-        worker_handler.join()
+        if threading.current_thread() is not worker_handler:
+            worker_handler.join()
 
         # Terminate workers which haven't already finished.
         if pool and hasattr(pool[0], 'terminate'):
@@ -503,10 +504,12 @@
                     p.terminate()
 
         debug('joining task handler')
-        task_handler.join()
+        if threading.current_thread() is not task_handler:
+            task_handler.join()
 
         debug('joining result handler')
-        result_handler.join()
+        if threading.current_thread() is not result_handler:
+            result_handler.join()
 
         if pool and hasattr(pool[0], 'terminate'):
             debug('joining pool workers')
@@ -584,6 +587,7 @@
         if chunksize <= 0:
             self._number_left = 0
             self._ready = True
+            del cache[self._job]
         else:
             self._number_left = length//chunksize + bool(length % chunksize)
 
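# Illustrative sketch (not part of the patch) of the guard added to
# Pool._terminate_pool() above: a thread may not join itself (threading
# raises RuntimeError), so each helper thread is only joined when the
# cleanup runs on a different thread.
import threading

def join_if_not_current(thread):
    if threading.current_thread() is not thread:
        thread.join()
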
diff -r 3d0686d90f55 Lib/multiprocessing/process.py
--- a/Lib/multiprocessing/process.py
+++ b/Lib/multiprocessing/process.py
@@ -271,11 +271,11 @@
         except SystemExit as e:
             if not e.args:
                 exitcode = 1
-            elif type(e.args[0]) is int:
+            elif isinstance(e.args[0], int):
                 exitcode = e.args[0]
             else:
-                sys.stderr.write(e.args[0] + '\n')
-                exitcode = 1
+                sys.stderr.write(str(e.args[0]) + '\n')
+                exitcode = 0 if isinstance(e.args[0], str) else 1
         except:
             exitcode = 1
             import traceback
diff -r 3d0686d90f55 Lib/os.py
--- a/Lib/os.py
+++ b/Lib/os.py
@@ -152,8 +152,17 @@
         mkdir(name, mode)
     except OSError as e:
         import stat as st
-        if not (e.errno == errno.EEXIST and exist_ok and path.isdir(name) and
-                st.S_IMODE(lstat(name).st_mode) == _get_masked_mode(mode)):
+        dir_exists = path.isdir(name)
+        expected_mode = _get_masked_mode(mode)
+        if dir_exists:
+            # S_ISGID is automatically copied by the OS from parent to child
+            # directories on mkdir.  Don't consider it being set to be a mode
+            # mismatch as mkdir does not unset it when not specified in mode.
+            actual_mode = st.S_IMODE(lstat(name).st_mode) & ~st.S_ISGID
+        else:
+            actual_mode = -1
+        if not (e.errno == errno.EEXIST and exist_ok and dir_exists and
+                actual_mode == expected_mode):
             raise
 
 def removedirs(name):
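
# Illustrative sketch (not part of the patch) of why the makedirs() change
# above masks out the setgid bit: a directory created under a setgid parent
# inherits S_ISGID from the OS even though mkdir() was not asked for it, so
# the inherited bit must not count as a mode mismatch when exist_ok=True.
import stat

requested = 0o777 & ~0o022             # mode after a typical umask of 022
on_disk = requested | stat.S_ISGID     # what lstat() may report under a setgid parent
assert (on_disk & ~stat.S_ISGID) == requested
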
diff -r 3d0686d90f55 Lib/posixpath.py
--- a/Lib/posixpath.py
+++ b/Lib/posixpath.py
@@ -71,16 +71,25 @@
 def join(a, *p):
     """Join two or more pathname components, inserting '/' as needed.
     If any component is an absolute path, all previous path components
-    will be discarded."""
+    will be discarded.  An empty last part will result in a path that
+    ends with a separator."""
     sep = _get_sep(a)
     path = a
-    for b in p:
-        if b.startswith(sep):
-            path = b
-        elif not path or path.endswith(sep):
-            path +=  b
-        else:
-            path += sep + b
+    try:
+        for b in p:
+            if b.startswith(sep):
+                path = b
+            elif not path or path.endswith(sep):
+                path += b
+            else:
+                path += sep + b
+    except TypeError:
+        valid_types = all(isinstance(s, (str, bytes, bytearray))
+                          for s in (a, ) + p)
+        if valid_types:
+            # Must have a mixture of text and binary data
+            raise TypeError("Can't mix strings and bytes in path components.")
+        raise
     return path
 
 
@@ -266,8 +275,8 @@
         root = b'/'
     else:
         root = '/'
-    userhome = userhome.rstrip(root) or userhome
-    return userhome + path[i:]
+    userhome = userhome.rstrip(root)
+    return (userhome + path[i:]) or root
 
 
 # Expand paths containing shell variable substitutions.
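
# Illustrative examples for the join() behaviour documented above (the exact
# TypeError message assumes this patch): an empty last component keeps the
# trailing separator, and mixing str and bytes components raises an explicit
# error.
import posixpath

assert posixpath.join('usr', 'lib', '') == 'usr/lib/'
try:
    posixpath.join('usr', b'lib')
except TypeError as exc:
    print(exc)   # Can't mix strings and bytes in path components.
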
diff -r 3d0686d90f55 Lib/pprint.py
--- a/Lib/pprint.py
+++ b/Lib/pprint.py
@@ -86,7 +86,11 @@
         self.obj = obj
 
     def __lt__(self, other):
-        rv = self.obj.__lt__(other.obj)
+        try:
+            rv = self.obj.__lt__(other.obj)
+        except TypeError:
+            rv = NotImplemented
+
         if rv is NotImplemented:
             rv = (str(type(self.obj)), id(self.obj)) < \
                  (str(type(other.obj)), id(other.obj))
diff -r 3d0686d90f55 Lib/pyclbr.py
--- a/Lib/pyclbr.py
+++ b/Lib/pyclbr.py
@@ -128,6 +128,8 @@
         parent = _readmodule(package, path, inpackage)
         if inpackage is not None:
             package = "%s.%s" % (inpackage, package)
+        if not '__path__' in parent:
+            raise ImportError('No package named {}'.format(package))
         return _readmodule(submodule, parent['__path__'], package)
 
     # Search the path for the module
diff -r 3d0686d90f55 Lib/pydoc.py
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -1525,7 +1525,8 @@
             raise ImportError('no Python documentation found for %r' % thing)
         return object, thing
     else:
-        return thing, getattr(thing, '__name__', None)
+        name = getattr(thing, '__name__', None)
+        return thing, name if isinstance(name, str) else None
 
 def render_doc(thing, title='Python Library Documentation: %s', forceload=0,
         renderer=None):
@@ -1829,7 +1830,7 @@
 Welcome to Python %s!  This is the online help utility.
 
 If this is your first time using Python, you should definitely check out
-the tutorial on the Internet at http://docs.python.org/tutorial/.
+the tutorial on the Internet at http://docs.python.org/%s/tutorial/.
 
 Enter the name of any module, keyword, or topic to get help on writing
 Python programs and using Python modules.  To quit this help utility and
@@ -1839,7 +1840,7 @@
 "keywords", or "topics".  Each module also comes with a one-line summary
 of what it does; to list the modules whose summaries contain a given word
 such as "spam", type "modules spam".
-''' % sys.version[:3])
+''' % tuple([sys.version[:3]]*2))
 
     def list(self, items, columns=4, width=80):
         items = list(sorted(items))
diff -r 3d0686d90f55 Lib/random.py
--- a/Lib/random.py
+++ b/Lib/random.py
@@ -96,7 +96,7 @@
         None or no argument seeds from current time or from an operating
         system specific randomness source if available.
 
-        For version 2 (the default), all of the bits are used if *a *is a str,
+        For version 2 (the default), all of the bits are used if *a* is a str,
         bytes, or bytearray.  For version 1, the hash() of *a* is used instead.
 
         If *a* is an int, all bits are used.
diff -r 3d0686d90f55 Lib/re.py
--- a/Lib/re.py
+++ b/Lib/re.py
@@ -69,8 +69,11 @@
              In string patterns without the ASCII flag, it will match the whole
              range of Unicode digits.
     \D       Matches any non-digit character; equivalent to [^\d].
-    \s       Matches any whitespace character; equivalent to [ \t\n\r\f\v].
-    \S       Matches any non-whitespace character; equiv. to [^ \t\n\r\f\v].
+    \s       Matches any whitespace character; equivalent to [ \t\n\r\f\v] in
+             bytes patterns or string patterns with the ASCII flag.
+             In string patterns without the ASCII flag, it will match the whole
+             range of Unicode whitespace characters.
+    \S       Matches any non-whitespace character; equivalent to [^\s].
     \w       Matches any alphanumeric character; equivalent to [a-zA-Z0-9_]
              in bytes patterns or string patterns with the ASCII flag.
              In string patterns without the ASCII flag, it will match the
@@ -179,14 +182,19 @@
 
 def split(pattern, string, maxsplit=0, flags=0):
     """Split the source string by the occurrences of the pattern,
-    returning a list containing the resulting substrings."""
+    returning a list containing the resulting substrings.  If
+    capturing parentheses are used in pattern, then the text of all
+    groups in the pattern are also returned as part of the resulting
+    list.  If maxsplit is nonzero, at most maxsplit splits occur,
+    and the remainder of the string is returned as the final element
+    of the list."""
     return _compile(pattern, flags).split(string, maxsplit)
 
 def findall(pattern, string, flags=0):
     """Return a list of all non-overlapping matches in the string.
 
-    If one or more groups are present in the pattern, return a
-    list of groups; this will be a list of tuples if the pattern
+    If one or more capturing groups are present in the pattern, return
+    a list of groups; this will be a list of tuples if the pattern
     has more than one group.
 
     Empty matches are included in the result."""
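
# Worked example of the behaviour the expanded docstrings describe: with a
# capturing group in the pattern, the captured separators are kept in the
# result of re.split(), and maxsplit limits the number of splits.
import re

assert re.split(r'(\d)', 'a1b2c') == ['a', '1', 'b', '2', 'c']
assert re.split(r'\d', 'a1b2c', maxsplit=1) == ['a', 'b2c']
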
diff -r 3d0686d90f55 Lib/rlcompleter.py
--- a/Lib/rlcompleter.py
+++ b/Lib/rlcompleter.py
@@ -1,13 +1,11 @@
-"""Word completion for GNU readline 2.0.
+"""Word completion for GNU readline.
 
-This requires the latest extension to the readline module. The completer
-completes keywords, built-ins and globals in a selectable namespace (which
-defaults to __main__); when completing NAME.NAME..., it evaluates (!) the
-expression up to the last dot and completes its attributes.
+The completer completes keywords, built-ins and globals in a selectable
+namespace (which defaults to __main__); when completing NAME.NAME..., it
+evaluates (!) the expression up to the last dot and completes its attributes.
 
-It's very cool to do "import sys" type "sys.", hit the
-completion key (twice), and see the list of names defined by the
-sys module!
+It's very cool to do "import sys" type "sys.", hit the completion key (twice),
+and see the list of names defined by the sys module!
 
 Tip: to use the tab key as the completion key, call
 
@@ -15,21 +13,19 @@
 
 Notes:
 
-- Exceptions raised by the completer function are *ignored* (and
-generally cause the completion to fail).  This is a feature -- since
-readline sets the tty device in raw (or cbreak) mode, printing a
-traceback wouldn't work well without some complicated hoopla to save,
-reset and restore the tty state.
+- Exceptions raised by the completer function are *ignored* (and generally cause
+  the completion to fail).  This is a feature -- since readline sets the tty
+  device in raw (or cbreak) mode, printing a traceback wouldn't work well
+  without some complicated hoopla to save, reset and restore the tty state.
 
-- The evaluation of the NAME.NAME... form may cause arbitrary
-application defined code to be executed if an object with a
-__getattr__ hook is found.  Since it is the responsibility of the
-application (or the user) to enable this feature, I consider this an
-acceptable risk.  More complicated expressions (e.g. function calls or
-indexing operations) are *not* evaluated.
+- The evaluation of the NAME.NAME... form may cause arbitrary application
+  defined code to be executed if an object with a __getattr__ hook is found.
+  Since it is the responsibility of the application (or the user) to enable this
+  feature, I consider this an acceptable risk.  More complicated expressions
+  (e.g. function calls or indexing operations) are *not* evaluated.
 
 - When the original stdin is not a tty device, GNU readline is never
-used, and this module (and the readline module) are silently inactive.
+  used, and this module (and the readline module) are silently inactive.
 
 """
 
diff -r 3d0686d90f55 Lib/runpy.py
--- a/Lib/runpy.py
+++ b/Lib/runpy.py
@@ -68,6 +68,7 @@
     run_globals.update(__name__ = mod_name,
                        __file__ = mod_fname,
                        __cached__ = None,
+                       __doc__ = None,
                        __loader__ = mod_loader,
                        __package__ = pkg_name)
     exec(code, run_globals)
@@ -242,12 +243,14 @@
     """
     if run_name is None:
         run_name = "<run_path>"
+    pkg_name = run_name.rpartition(".")[0]
     importer = _get_importer(path_name)
     if isinstance(importer, imp.NullImporter):
         # Not a valid sys.path entry, so run the code directly
         # execfile() doesn't help as we want to allow compiled files
         code = _get_code_from_file(path_name)
-        return _run_module_code(code, init_globals, run_name, path_name)
+        return _run_module_code(code, init_globals, run_name, path_name,
+                                pkg_name=pkg_name)
     else:
         # Importer is defined for path, so add it to
         # the start of sys.path
@@ -266,7 +269,6 @@
                 mod_name, loader, code, fname = _get_main_module_details()
             finally:
                 sys.modules[main_name] = saved_main
-            pkg_name = ""
             with _TempModule(run_name) as temp_module, \
                  _ModifiedArgv0(path_name):
                 mod_globals = temp_module.module.__dict__
diff -r 3d0686d90f55 Lib/shutil.py
--- a/Lib/shutil.py
+++ b/Lib/shutil.py
@@ -118,8 +118,10 @@
         try:
             os.chflags(dst, st.st_flags)
         except OSError as why:
-            if (not hasattr(errno, 'EOPNOTSUPP') or
-                why.errno != errno.EOPNOTSUPP):
+            for err in 'EOPNOTSUPP', 'ENOTSUP':
+                if hasattr(errno, err) and why.errno == getattr(errno, err):
+                    break
+            else:
                 raise
 
 def copy(src, dst):
@@ -234,7 +236,7 @@
             # Copying file access times may fail on Windows
             pass
         else:
-            errors.extend((src, dst, str(why)))
+            errors.append((src, dst, str(why)))
     if errors:
         raise Error(errors)
 
diff -r 3d0686d90f55 Lib/socket.py
--- a/Lib/socket.py
+++ b/Lib/socket.py
@@ -197,6 +197,17 @@
         if self._io_refs <= 0:
             self._real_close()
 
+    def detach(self):
+        """detach() -> file descriptor
+
+        Close the socket object without closing the underlying file descriptor.
+        The object cannot be used after this call, but the file descriptor
+        can be reused for other purposes.  The file descriptor is returned.
+        """
+        self._closed = True
+        return super().detach()
+
+
 def fromfd(fd, family, type, proto=0):
     """ fromfd(fd, family, type[, proto]) -> socket object
 
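# Illustrative usage of the detach() behaviour documented above: the socket
# object is marked closed, but the underlying descriptor stays open and is
# now owned (and must eventually be closed) by the caller.
import os, socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
fd = s.detach()      # s can no longer be used for I/O
os.close(fd)         # the caller is responsible for the descriptor
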
diff -r 3d0686d90f55 Lib/socketserver.py
--- a/Lib/socketserver.py
+++ b/Lib/socketserver.py
@@ -133,6 +133,7 @@
 import select
 import sys
 import os
+import errno
 try:
     import threading
 except ImportError:
@@ -147,6 +148,15 @@
                     "ThreadingUnixStreamServer",
                     "ThreadingUnixDatagramServer"])
 
+def _eintr_retry(func, *args):
+    """restart a system call interrupted by EINTR"""
+    while True:
+        try:
+            return func(*args)
+        except (OSError, select.error) as e:
+            if e.args[0] != errno.EINTR:
+                raise
+
 class BaseServer:
 
     """Base class for server classes.
@@ -222,7 +232,8 @@
                 # connecting to the socket to wake this up instead of
                 # polling. Polling reduces our responsiveness to a
                 # shutdown request and wastes cpu at all other times.
-                r, w, e = select.select([self], [], [], poll_interval)
+                r, w, e = _eintr_retry(select.select, [self], [], [],
+                                       poll_interval)
                 if self in r:
                     self._handle_request_noblock()
         finally:
@@ -262,7 +273,7 @@
             timeout = self.timeout
         elif self.timeout is not None:
             timeout = min(timeout, self.timeout)
-        fd_sets = select.select([self], [], [], timeout)
+        fd_sets = _eintr_retry(select.select, [self], [], [], timeout)
         if not fd_sets[0]:
             self.handle_timeout()
             return
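
# Illustrative sketch (not part of the patch) of the retry idiom introduced
# above: re-issue a system call whenever it is interrupted by a signal, and
# let every other error propagate.
import errno, select

def eintr_retry(func, *args):
    while True:
        try:
            return func(*args)
        except (OSError, select.error) as e:
            if e.args[0] != errno.EINTR:
                raise

# e.g. r, w, x = eintr_retry(select.select, [sock], [], [], 0.5)
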
diff -r 3d0686d90f55 Lib/string.py
--- a/Lib/string.py
+++ b/Lib/string.py
@@ -236,12 +236,14 @@
 
     def convert_field(self, value, conversion):
         # do any conversion on the resulting object
-        if conversion == 'r':
-            return repr(value)
+        if conversion is None:
+            return value
         elif conversion == 's':
             return str(value)
-        elif conversion is None:
-            return value
+        elif conversion == 'r':
+            return repr(value)
+        elif conversion == 'a':
+            return ascii(value)
         raise ValueError("Unknown conversion specifier {0!s}".format(conversion))
 
 
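# Example of the new '!a' conversion handled above (assumes this patch):
# string.Formatter now supports the same ascii() conversion as str.format().
import string

f = string.Formatter()
assert f.format('{0!r}', 'café') == "'café'"
assert f.format('{0!a}', 'café') == "'caf\\xe9'"
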
diff -r 3d0686d90f55 Lib/subprocess.py
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -755,8 +755,8 @@
 
 
     def _translate_newlines(self, data, encoding):
-        data = data.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
-        return data.decode(encoding)
+        data = data.decode(encoding)
+        return data.replace("\r\n", "\n").replace("\r", "\n")
 
     def __enter__(self):
         return self
@@ -1075,7 +1075,17 @@
         def terminate(self):
             """Terminates the process
             """
-            _subprocess.TerminateProcess(self._handle, 1)
+            try:
+                _subprocess.TerminateProcess(self._handle, 1)
+            except OSError as e:
+                # ERROR_ACCESS_DENIED (winerror 5) is received when the
+                # process already died.
+                if e.winerror != 5:
+                    raise
+                rc = _subprocess.GetExitCodeProcess(self._handle)
+                if rc == _subprocess.STILL_ACTIVE:
+                    raise
+                self.returncode = rc
 
         kill = terminate
 
diff -r 3d0686d90f55 Lib/tarfile.py
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -2496,7 +2496,7 @@
         """
         if tarinfo.issym():
             # Always search the entire archive.
-            linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname
+            linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
             limit = None
         else:
             # Search the archive before the link, because a hard link is
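
# Worked example of the symlink change above: for a member stored at the
# archive root, os.path.dirname() is '', and filter(None, ...) drops the
# empty component instead of producing a bogus absolute-looking name.
assert "/".join(filter(None, ("", "target"))) == "target"
assert "/".join(filter(None, ("dir", "target"))) == "dir/target"
assert "" + "/" + "target" == "/target"    # the old, incorrect concatenation
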
diff -r 3d0686d90f55 Lib/telnetlib.py
--- a/Lib/telnetlib.py
+++ b/Lib/telnetlib.py
@@ -34,6 +34,7 @@
 
 
 # Imported modules
+import errno
 import sys
 import socket
 import select
@@ -205,6 +206,7 @@
         self.sb = 0 # flag for SB and SE sequence.
         self.sbdataq = b''
         self.option_callback = None
+        self._has_poll = hasattr(select, 'poll')
         if host is not None:
             self.open(host, port, timeout)
 
@@ -287,6 +289,61 @@
         is closed and no cooked data is available.
 
         """
+        if self._has_poll:
+            return self._read_until_with_poll(match, timeout)
+        else:
+            return self._read_until_with_select(match, timeout)
+
+    def _read_until_with_poll(self, match, timeout):
+        """Read until a given string is encountered or until timeout.
+
+        This method uses select.poll() to implement the timeout.
+        """
+        n = len(match)
+        call_timeout = timeout
+        if timeout is not None:
+            from time import time
+            time_start = time()
+        self.process_rawq()
+        i = self.cookedq.find(match)
+        if i < 0:
+            poller = select.poll()
+            poll_in_or_priority_flags = select.POLLIN | select.POLLPRI
+            poller.register(self, poll_in_or_priority_flags)
+            while i < 0 and not self.eof:
+                try:
+                    ready = poller.poll(call_timeout)
+                except select.error as e:
+                    if e.errno == errno.EINTR:
+                        if timeout is not None:
+                            elapsed = time() - time_start
+                            call_timeout = timeout-elapsed
+                        continue
+                    raise
+                for fd, mode in ready:
+                    if mode & poll_in_or_priority_flags:
+                        i = max(0, len(self.cookedq)-n)
+                        self.fill_rawq()
+                        self.process_rawq()
+                        i = self.cookedq.find(match, i)
+                if timeout is not None:
+                    elapsed = time() - time_start
+                    if elapsed >= timeout:
+                        break
+                    call_timeout = timeout-elapsed
+            poller.unregister(self)
+        if i >= 0:
+            i = i + n
+            buf = self.cookedq[:i]
+            self.cookedq = self.cookedq[i:]
+            return buf
+        return self.read_very_lazy()
+
+    def _read_until_with_select(self, match, timeout=None):
+        """Read until a given string is encountered or until timeout.
+
+        The timeout is implemented using select.select().
+        """
         n = len(match)
         self.process_rawq()
         i = self.cookedq.find(match)
@@ -589,6 +646,79 @@
         results are undeterministic, and may depend on the I/O timing.
 
         """
+        if self._has_poll:
+            return self._expect_with_poll(list, timeout)
+        else:
+            return self._expect_with_select(list, timeout)
+
+    def _expect_with_poll(self, expect_list, timeout=None):
+        """Read until one from a list of a regular expressions matches.
+
+        This method uses select.poll() to implement the timeout.
+        """
+        re = None
+        expect_list = expect_list[:]
+        indices = range(len(expect_list))
+        for i in indices:
+            if not hasattr(expect_list[i], "search"):
+                if not re: import re
+                expect_list[i] = re.compile(expect_list[i])
+        call_timeout = timeout
+        if timeout is not None:
+            from time import time
+            time_start = time()
+        self.process_rawq()
+        m = None
+        for i in indices:
+            m = expect_list[i].search(self.cookedq)
+            if m:
+                e = m.end()
+                text = self.cookedq[:e]
+                self.cookedq = self.cookedq[e:]
+                break
+        if not m:
+            poller = select.poll()
+            poll_in_or_priority_flags = select.POLLIN | select.POLLPRI
+            poller.register(self, poll_in_or_priority_flags)
+            while not m and not self.eof:
+                try:
+                    ready = poller.poll(call_timeout)
+                except select.error as e:
+                    if e.errno == errno.EINTR:
+                        if timeout is not None:
+                            elapsed = time() - time_start
+                            call_timeout = timeout-elapsed
+                        continue
+                    raise
+                for fd, mode in ready:
+                    if mode & poll_in_or_priority_flags:
+                        self.fill_rawq()
+                        self.process_rawq()
+                        for i in indices:
+                            m = expect_list[i].search(self.cookedq)
+                            if m:
+                                e = m.end()
+                                text = self.cookedq[:e]
+                                self.cookedq = self.cookedq[e:]
+                                break
+                if timeout is not None:
+                    elapsed = time() - time_start
+                    if elapsed >= timeout:
+                        break
+                    call_timeout = timeout-elapsed
+            poller.unregister(self)
+        if m:
+            return (i, m, text)
+        text = self.read_very_lazy()
+        if not text and self.eof:
+            raise EOFError
+        return (-1, None, text)
+
+    def _expect_with_select(self, list, timeout=None):
+        """Read until one from a list of a regular expressions matches.
+
+        The timeout is implemented using select.select().
+        """
         re = None
         list = list[:]
         indices = range(len(list))
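
# Illustrative sketch (not part of the patch) of the timeout bookkeeping the
# new poll()-based helpers use: after an EINTR the remaining wait is
# recomputed from the original deadline, so the total wait never exceeds the
# caller's timeout.  Note that poll() takes milliseconds.
import errno, select
from time import time

def poll_with_deadline(poller, timeout):
    deadline = None if timeout is None else time() + timeout
    while True:
        remaining = None if deadline is None else max(0.0, deadline - time())
        try:
            return poller.poll(None if remaining is None else int(remaining * 1000))
        except select.error as e:
            if e.args[0] != errno.EINTR:
                raise
            if deadline is not None and time() >= deadline:
                return []
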
diff -r 3d0686d90f55 Lib/test/json_tests/test_decode.py
--- a/Lib/test/json_tests/test_decode.py
+++ b/Lib/test/json_tests/test_decode.py
@@ -54,6 +54,15 @@
         self.check_keys_reuse(s, self.loads)
         self.check_keys_reuse(s, self.json.decoder.JSONDecoder().decode)
 
+    def test_extra_data(self):
+        s = '[1, 2, 3]5'
+        msg = 'Extra data'
+        self.assertRaisesRegex(ValueError, msg, self.loads, s)
+
+    def test_invalid_escape(self):
+        s = '["abc\\y"]'
+        msg = 'escape'
+        self.assertRaisesRegex(ValueError, msg, self.loads, s)
 
 class TestPyDecode(TestDecode, PyTest): pass
 class TestCDecode(TestDecode, CTest): pass
diff -r 3d0686d90f55 Lib/test/mp_fork_bomb.py
--- /dev/null
+++ b/Lib/test/mp_fork_bomb.py
@@ -0,0 +1,13 @@
+import multiprocessing, sys
+
+def foo():
+    print("123")
+
+# Because "if __name__ == '__main__'" is missing this will not work
+# correctly on Windows.  However, we should get a RuntimeError rather
+# than the Windows equivalent of a fork bomb.
+
+p = multiprocessing.Process(target=foo)
+p.start()
+p.join()
+sys.exit(p.exitcode)
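
# For contrast, a sketch of the supported pattern: guarding process creation
# with an __main__ check keeps the child (which re-imports the module on
# Windows) from spawning again.
import multiprocessing
import sys

def foo():
    print("123")

if __name__ == '__main__':
    p = multiprocessing.Process(target=foo)
    p.start()
    p.join()
    sys.exit(p.exitcode)
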
diff -r 3d0686d90f55 Lib/test/pickletester.py
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -707,6 +707,11 @@
     def test_getinitargs(self):
         pass
 
+    def test_pop_empty_stack(self):
+        # Test issue7455
+        s = b'0'
+        self.assertRaises((pickle.UnpicklingError, IndexError), self.loads, s)
+
     def test_metaclass(self):
         a = use_metaclass()
         for proto in protocols:
@@ -1383,9 +1388,6 @@
         # Test issue4298
         s = bytes([0x58, 0, 0, 0, 0x54])
         self.assertRaises(EOFError, pickle.loads, s)
-        # Test issue7455
-        s = b'0'
-        self.assertRaises(pickle.UnpicklingError, pickle.loads, s)
 
 
 class AbstractPersistentPicklerTests(unittest.TestCase):
diff -r 3d0686d90f55 Lib/test/regrtest.py
--- a/Lib/test/regrtest.py
+++ b/Lib/test/regrtest.py
@@ -550,16 +550,7 @@
         from subprocess import Popen, PIPE
         debug_output_pat = re.compile(r"\[\d+ refs\]$")
         output = Queue()
-        def tests_and_args():
-            for test in tests:
-                args_tuple = (
-                    (test, verbose, quiet),
-                    dict(huntrleaks=huntrleaks, use_resources=use_resources,
-                         debug=debug, output_on_failure=verbose3,
-                         failfast=failfast, match_tests=match_tests)
-                )
-                yield (test, args_tuple)
-        pending = tests_and_args()
+        pending = MultiprocessTests(tests)
         opt_args = support.args_from_interpreter_flags()
         base_cmd = [sys.executable] + opt_args + ['-m', 'test.regrtest']
         def work():
@@ -567,15 +558,25 @@
             try:
                 while True:
                     try:
-                        test, args_tuple = next(pending)
+                        test = next(pending)
                     except StopIteration:
                         output.put((None, None, None, None))
                         return
+                    args_tuple = (
+                        (test, verbose, quiet),
+                        dict(huntrleaks=huntrleaks, use_resources=use_resources,
+                             debug=debug, output_on_failure=verbose3,
+                             failfast=failfast, match_tests=match_tests)
+                    )
                     # -E is needed by some tests, e.g. test_import
+                    # Running the child from the same working directory ensures
+                    # that TEMPDIR for the child is the same when
+                    # sysconfig.is_python_build() is true. See issue 15300.
                     popen = Popen(base_cmd + ['--slaveargs', json.dumps(args_tuple)],
                                    stdout=PIPE, stderr=PIPE,
                                    universal_newlines=True,
-                                   close_fds=(os.name != 'nt'))
+                                   close_fds=(os.name != 'nt'),
+                                   cwd=support.SAVEDCWD)
                     stdout, stderr = popen.communicate()
                     # Strip last refcount output line if it exists, since it
                     # comes from the shutdown of the interpreter in the subcommand.
@@ -610,13 +611,15 @@
                     print(stdout)
                 if stderr:
                     print(stderr, file=sys.stderr)
+                sys.stdout.flush()
+                sys.stderr.flush()
                 if result[0] == INTERRUPTED:
                     assert result[1] == 'KeyboardInterrupt'
                     raise KeyboardInterrupt   # What else?
                 test_index += 1
         except KeyboardInterrupt:
             interrupted = True
-            pending.close()
+            pending.interrupted = True
         for worker in workers:
             worker.join()
     else:
@@ -677,10 +680,10 @@
         if bad:
             print(count(len(bad), "test"), "failed:")
             printlist(bad)
-        if environment_changed:
-            print("{} altered the execution environment:".format(
-                     count(len(environment_changed), "test")))
-            printlist(environment_changed)
+    if environment_changed:
+        print("{} altered the execution environment:".format(
+                 count(len(environment_changed), "test")))
+        printlist(environment_changed)
     if skipped and not quiet:
         print(count(len(skipped), "test"), "skipped:")
         printlist(skipped)
@@ -760,6 +763,25 @@
             tests.append(modname)
     return stdtests + sorted(tests)
 
+# We do not use a generator so multiple threads can call next().
+class MultiprocessTests(object):
+
+    """A thread-safe iterator over tests for multiprocess mode."""
+
+    def __init__(self, tests):
+        self.interrupted = False
+        self.lock = threading.Lock()
+        self.tests = tests
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        with self.lock:
+            if self.interrupted:
+                raise StopIteration('tests interrupted')
+            return next(self.tests)
+
 def replace_stdout():
     """Set stdout encoder error handler to backslashreplace (as stderr error
     handler) to avoid UnicodeEncodeError when printing a traceback"""
@@ -890,7 +912,9 @@
                  'logging._handlers', 'logging._handlerList',
                  'shutil.archive_formats', 'shutil.unpack_formats',
                  'sys.warnoptions', 'threading._dangling',
-                 'multiprocessing.process._dangling')
+                 'multiprocessing.process._dangling',
+                 'support.TESTFN',
+                )
 
     def get_sys_argv(self):
         return id(sys.argv), sys.argv, sys.argv[:]
@@ -1020,6 +1044,21 @@
         multiprocessing.process._dangling.clear()
         multiprocessing.process._dangling.update(saved)
 
+    def get_support_TESTFN(self):
+        if os.path.isfile(support.TESTFN):
+            result = 'f'
+        elif os.path.isdir(support.TESTFN):
+            result = 'd'
+        else:
+            result = None
+        return result
+    def restore_support_TESTFN(self, saved_value):
+        if saved_value is None:
+            if os.path.isfile(support.TESTFN):
+                os.unlink(support.TESTFN)
+            elif os.path.isdir(support.TESTFN):
+                shutil.rmtree(support.TESTFN)
+
     def resource_info(self):
         for name in self.resources:
             method_suffix = name.replace('.', '_')
diff -r 3d0686d90f55 Lib/test/sha256.pem
--- a/Lib/test/sha256.pem
+++ b/Lib/test/sha256.pem
@@ -2,41 +2,40 @@
  0 s:/C=FR/postalCode=14000/ST=Calvados/L=CAEN/street=22 rue de Bretagne/O=TBS INTERNET/OU=0002 440443810/OU=sha-256 production/CN=sha256.tbs-internet.com
    i:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA SGC
 -----BEGIN CERTIFICATE-----
-MIIGXTCCBUWgAwIBAgIRAMmag+ygSAdxZsbyzYjhuW0wDQYJKoZIhvcNAQELBQAw
+MIIGXDCCBUSgAwIBAgIRAKpVmHgg9nfCodAVwcP4siwwDQYJKoZIhvcNAQELBQAw
 gcQxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl
 bjEVMBMGA1UEChMMVEJTIElOVEVSTkVUMUgwRgYDVQQLEz9UZXJtcyBhbmQgQ29u
 ZGl0aW9uczogaHR0cDovL3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL3JlcG9zaXRv
 cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEYMBYGA1UEAxMPVEJTIFg1MDkg
-Q0EgU0dDMB4XDTEwMDIxODAwMDAwMFoXDTEyMDIxOTIzNTk1OVowgcsxCzAJBgNV
+Q0EgU0dDMB4XDTEyMDEwNDAwMDAwMFoXDTE0MDIxNzIzNTk1OVowgcsxCzAJBgNV
 BAYTAkZSMQ4wDAYDVQQREwUxNDAwMDERMA8GA1UECBMIQ2FsdmFkb3MxDTALBgNV
 BAcTBENBRU4xGzAZBgNVBAkTEjIyIHJ1ZSBkZSBCcmV0YWduZTEVMBMGA1UEChMM
 VEJTIElOVEVSTkVUMRcwFQYDVQQLEw4wMDAyIDQ0MDQ0MzgxMDEbMBkGA1UECxMS
 c2hhLTI1NiBwcm9kdWN0aW9uMSAwHgYDVQQDExdzaGEyNTYudGJzLWludGVybmV0
-LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKbuM8VT7f0nntwu
-N3F7v9KIBlhKNAxqCrziOXU5iqUt8HrQB3DtHbdmII+CpVUlwlmepsx6G+srEZ9a
-MIGAy0nxi5aLb7watkyIdPjJTMvTUBQ/+RPWzt5JtYbbY9BlJ+yci0dctP74f4NU
-ISLtlrEjUbf2gTohLrcE01TfmOF6PDEbB5PKDi38cB3NzKfizWfrOaJW6Q1C1qOJ
-y4/4jkUREX1UFUIxzx7v62VfjXSGlcjGpBX1fvtABQOSLeE0a6gciDZs1REqroFf
-5eXtqYphpTa14Z83ITXMfgg5Nze1VtMnzI9Qx4blYBw4dgQVEuIsYr7FDBOITDzc
-VEVXZx0CAwEAAaOCAj8wggI7MB8GA1UdIwQYMBaAFAdEdoWTKLx/bXjSCuv6TEvf
-2YIfMB0GA1UdDgQWBBSJKI/AYVI9RQNY0QPIqc8ej2QivTAOBgNVHQ8BAf8EBAMC
+LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKQIX/zdJcyxty0m
+PM1XQSoSSifueS3AVcgqMsaIKS/u+rYzsv4hQ/qA6vLn5m5/ewUcZDj7zdi6rBVf
+PaVNXJ6YinLX0tkaW8TEjeVuZG5yksGZlhCt1CJ1Ho9XLiLaP4uJ7MCoNUntpJ+E
+LfrOdgsIj91kPmwjDJeztVcQCvKzhjVJA/KxdInc0JvOATn7rpaSmQI5bvIjufgo
+qVsTPwVFzuUYULXBk7KxRT7MiEqnd5HvviNh0285QC478zl3v0I0Fb5El4yD3p49
+IthcRnxzMKc0UhU5ogi0SbONyBfm/mzONVfSxpM+MlyvZmJqrbuuLoEDzJD+t8PU
+xSuzgbcCAwEAAaOCAj4wggI6MB8GA1UdIwQYMBaAFAdEdoWTKLx/bXjSCuv6TEvf
+2YIfMB0GA1UdDgQWBBT/qTGYdaj+f61c2IRFL/B1eEsM8DAOBgNVHQ8BAf8EBAMC
 BaAwDAYDVR0TAQH/BAIwADA0BgNVHSUELTArBggrBgEFBQcDAQYIKwYBBQUHAwIG
-CisGAQQBgjcKAwMGCWCGSAGG+EIEATBMBgNVHSAERTBDMEEGCysGAQQBgOU3AgQB
-MDIwMAYIKwYBBQUHAgEWJGh0dHBzOi8vd3d3LnRicy1pbnRlcm5ldC5jb20vQ0Ev
-Q1BTNDBtBgNVHR8EZjBkMDKgMKAuhixodHRwOi8vY3JsLnRicy1pbnRlcm5ldC5j
-b20vVEJTWDUwOUNBU0dDLmNybDAuoCygKoYoaHR0cDovL2NybC50YnMteDUwOS5j
-b20vVEJTWDUwOUNBU0dDLmNybDCBpgYIKwYBBQUHAQEEgZkwgZYwOAYIKwYBBQUH
-MAKGLGh0dHA6Ly9jcnQudGJzLWludGVybmV0LmNvbS9UQlNYNTA5Q0FTR0MuY3J0
-MDQGCCsGAQUFBzAChihodHRwOi8vY3J0LnRicy14NTA5LmNvbS9UQlNYNTA5Q0FT
-R0MuY3J0MCQGCCsGAQUFBzABhhhodHRwOi8vb2NzcC50YnMteDUwOS5jb20wPwYD
-VR0RBDgwNoIXc2hhMjU2LnRicy1pbnRlcm5ldC5jb22CG3d3dy5zaGEyNTYudGJz
-LWludGVybmV0LmNvbTANBgkqhkiG9w0BAQsFAAOCAQEAA5NL0D4QSqhErhlkdPmz
-XtiMvdGL+ZehM4coTRIpasM/Agt36Rc0NzCvnQwKE+wkngg1Gy2qe7Q0E/ziqBtB
-fZYzdVgu1zdiL4kTaf+wFKYAFGsFbyeEmXysy+CMwaNoF2vpSjCU1UD56bEnTX/W
-fxVZYxtBQUpnu2wOsm8cDZuZRv9XrYgAhGj9Tt6F0aVHSDGn59uwShG1+BVF/uju
-SCyPTTjL1oc7YElJUzR/x4mQJYvtQI8gDIDAGEOs7v3R/gKa5EMfbUQUI4C84UbI
-Yz09Jdnws/MkC/Hm1BZEqk89u7Hvfv+oHqEb0XaUo0TDfsxE0M1sMdnLb91QNQBm
-UQ==
+CisGAQQBgjcKAwMGCWCGSAGG+EIEATBLBgNVHSAERDBCMEAGCisGAQQB5TcCBAEw
+MjAwBggrBgEFBQcCARYkaHR0cHM6Ly93d3cudGJzLWludGVybmV0LmNvbS9DQS9D
+UFM0MG0GA1UdHwRmMGQwMqAwoC6GLGh0dHA6Ly9jcmwudGJzLWludGVybmV0LmNv
+bS9UQlNYNTA5Q0FTR0MuY3JsMC6gLKAqhihodHRwOi8vY3JsLnRicy14NTA5LmNv
+bS9UQlNYNTA5Q0FTR0MuY3JsMIGmBggrBgEFBQcBAQSBmTCBljA4BggrBgEFBQcw
+AoYsaHR0cDovL2NydC50YnMtaW50ZXJuZXQuY29tL1RCU1g1MDlDQVNHQy5jcnQw
+NAYIKwYBBQUHMAKGKGh0dHA6Ly9jcnQudGJzLXg1MDkuY29tL1RCU1g1MDlDQVNH
+Qy5jcnQwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLnRicy14NTA5LmNvbTA/BgNV
+HREEODA2ghdzaGEyNTYudGJzLWludGVybmV0LmNvbYIbd3d3LnNoYTI1Ni50YnMt
+aW50ZXJuZXQuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQA0pOuL8QvAa5yksTbGShzX
+ABApagunUGoEydv4YJT1MXy9tTp7DrWaozZSlsqBxrYAXP1d9r2fuKbEniYHxaQ0
+UYaf1VSIlDo1yuC8wE7wxbHDIpQ/E5KAyxiaJ8obtDhFstWAPAH+UoGXq0kj2teN
+21sFQ5dXgA95nldvVFsFhrRUNB6xXAcaj0VZFhttI0ZfQZmQwEI/P+N9Jr40OGun
+aa+Dn0TMeUH4U20YntfLbu2nDcJcYfyurm+8/0Tr4HznLnedXu9pCPYj0TaddrgT
+XO0oFiyy7qGaY6+qKh71yD64Y3ycCJ/HR9Wm39mjZYc9ezYwT4noP6r7Lk8YO7/q
 -----END CERTIFICATE-----
  1 s:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA SGC
    i:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root
diff -r 3d0686d90f55 Lib/test/support.py
--- a/Lib/test/support.py
+++ b/Lib/test/support.py
@@ -23,6 +23,9 @@
 import sysconfig
 import fnmatch
 import logging.handlers
+import struct
+import tempfile
+import _testcapi
 
 try:
     import _thread, threading
@@ -53,7 +56,7 @@
     "reap_children", "cpython_only", "check_impl_detail", "get_attribute",
     "swap_item", "swap_attr", "requires_IEEE_754",
     "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink",
-    "import_fresh_module", "failfast",
+    "import_fresh_module", "failfast", "run_with_tz"
     ]
 
 class Error(Exception):
@@ -197,17 +200,81 @@
     except KeyError:
         pass
 
+if sys.platform.startswith("win"):
+    def _waitfor(func, pathname, waitall=False):
+        # Perform the operation
+        func(pathname)
+        # Now setup the wait loop
+        if waitall:
+            dirname = pathname
+        else:
+            dirname, name = os.path.split(pathname)
+            dirname = dirname or '.'
+        # Check for `pathname` to be removed from the filesystem.
+        # The exponential backoff of the timeout amounts to a total
+        # of ~1 second after which the deletion is probably an error
+        # anyway.
+        # Testing on an i7@4.3GHz shows that usually only 1 iteration is
+        # required when contention occurs.
+        timeout = 0.001
+        while timeout < 1.0:
+            # Note we are only testing for the existence of the file(s) in
+            # the contents of the directory regardless of any security or
+            # access rights.  If we have made it this far, we have sufficient
+            # permissions to do that much using Python's equivalent of the
+            # Windows API FindFirstFile.
+            # Other Windows APIs can fail or give incorrect results when
+            # dealing with files that are pending deletion.
+            L = os.listdir(dirname)
+            if not (L if waitall else name in L):
+                return
+            # Increase the timeout and try again
+            time.sleep(timeout)
+            timeout *= 2
+        warnings.warn('tests may fail, delete still pending for ' + pathname,
+                      RuntimeWarning, stacklevel=4)
+
+    def _unlink(filename):
+        _waitfor(os.unlink, filename)
+
+    def _rmdir(dirname):
+        _waitfor(os.rmdir, dirname)
+
+    def _rmtree(path):
+        def _rmtree_inner(path):
+            for name in os.listdir(path):
+                fullname = os.path.join(path, name)
+                if os.path.isdir(fullname):
+                    _waitfor(_rmtree_inner, fullname, waitall=True)
+                    os.rmdir(fullname)
+                else:
+                    os.unlink(fullname)
+        _waitfor(_rmtree_inner, path, waitall=True)
+        _waitfor(os.rmdir, path)
+else:
+    _unlink = os.unlink
+    _rmdir = os.rmdir
+    _rmtree = shutil.rmtree
+
 def unlink(filename):
     try:
-        os.unlink(filename)
+        _unlink(filename)
     except OSError as error:
         # The filename need not exist.
         if error.errno not in (errno.ENOENT, errno.ENOTDIR):
             raise
 
+def rmdir(dirname):
+    try:
+        _rmdir(dirname)
+    except OSError as error:
+        # The directory need not exist.
+        if error.errno != errno.ENOENT:
+            raise
+
 def rmtree(path):
     try:
-        shutil.rmtree(path)
+        _rmtree(path)
     except OSError as error:
         # Unix returns ENOENT, Windows returns ESRCH.
         if error.errno not in (errno.ENOENT, errno.ESRCH):
@@ -984,6 +1051,31 @@
     return final_opt and final_opt != '-O0'
 
 
+_header = '2P'
+if hasattr(sys, "gettotalrefcount"):
+    _header = '2P' + _header
+_vheader = _header + 'P'
+
+def calcobjsize(fmt):
+    return struct.calcsize(_header + fmt + '0P')
+
+def calcvobjsize(fmt):
+    return struct.calcsize(_vheader + fmt + '0P')
+
+
+_TPFLAGS_HAVE_GC = 1<<14
+_TPFLAGS_HEAPTYPE = 1<<9
+
+def check_sizeof(test, o, size):
+    result = sys.getsizeof(o)
+    # add GC header size
+    if ((type(o) == type) and (o.__flags__ & _TPFLAGS_HEAPTYPE) or\
+        ((type(o) != type) and (type(o).__flags__ & _TPFLAGS_HAVE_GC))):
+        size += _testcapi.SIZEOF_PYGC_HEAD
+    msg = 'wrong size for %s: got %d, expected %d' \
+            % (type(o), result, size)
+    test.assertEqual(result, size, msg)
+
 #=======================================================================
 # Decorator for running a function in a different locale, correctly resetting
 # it afterwards.
@@ -1021,6 +1113,39 @@
     return decorator
 
 #=======================================================================
+# Decorator for running a function in a specific timezone, correctly
+# resetting it afterwards.
+
+def run_with_tz(tz):
+    def decorator(func):
+        def inner(*args, **kwds):
+            try:
+                tzset = time.tzset
+            except AttributeError:
+                raise unittest.SkipTest("tzset required")
+            if 'TZ' in os.environ:
+                orig_tz = os.environ['TZ']
+            else:
+                orig_tz = None
+            os.environ['TZ'] = tz
+            tzset()
+
+            # now run the function, resetting the tz on exceptions
+            try:
+                return func(*args, **kwds)
+            finally:
+                if orig_tz is None:
+                    del os.environ['TZ']
+                else:
+                    os.environ['TZ'] = orig_tz
+                time.tzset()
+
+        inner.__name__ = func.__name__
+        inner.__doc__ = func.__doc__
+        return inner
+    return decorator
+
+#=======================================================================
 # Big-memory-test support. Separate from 'resources' because memory use
 # should be configurable.
 
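# Illustrative sketch (not part of the patch): how a test module might use the
# support helpers added above.  The test class, scratch-file contents and TZ
# value are made up for the example; support.unlink() and support.run_with_tz()
# are the helpers introduced in this hunk.
import time
import unittest
from test import support

class ExampleCleanupTest(unittest.TestCase):
    def tearDown(self):
        # On Windows this retries while a delete is still pending; elsewhere
        # it is a plain os.unlink() that ignores a missing file.
        support.unlink(support.TESTFN)

    def test_writes_a_scratch_file(self):
        with open(support.TESTFN, 'w') as f:
            f.write('scratch')

    @support.run_with_tz('UTC')   # skipped at runtime where time.tzset() is absent
    def test_epoch_is_midnight_utc(self):
        self.assertEqual(time.localtime(0).tm_hour, time.gmtime(0).tm_hour)
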
diff -r 3d0686d90f55 Lib/test/test_aifc.py
--- a/Lib/test/test_aifc.py
+++ b/Lib/test/test_aifc.py
@@ -1,7 +1,8 @@
-from test.support import findfile, run_unittest, TESTFN
+from test.support import findfile, run_unittest, TESTFN, captured_stdout, unlink
 import unittest
 import os
 import io
+import struct
 
 import aifc
 
@@ -20,10 +21,8 @@
                 self.fout.close()
             except (aifc.Error, AttributeError):
                 pass
-        try:
-            os.remove(TESTFN)
-        except OSError:
-            pass
+        unlink(TESTFN)
+        unlink(TESTFN + '.aiff')
 
     def test_skipunknown(self):
         #Issue 2245
@@ -32,6 +31,7 @@
 
     def test_params(self):
         f = self.f = aifc.open(self.sndfilepath)
+        self.assertEqual(f.getfp().name, self.sndfilepath)
         self.assertEqual(f.getnchannels(), 2)
         self.assertEqual(f.getsampwidth(), 2)
         self.assertEqual(f.getframerate(), 48000)
@@ -45,6 +45,7 @@
 
     def test_read(self):
         f = self.f = aifc.open(self.sndfilepath)
+        self.assertEqual(f.readframes(0), b'')
         self.assertEqual(f.tell(), 0)
         self.assertEqual(f.readframes(2), b'\x00\x00\x00\x00\x0b\xd4\x0b\xd4')
         f.rewind()
@@ -58,6 +59,10 @@
         self.assertEqual(f.readframes(2), b'\x17t\x17t"\xad"\xad')
         f.setpos(pos0)
         self.assertEqual(f.readframes(2), b'\x00\x00\x00\x00\x0b\xd4\x0b\xd4')
+        with self.assertRaises(aifc.Error):
+            f.setpos(-1)
+        with self.assertRaises(aifc.Error):
+            f.setpos(f.getnframes() + 1)
 
     def test_write(self):
         f = self.f = aifc.open(self.sndfilepath)
@@ -92,8 +97,6 @@
         self.assertEqual(f.getparams()[0:3], fout.getparams()[0:3])
         self.assertEqual(fout.getcomptype(), b'ULAW')
         self.assertEqual(fout.getcompname(), b'foo')
-        # XXX: this test fails, not sure if it should succeed or not
-        # self.assertEqual(f.readframes(5), fout.readframes(5))
 
     def test_close(self):
         class Wrapfile(object):
@@ -112,7 +115,7 @@
 
     def test_write_header_comptype_sampwidth(self):
         for comptype in (b'ULAW', b'ulaw', b'ALAW', b'alaw', b'G722'):
-            fout = self.fout = aifc.open(io.BytesIO(), 'wb')
+            fout = aifc.open(io.BytesIO(), 'wb')
             fout.setnchannels(1)
             fout.setframerate(1)
             fout.setcomptype(comptype, b'')
@@ -121,7 +124,7 @@
             fout.initfp(None)
 
     def test_write_markers_values(self):
-        fout = self.fout = aifc.open(io.BytesIO(), 'wb')
+        fout = aifc.open(io.BytesIO(), 'wb')
         self.assertEqual(fout.getmarkers(), None)
         fout.setmark(1, 0, b'foo1')
         fout.setmark(1, 1, b'foo2')
@@ -179,6 +182,148 @@
         with self.assertRaises(ValueError):
             aifc._write_string(f, b'too long' * 255)
 
+    def test_wrong_open_mode(self):
+        with self.assertRaises(aifc.Error):
+            aifc.open(TESTFN, 'wrong_mode')
+
+    def test_read_wrong_form(self):
+        b1 = io.BytesIO(b'WRNG' + struct.pack('>L', 0))
+        b2 = io.BytesIO(b'FORM' + struct.pack('>L', 4) + b'WRNG')
+        self.assertRaises(aifc.Error, aifc.open, b1)
+        self.assertRaises(aifc.Error, aifc.open, b2)
+
+    def test_read_no_comm_chunk(self):
+        b = io.BytesIO(b'FORM' + struct.pack('>L', 4) + b'AIFF')
+        self.assertRaises(aifc.Error, aifc.open, b)
+
+    def test_read_wrong_compression_type(self):
+        b = b'FORM' + struct.pack('>L', 4) + b'AIFC'
+        b += b'COMM' + struct.pack('>LhlhhLL', 23, 0, 0, 0, 0, 0, 0)
+        b += b'WRNG' + struct.pack('B', 0)
+        self.assertRaises(aifc.Error, aifc.open, io.BytesIO(b))
+
+    def test_read_wrong_marks(self):
+        b = b'FORM' + struct.pack('>L', 4) + b'AIFF'
+        b += b'COMM' + struct.pack('>LhlhhLL', 18, 0, 0, 0, 0, 0, 0)
+        b += b'SSND' + struct.pack('>L', 8) + b'\x00' * 8
+        b += b'MARK' + struct.pack('>LhB', 3, 1, 1)
+        with captured_stdout() as s:
+            f = aifc.open(io.BytesIO(b))
+        self.assertEqual(
+            s.getvalue(),
+            'Warning: MARK chunk contains only 0 markers instead of 1\n')
+        self.assertEqual(f.getmarkers(), None)
+
+    def test_read_comm_kludge_compname_even(self):
+        b = b'FORM' + struct.pack('>L', 4) + b'AIFC'
+        b += b'COMM' + struct.pack('>LhlhhLL', 18, 0, 0, 0, 0, 0, 0)
+        b += b'NONE' + struct.pack('B', 4) + b'even' + b'\x00'
+        b += b'SSND' + struct.pack('>L', 8) + b'\x00' * 8
+        with captured_stdout() as s:
+            f = aifc.open(io.BytesIO(b))
+        self.assertEqual(s.getvalue(), 'Warning: bad COMM chunk size\n')
+        self.assertEqual(f.getcompname(), b'even')
+
+    def test_read_comm_kludge_compname_odd(self):
+        b = b'FORM' + struct.pack('>L', 4) + b'AIFC'
+        b += b'COMM' + struct.pack('>LhlhhLL', 18, 0, 0, 0, 0, 0, 0)
+        b += b'NONE' + struct.pack('B', 3) + b'odd'
+        b += b'SSND' + struct.pack('>L', 8) + b'\x00' * 8
+        with captured_stdout() as s:
+            f = aifc.open(io.BytesIO(b))
+        self.assertEqual(s.getvalue(), 'Warning: bad COMM chunk size\n')
+        self.assertEqual(f.getcompname(), b'odd')
+
+    def test_write_params_raises(self):
+        fout = aifc.open(io.BytesIO(), 'wb')
+        wrong_params = (0, 0, 0, 0, b'WRNG', '')
+        self.assertRaises(aifc.Error, fout.setparams, wrong_params)
+        self.assertRaises(aifc.Error, fout.getparams)
+        self.assertRaises(aifc.Error, fout.setnchannels, 0)
+        self.assertRaises(aifc.Error, fout.getnchannels)
+        self.assertRaises(aifc.Error, fout.setsampwidth, 0)
+        self.assertRaises(aifc.Error, fout.getsampwidth)
+        self.assertRaises(aifc.Error, fout.setframerate, 0)
+        self.assertRaises(aifc.Error, fout.getframerate)
+        self.assertRaises(aifc.Error, fout.setcomptype, b'WRNG', '')
+        fout.aiff()
+        fout.setnchannels(1)
+        fout.setsampwidth(1)
+        fout.setframerate(1)
+        fout.setnframes(1)
+        fout.writeframes(b'\x00')
+        self.assertRaises(aifc.Error, fout.setparams, (1, 1, 1, 1, 1, 1))
+        self.assertRaises(aifc.Error, fout.setnchannels, 1)
+        self.assertRaises(aifc.Error, fout.setsampwidth, 1)
+        self.assertRaises(aifc.Error, fout.setframerate, 1)
+        self.assertRaises(aifc.Error, fout.setnframes, 1)
+        self.assertRaises(aifc.Error, fout.setcomptype, b'NONE', '')
+        self.assertRaises(aifc.Error, fout.aiff)
+        self.assertRaises(aifc.Error, fout.aifc)
+
+    def test_write_params_singles(self):
+        fout = aifc.open(io.BytesIO(), 'wb')
+        fout.aifc()
+        fout.setnchannels(1)
+        fout.setsampwidth(2)
+        fout.setframerate(3)
+        fout.setnframes(4)
+        fout.setcomptype(b'NONE', b'name')
+        self.assertEqual(fout.getnchannels(), 1)
+        self.assertEqual(fout.getsampwidth(), 2)
+        self.assertEqual(fout.getframerate(), 3)
+        self.assertEqual(fout.getnframes(), 0)
+        self.assertEqual(fout.tell(), 0)
+        self.assertEqual(fout.getcomptype(), b'NONE')
+        self.assertEqual(fout.getcompname(), b'name')
+        fout.writeframes(b'\x00' * 4 * fout.getsampwidth() * fout.getnchannels())
+        self.assertEqual(fout.getnframes(), 4)
+        self.assertEqual(fout.tell(), 4)
+
+    def test_write_params_bunch(self):
+        fout = aifc.open(io.BytesIO(), 'wb')
+        fout.aifc()
+        p = (1, 2, 3, 4, b'NONE', b'name')
+        fout.setparams(p)
+        self.assertEqual(fout.getparams(), p)
+        fout.initfp(None)
+
+    def test_write_header_raises(self):
+        fout = aifc.open(io.BytesIO(), 'wb')
+        self.assertRaises(aifc.Error, fout.close)
+        fout.setnchannels(1)
+        self.assertRaises(aifc.Error, fout.close)
+        fout.setsampwidth(1)
+        self.assertRaises(aifc.Error, fout.close)
+        fout.initfp(None)
+
+    def test_write_header_comptype_raises(self):
+        for comptype in (b'ULAW', b'ulaw', b'ALAW', b'alaw', b'G722'):
+            fout = aifc.open(io.BytesIO(), 'wb')
+            fout.setsampwidth(1)
+            fout.setcomptype(comptype, b'')
+            self.assertRaises(aifc.Error, fout.close)
+            fout.initfp(None)
+
+    def test_write_markers_raises(self):
+        fout = aifc.open(io.BytesIO(), 'wb')
+        self.assertRaises(aifc.Error, fout.setmark, 0, 0, b'')
+        self.assertRaises(aifc.Error, fout.setmark, 1, -1, b'')
+        self.assertRaises(aifc.Error, fout.setmark, 1, 0, None)
+        self.assertRaises(aifc.Error, fout.getmark, 1)
+        fout.initfp(None)
+
+    def test_write_aiff_by_extension(self):
+        sampwidth = 2
+        fout = self.fout = aifc.open(TESTFN + '.aiff', 'wb')
+        fout.setparams((1, sampwidth, 1, 1, b'ULAW', b''))
+        frames = b'\x00' * fout.getnchannels() * sampwidth
+        fout.writeframes(frames)
+        fout.close()
+        f = self.f = aifc.open(TESTFN + '.aiff', 'rb')
+        self.assertEqual(f.getcomptype(), b'NONE')
+        f.close()
+
 
 def test_main():
     run_unittest(AIFCTest)
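# Illustrative sketch (not part of the patch): the new error tests above build
# minimal IFF streams by hand as 4-byte chunk id + big-endian 32-bit length +
# body, where the outer FORM body starts with the form type.  The chunk()
# helper below is hypothetical; the tests inline the same bytes.
import io
import struct

def chunk(chunk_id, body):
    # 4-byte id, big-endian 32-bit length, then the chunk body
    return chunk_id + struct.pack('>L', len(body)) + body

# Same bytes as b2 in test_read_wrong_form(): a FORM chunk whose form type is
# neither AIFF nor AIFC, which aifc.open() rejects with aifc.Error.
wrong_form = io.BytesIO(chunk(b'FORM', b'WRNG'))
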
diff -r 3d0686d90f55 Lib/test/test_argparse.py
--- a/Lib/test/test_argparse.py
+++ b/Lib/test/test_argparse.py
@@ -1371,6 +1371,7 @@
         ('X @hello', NS(a=None, x='X', y=['hello world!'])),
         ('-a B @recursive Y Z', NS(a='A', x='hello world!', y=['Y', 'Z'])),
         ('X @recursive Z -a B', NS(a='B', x='X', y=['hello world!', 'Z'])),
+        (["-a", "", "X", "Y"], NS(a='', x='X', y=['Y'])),
     ]
 
 
@@ -1760,6 +1761,14 @@
         parser2.add_argument('-y', choices='123', help='y help')
         parser2.add_argument('z', type=complex, nargs='*', help='z help')
 
+        # add third sub-parser
+        parser3_kwargs = dict(description='3 description')
+        if subparser_help:
+            parser3_kwargs['help'] = '3 help'
+        parser3 = subparsers.add_parser('3', **parser3_kwargs)
+        parser3.add_argument('t', type=int, help='t help')
+        parser3.add_argument('u', nargs='...', help='u help')
+
         # return the main parser
         return parser
 
@@ -1789,6 +1798,10 @@
             self.parser.parse_args('--foo 0.125 1 c'.split()),
             NS(foo=True, bar=0.125, w=None, x='c'),
         )
+        self.assertEqual(
+            self.parser.parse_args('-1.5 3 11 -- a --foo 7 -- b'.split()),
+            NS(foo=False, bar=-1.5, t=11, u=['a', '--foo', '7', '--', 'b']),
+        )
 
     def test_parse_known_args(self):
         self.assertEqual(
@@ -1823,15 +1836,15 @@
 
     def test_help(self):
         self.assertEqual(self.parser.format_usage(),
-                         'usage: PROG [-h] [--foo] bar {1,2} ...\n')
+                         'usage: PROG [-h] [--foo] bar {1,2,3} ...\n')
         self.assertEqual(self.parser.format_help(), textwrap.dedent('''\
-            usage: PROG [-h] [--foo] bar {1,2} ...
+            usage: PROG [-h] [--foo] bar {1,2,3} ...
 
             main description
 
             positional arguments:
               bar         bar help
-              {1,2}       command help
+              {1,2,3}     command help
 
             optional arguments:
               -h, --help  show this help message and exit
@@ -1842,15 +1855,15 @@
         # Make sure - is still used for help if it is a non-first prefix char
         parser = self._get_parser(prefix_chars='+:-')
         self.assertEqual(parser.format_usage(),
-                         'usage: PROG [-h] [++foo] bar {1,2} ...\n')
+                         'usage: PROG [-h] [++foo] bar {1,2,3} ...\n')
         self.assertEqual(parser.format_help(), textwrap.dedent('''\
-            usage: PROG [-h] [++foo] bar {1,2} ...
+            usage: PROG [-h] [++foo] bar {1,2,3} ...
 
             main description
 
             positional arguments:
               bar         bar help
-              {1,2}       command help
+              {1,2,3}     command help
 
             optional arguments:
               -h, --help  show this help message and exit
@@ -1861,15 +1874,15 @@
     def test_help_alternate_prefix_chars(self):
         parser = self._get_parser(prefix_chars='+:/')
         self.assertEqual(parser.format_usage(),
-                         'usage: PROG [+h] [++foo] bar {1,2} ...\n')
+                         'usage: PROG [+h] [++foo] bar {1,2,3} ...\n')
         self.assertEqual(parser.format_help(), textwrap.dedent('''\
-            usage: PROG [+h] [++foo] bar {1,2} ...
+            usage: PROG [+h] [++foo] bar {1,2,3} ...
 
             main description
 
             positional arguments:
               bar         bar help
-              {1,2}       command help
+              {1,2,3}     command help
 
             optional arguments:
               +h, ++help  show this help message and exit
@@ -1878,18 +1891,19 @@
 
     def test_parser_command_help(self):
         self.assertEqual(self.command_help_parser.format_usage(),
-                         'usage: PROG [-h] [--foo] bar {1,2} ...\n')
+                         'usage: PROG [-h] [--foo] bar {1,2,3} ...\n')
         self.assertEqual(self.command_help_parser.format_help(),
                          textwrap.dedent('''\
-            usage: PROG [-h] [--foo] bar {1,2} ...
+            usage: PROG [-h] [--foo] bar {1,2,3} ...
 
             main description
 
             positional arguments:
               bar         bar help
-              {1,2}       command help
+              {1,2,3}     command help
                 1         1 help
                 2         2 help
+                3         3 help
 
             optional arguments:
               -h, --help  show this help message and exit
@@ -2000,6 +2014,7 @@
                 1 (1alias1, 1alias2)
                                     1 help
                 2                   2 help
+                3                   3 help
             """))
 
 # ============
diff -r 3d0686d90f55 Lib/test/test_array.py
--- a/Lib/test/test_array.py
+++ b/Lib/test/test_array.py
@@ -988,6 +988,19 @@
         a = array.array('H', b"1234")
         self.assertEqual(len(a) * a.itemsize, 4)
 
+    @support.cpython_only
+    def test_sizeof_with_buffer(self):
+        a = array.array(self.typecode, self.example)
+        basesize = support.calcvobjsize('4Pi')
+        buffer_size = a.buffer_info()[1] * a.itemsize
+        support.check_sizeof(self, a, basesize + buffer_size)
+
+    @support.cpython_only
+    def test_sizeof_without_buffer(self):
+        a = array.array(self.typecode)
+        basesize = support.calcvobjsize('4Pi')
+        support.check_sizeof(self, a, basesize)
+
 
 class StringTest(BaseTest):
 
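# Illustrative sketch (not part of the patch): what support.calcvobjsize('4Pi')
# in the tests above expands to on a release (non-debug) build.  '2P' stands in
# for the refcount and type pointer, the extra 'P' for ob_size, '4Pi' is the
# array object's own fields, and the trailing '0P' only forces pointer
# alignment without adding a member.
import struct

release_basesize = struct.calcsize('2P' + 'P' + '4Pi' + '0P')
# support.check_sizeof() then compares sys.getsizeof(a) against this value
# (plus the element buffer, plus a GC header for GC-tracked types).
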
diff -r 3d0686d90f55 Lib/test/test_ast.py
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -195,12 +195,6 @@
         with self.assertRaises(AttributeError):
             x.vararg
 
-        with self.assertRaises(AttributeError):
-            x.foobar = 21
-
-        with self.assertRaises(AttributeError):
-            ast.AST(lineno=2)
-
         with self.assertRaises(TypeError):
             # "_ast.AST constructor takes 0 positional arguments"
             ast.AST(2)
@@ -224,6 +218,12 @@
         im = ast.parse("from . import y").body[0]
         self.assertIsNone(im.module)
 
+    def test_non_interned_future_from_ast(self):
+        mod = ast.parse("from __future__ import division")
+        self.assertIsInstance(mod.body[0], ast.ImportFrom)
+        mod.body[0].module = " __future__ ".strip()
+        compile(mod, "<test>", "exec")
+
     def test_base_classes(self):
         self.assertTrue(issubclass(ast.For, ast.stmt))
         self.assertTrue(issubclass(ast.Name, ast.expr))
diff -r 3d0686d90f55 Lib/test/test_asyncore.py
--- a/Lib/test/test_asyncore.py
+++ b/Lib/test/test_asyncore.py
@@ -7,6 +7,7 @@
 import time
 import warnings
 import errno
+import struct
 
 from test import support
 from test.support import TESTFN, run_unittest, unlink
@@ -730,6 +731,25 @@
         finally:
             sock.close()
 
+    @unittest.skipUnless(threading, 'Threading required for this test.')
+    @support.reap_threads
+    def test_quick_connect(self):
+        # see: http://bugs.python.org/issue10340
+        server = TCPServer()
+        t = threading.Thread(target=lambda: asyncore.loop(timeout=0.1, count=500))
+        t.start()
+
+        for x in range(20):
+            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            s.settimeout(.2)
+            s.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER,
+                         struct.pack('ii', 1, 0))
+            try:
+                s.connect(server.address)
+            except socket.error:
+                pass
+            finally:
+                s.close()
 
 class TestAPI_UseSelect(BaseTestAPI):
     use_poll = False
diff -r 3d0686d90f55 Lib/test/test_base64.py
--- a/Lib/test/test_base64.py
+++ b/Lib/test/test_base64.py
@@ -2,6 +2,7 @@
 from test import support
 import base64
 import binascii
+import os
 import sys
 import subprocess
 
@@ -227,6 +228,10 @@
 
 
 class TestMain(unittest.TestCase):
+    def tearDown(self):
+        if os.path.exists(support.TESTFN):
+            os.unlink(support.TESTFN)
+
     def get_output(self, *args, **options):
         args = (sys.executable, '-m', 'base64') + args
         return subprocess.check_output(args, **options)
diff -r 3d0686d90f55 Lib/test/test_bisect.py
--- a/Lib/test/test_bisect.py
+++ b/Lib/test/test_bisect.py
@@ -23,6 +23,28 @@
 import bisect as c_bisect
 
 
+class Range(object):
+    """A trivial range()-like object without any integer width limitations."""
+    def __init__(self, start, stop):
+        self.start = start
+        self.stop = stop
+        self.last_insert = None
+
+    def __len__(self):
+        return self.stop - self.start
+
+    def __getitem__(self, idx):
+        n = self.stop - self.start
+        if idx < 0:
+            idx += n
+        if idx >= n:
+            raise IndexError(idx)
+        return self.start + idx
+
+    def insert(self, idx, item):
+        self.last_insert = idx, item
+
+
 class TestBisect(unittest.TestCase):
     module = None
 
@@ -122,6 +144,32 @@
         self.assertRaises(ValueError, mod.insort_left, [1, 2, 3], 5, -1, 3),
         self.assertRaises(ValueError, mod.insort_right, [1, 2, 3], 5, -1, 3),
 
+    def test_large_range(self):
+        # Issue 13496
+        mod = self.module
+        n = sys.maxsize
+        data = range(n-1)
+        self.assertEqual(mod.bisect_left(data, n-3), n-3)
+        self.assertEqual(mod.bisect_right(data, n-3), n-2)
+        self.assertEqual(mod.bisect_left(data, n-3, n-10, n), n-3)
+        self.assertEqual(mod.bisect_right(data, n-3, n-10, n), n-2)
+
+    def test_large_pyrange(self):
+        # Same as above, but without C-imposed limits on range() parameters
+        mod = self.module
+        n = sys.maxsize
+        data = Range(0, n-1)
+        self.assertEqual(mod.bisect_left(data, n-3), n-3)
+        self.assertEqual(mod.bisect_right(data, n-3), n-2)
+        self.assertEqual(mod.bisect_left(data, n-3, n-10, n), n-3)
+        self.assertEqual(mod.bisect_right(data, n-3, n-10, n), n-2)
+        x = n - 100
+        mod.insort_left(data, x, x - 50, x + 50)
+        self.assertEqual(data.last_insert, (x, x))
+        x = n - 200
+        mod.insort_right(data, x, x - 50, x + 50)
+        self.assertEqual(data.last_insert, (x + 1, x))
+
     def test_random(self, n=25):
         from random import randrange
         for i in range(n):
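# Illustrative sketch (not part of the patch): bisect only needs __len__ and
# __getitem__ for the search, plus insert() for insort, which is exactly the
# contract the Range stub above implements.  Assumes that Range class is in
# scope; the values are arbitrary.
import bisect

seq = Range(0, 100)                       # items are 0..99, equal to their index
assert bisect.bisect_left(seq, 42) == 42  # search uses only __len__/__getitem__
bisect.insort_right(seq, 3)               # insort additionally calls insert()
assert seq.last_insert == (4, 3)          # slot 4 chosen, item 3 recorded
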
diff -r 3d0686d90f55 Lib/test/test_capi.py
--- a/Lib/test/test_capi.py
+++ b/Lib/test/test_capi.py
@@ -11,6 +11,10 @@
 import unittest
 from test import support
 try:
+    import _posixsubprocess
+except ImportError:
+    _posixsubprocess = None
+try:
     import threading
 except ImportError:
     threading = None
@@ -55,6 +59,33 @@
     def test_memoryview_from_NULL_pointer(self):
         self.assertRaises(ValueError, _testcapi.make_memoryview_from_NULL_pointer)
 
+    @unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.')
+    def test_seq_bytes_to_charp_array(self):
+        # Issue #15732: crash in _PySequence_BytesToCharpArray()
+        class Z(object):
+            def __len__(self):
+                return 1
+        self.assertRaises(TypeError, _posixsubprocess.fork_exec,
+                          1,Z(),3,[1, 2],5,6,7,8,9,10,11,12,13,14,15,16,17)
+        # Issue #15736: overflow in _PySequence_BytesToCharpArray()
+        class Z(object):
+            def __len__(self):
+                return sys.maxsize
+            def __getitem__(self, i):
+                return b'x'
+        self.assertRaises(MemoryError, _posixsubprocess.fork_exec,
+                          1,Z(),3,[1, 2],5,6,7,8,9,10,11,12,13,14,15,16,17)
+
+    @unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.')
+    def test_subprocess_fork_exec(self):
+        class Z(object):
+            def __len__(self):
+                return 1
+
+        # Issue #15738: crash in subprocess_fork_exec()
+        self.assertRaises(TypeError, _posixsubprocess.fork_exec,
+                          Z(),[b'1'],3,[1, 2],5,6,7,8,9,10,11,12,13,14,15,16,17)
+
 @unittest.skipUnless(threading, 'Threading required for this test.')
 class TestPendingCalls(unittest.TestCase):
 
diff -r 3d0686d90f55 Lib/test/test_cgi.py
--- a/Lib/test/test_cgi.py
+++ b/Lib/test/test_cgi.py
@@ -118,6 +118,11 @@
 
 class CgiTests(unittest.TestCase):
 
+    def test_escape(self):
+        self.assertEqual("test &amp; string", cgi.escape("test & string"))
+        self.assertEqual("&lt;test string&gt;", cgi.escape("<test string>"))
+        self.assertEqual("&quot;test string&quot;", cgi.escape('"test string"', True))
+
     def test_strict(self):
         for orig, expect in parse_strict_test_cases:
             # Test basic parsing
diff -r 3d0686d90f55 Lib/test/test_cmath.py
--- a/Lib/test/test_cmath.py
+++ b/Lib/test/test_cmath.py
@@ -519,15 +519,11 @@
     # of zero, then atan and atanh will also have difficulties with
     # the sign of complex zeros.
     @requires_IEEE_754
-    @unittest.skipIf(sysconfig.get_config_var('LOG1P_DROPS_ZERO_SIGN'),
-                     "system log1p() function doesn't preserve the sign")
     def testAtanSign(self):
         for z in complex_zeros:
             self.assertComplexIdentical(cmath.atan(z), z)
 
     @requires_IEEE_754
-    @unittest.skipIf(sysconfig.get_config_var('LOG1P_DROPS_ZERO_SIGN'),
-                     "system log1p() function doesn't preserve the sign")
     def testAtanhSign(self):
         for z in complex_zeros:
             self.assertComplexIdentical(cmath.atanh(z), z)
diff -r 3d0686d90f55 Lib/test/test_cmd_line.py
--- a/Lib/test/test_cmd_line.py
+++ b/Lib/test/test_cmd_line.py
@@ -265,6 +265,23 @@
             "print(repr(input()))",
             b"'abc'")
 
+    def test_output_newline(self):
+        # Issue 13119: newline for print() should be \r\n on Windows.
+        code = """if 1:
+            import sys
+            print(1)
+            print(2)
+            print(3, file=sys.stderr)
+            print(4, file=sys.stderr)"""
+        rc, out, err = assert_python_ok('-c', code)
+
+        if sys.platform == 'win32':
+            self.assertEqual(b'1\r\n2\r\n', out)
+            self.assertEqual(b'3\r\n4', err)
+        else:
+            self.assertEqual(b'1\n2\n', out)
+            self.assertEqual(b'3\n4', err)
+
     def test_unmached_quote(self):
         # Issue #10206: python program starting with unmatched quote
         # spewed spaces to stdout
diff -r 3d0686d90f55 Lib/test/test_cmd_line_script.py
--- a/Lib/test/test_cmd_line_script.py
+++ b/Lib/test/test_cmd_line_script.py
@@ -13,6 +13,8 @@
 
 verbose = support.verbose
 
+example_args = ['test1', 'test2', 'test3']
+
 test_source = """\
 # Script may be run with optimisation enabled, so don't rely on assert
 # statements being executed
@@ -36,6 +38,9 @@
 # Check the sys module
 import sys
 assertIdentical(globals(), sys.modules[__name__].__dict__)
+from test import test_cmd_line_script
+example_args_list = test_cmd_line_script.example_args
+assertEqual(sys.argv[1:], example_args_list)
 print('sys.argv[0]==%a' % sys.argv[0])
 print('sys.path[0]==%a' % sys.path[0])
 # Check the working directory
@@ -100,7 +105,7 @@
                             *cmd_line_switches):
         if not __debug__:
             cmd_line_switches += ('-' + 'O' * sys.flags.optimize,)
-        run_args = cmd_line_switches + (script_name,)
+        run_args = cmd_line_switches + (script_name,) + tuple(example_args)
         rc, out, err = assert_python_ok(*run_args)
         self._check_output(script_name, rc, out + err, expected_file,
                            expected_argv0, expected_path0, expected_package)
@@ -240,9 +245,9 @@
                 pkg_dir = os.path.join(script_dir, 'test_pkg')
                 make_pkg(pkg_dir, "import sys; print('init_argv0==%r' % sys.argv[0])")
                 script_name = _make_test_script(pkg_dir, 'script')
-                rc, out, err = assert_python_ok('-m', 'test_pkg.script')
+                rc, out, err = assert_python_ok('-m', 'test_pkg.script', *example_args)
                 if verbose > 1:
-                    print(data)
+                    print(out)
                 expected = "init_argv0==%r" % '-m'
                 self.assertIn(expected.encode('utf-8'), out)
                 self._check_output(script_name, rc, out,
@@ -270,10 +275,25 @@
             with support.temp_cwd(path=script_dir):
                 with open("-m", "w") as f:
                     f.write("data")
-                    rc, out, err = assert_python_ok('-m', 'other')
+                    rc, out, err = assert_python_ok('-m', 'other', *example_args)
                     self._check_output(script_name, rc, out,
                                       script_name, script_name, '', '')
 
+    def test_dash_m_error_code_is_one(self):
+        # If a module is invoked with the -m command line flag
+        # and results in an error, the return code reported
+        # to the shell is 1.
+        with temp_dir() as script_dir:
+            with support.temp_cwd(path=script_dir):
+                pkg_dir = os.path.join(script_dir, 'test_pkg')
+                make_pkg(pkg_dir)
+                script_name = _make_test_script(pkg_dir, 'other',
+                                                "if __name__ == '__main__': raise ValueError")
+                rc, out, err = assert_python_failure('-m', 'test_pkg.other', *example_args)
+                if verbose > 1:
+                    print(out)
+                self.assertEqual(rc, 1)
+
 def test_main():
     support.run_unittest(CmdLineTest)
     support.reap_children()
diff -r 3d0686d90f55 Lib/test/test_codecs.py
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -540,8 +540,19 @@
         )
 
     def test_errors(self):
-        self.assertRaises(UnicodeDecodeError, codecs.utf_16_le_decode,
-                          b"\xff", "strict", True)
+        tests = [
+            (b'\xff', '\ufffd'),
+            (b'A\x00Z', 'A\ufffd'),
+            (b'A\x00B\x00C\x00D\x00Z', 'ABCD\ufffd'),
+            (b'\x00\xd8', '\ufffd'),
+            (b'\x00\xd8A', '\ufffd'),
+            (b'\x00\xd8A\x00', '\ufffdA'),
+            (b'\x00\xdcA\x00', '\ufffdA'),
+        ]
+        for raw, expected in tests:
+            self.assertRaises(UnicodeDecodeError, codecs.utf_16_le_decode,
+                              raw, 'strict', True)
+            self.assertEqual(raw.decode('utf-16le', 'replace'), expected)
 
     def test_nonbmp(self):
         self.assertEqual("\U00010203".encode(self.encoding),
@@ -568,8 +579,19 @@
         )
 
     def test_errors(self):
-        self.assertRaises(UnicodeDecodeError, codecs.utf_16_be_decode,
-                          b"\xff", "strict", True)
+        tests = [
+            (b'\xff', '\ufffd'),
+            (b'\x00A\xff', 'A\ufffd'),
+            (b'\x00A\x00B\x00C\x00DZ', 'ABCD\ufffd'),
+            (b'\xd8\x00', '\ufffd'),
+            (b'\xd8\x00\xdc', '\ufffd'),
+            (b'\xd8\x00\x00A', '\ufffdA'),
+            (b'\xdc\x00\x00A', '\ufffdA'),
+        ]
+        for raw, expected in tests:
+            self.assertRaises(UnicodeDecodeError, codecs.utf_16_be_decode,
+                              raw, 'strict', True)
+            self.assertEqual(raw.decode('utf-16be', 'replace'), expected)
 
     def test_nonbmp(self):
         self.assertEqual("\U00010203".encode(self.encoding),
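# Illustrative sketch (not part of the patch): each row in the error tables
# above pairs a truncated or lone-surrogate byte sequence with its output
# under the 'replace' handler.  For example, a lone high surrogate followed by
# an ordinary code unit:
assert b'\x00\xd8A\x00'.decode('utf-16le', 'replace') == '\ufffdA'
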
diff -r 3d0686d90f55 Lib/test/test_concurrent_futures.py
--- a/Lib/test/test_concurrent_futures.py
+++ b/Lib/test/test_concurrent_futures.py
@@ -183,7 +183,9 @@
         for p in processes:
             p.join()
 
+
 class WaitTests(unittest.TestCase):
+
     def test_first_completed(self):
         future1 = self.executor.submit(mul, 21, 2)
         future2 = self.executor.submit(time.sleep, 1.5)
@@ -284,7 +286,21 @@
 
 
 class ThreadPoolWaitTests(ThreadPoolMixin, WaitTests):
-    pass
+
+    def test_pending_calls_race(self):
+        # Issue #14406: multi-threaded race condition when waiting on all
+        # futures.
+        event = threading.Event()
+        def future_func():
+            event.wait()
+        oldswitchinterval = sys.getswitchinterval()
+        sys.setswitchinterval(1e-6)
+        try:
+            fs = {self.executor.submit(future_func) for i in range(100)}
+            event.set()
+            futures.wait(fs, return_when=futures.ALL_COMPLETED)
+        finally:
+            sys.setswitchinterval(oldswitchinterval)
 
 
 class ProcessPoolWaitTests(ProcessPoolMixin, WaitTests):
diff -r 3d0686d90f55 Lib/test/test_decimal.py
--- a/Lib/test/test_decimal.py
+++ b/Lib/test/test_decimal.py
@@ -1516,7 +1516,17 @@
         for d, n, r in test_triples:
             self.assertEqual(str(round(Decimal(d), n)), r)
 
+    def test_nan_to_float(self):
+        # Test conversions of decimal NANs to float.
+        # See http://bugs.python.org/issue15544
+        for s in ('nan', 'nan1234', '-nan', '-nan2468'):
+            f = float(Decimal(s))
+            self.assertTrue(math.isnan(f))
 
+    def test_snan_to_float(self):
+        for s in ('snan', '-snan', 'snan1357', '-snan1234'):
+            d = Decimal(s)
+            self.assertRaises(ValueError, float, d)
 
     def test_eval_round_trip(self):
 
diff -r 3d0686d90f55 Lib/test/test_deque.py
--- a/Lib/test/test_deque.py
+++ b/Lib/test/test_deque.py
@@ -7,6 +7,7 @@
 import pickle
 from io import StringIO
 import random
+import struct
 
 BIG = 100000
 
@@ -518,6 +519,21 @@
             gc.collect()
             self.assertTrue(ref() is None, "Cycle was not collected")
 
+    check_sizeof = support.check_sizeof
+
+    @support.cpython_only
+    def test_sizeof(self):
+        BLOCKLEN = 62
+        basesize = support.calcobjsize('2P4PlP')
+        blocksize = struct.calcsize('2P%dP' % BLOCKLEN)
+        self.assertEqual(object.__sizeof__(deque()), basesize)
+        check = self.check_sizeof
+        check(deque(), basesize + blocksize)
+        check(deque('a'), basesize + blocksize)
+        check(deque('a' * (BLOCKLEN // 2)), basesize + blocksize)
+        check(deque('a' * (BLOCKLEN // 2 + 1)), basesize + 2 * blocksize)
+        check(deque('a' * (42 * BLOCKLEN)), basesize + 43 * blocksize)
+
 class TestVariousIteratorArgs(unittest.TestCase):
 
     def test_constructor(self):
diff -r 3d0686d90f55 Lib/test/test_descr.py
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -1,8 +1,10 @@
 import builtins
+import gc
 import sys
 import types
 import math
 import unittest
+import weakref
 
 from copy import deepcopy
 from test import support
@@ -1186,7 +1188,6 @@
         self.assertEqual(Counted.counter, 0)
 
         # Test lookup leaks [SF bug 572567]
-        import gc
         if hasattr(gc, 'get_objects'):
             class G(object):
                 def __eq__(self, other):
@@ -1457,6 +1458,22 @@
         self.assertEqual(x, spam.spamlist)
         self.assertEqual(a, a1)
         self.assertEqual(d, d1)
+        spam_cm = spam.spamlist.__dict__['classmeth']
+        x2, a2, d2 = spam_cm(spam.spamlist, *a, **d)
+        self.assertEqual(x2, spam.spamlist)
+        self.assertEqual(a2, a1)
+        self.assertEqual(d2, d1)
+        class SubSpam(spam.spamlist): pass
+        x2, a2, d2 = spam_cm(SubSpam, *a, **d)
+        self.assertEqual(x2, SubSpam)
+        self.assertEqual(a2, a1)
+        self.assertEqual(d2, d1)
+        with self.assertRaises(TypeError):
+            spam_cm()
+        with self.assertRaises(TypeError):
+            spam_cm(spam.spamlist())
+        with self.assertRaises(TypeError):
+            spam_cm(list)
 
     def test_staticmethods(self):
         # Testing static methods...
@@ -4380,7 +4397,6 @@
         self.assertRaises(AttributeError, getattr, C(), "attr")
         self.assertEqual(descr.counter, 4)
 
-        import gc
         class EvilGetattribute(object):
             # This used to segfault
             def __getattr__(self, name):
@@ -4393,6 +4409,9 @@
 
         self.assertRaises(AttributeError, getattr, EvilGetattribute(), "attr")
 
+    def test_type___getattribute__(self):
+        self.assertRaises(TypeError, type.__getattribute__, list, type)
+
     def test_abstractmethods(self):
         # type pretends not to have __abstractmethods__.
         self.assertRaises(AttributeError, getattr, type, "__abstractmethods__")
@@ -4427,7 +4446,30 @@
             pass
         Foo.__repr__ = Foo.__str__
         foo = Foo()
-        str(foo)
+        self.assertRaises(RuntimeError, str, foo)
+        self.assertRaises(RuntimeError, repr, foo)
+
+    def test_mixing_slot_wrappers(self):
+        class X(dict):
+            __setattr__ = dict.__setitem__
+        x = X()
+        x.y = 42
+        self.assertEqual(x["y"], 42)
+
+    def test_cycle_through_dict(self):
+        # See bug #1469629
+        class X(dict):
+            def __init__(self):
+                dict.__init__(self)
+                self.__dict__ = self
+        x = X()
+        x.attr = 42
+        wr = weakref.ref(x)
+        del x
+        support.gc_collect()
+        self.assertIsNone(wr())
+        for o in gc.get_objects():
+            self.assertIsNot(type(o), X)
 
 class DictProxyTests(unittest.TestCase):
     def setUp(self):
diff -r 3d0686d90f55 Lib/test/test_dict.py
--- a/Lib/test/test_dict.py
+++ b/Lib/test/test_dict.py
@@ -299,6 +299,26 @@
         x.fail = True
         self.assertRaises(Exc, d.setdefault, x, [])
 
+    def test_setdefault_atomic(self):
+        # Issue #13521: setdefault() calls __hash__ and __eq__ only once.
+        class Hashed(object):
+            def __init__(self):
+                self.hash_count = 0
+                self.eq_count = 0
+            def __hash__(self):
+                self.hash_count += 1
+                return 42
+            def __eq__(self, other):
+                self.eq_count += 1
+                return id(self) == id(other)
+        hashed1 = Hashed()
+        y = {hashed1: 5}
+        hashed2 = Hashed()
+        y.setdefault(hashed2, [])
+        self.assertEqual(hashed1.hash_count, 1)
+        self.assertEqual(hashed2.hash_count, 1)
+        self.assertEqual(hashed1.eq_count + hashed2.eq_count, 1)
+
     def test_popitem(self):
         # dict.popitem()
         for copymode in -1, +1:
diff -r 3d0686d90f55 Lib/test/test_exceptions.py
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -38,7 +38,7 @@
         try:
             try:
                 import marshal
-                marshal.loads('')
+                marshal.loads(b'')
             except EOFError:
                 pass
         finally:
diff -r 3d0686d90f55 Lib/test/test_file_eintr.py
--- /dev/null
+++ b/Lib/test/test_file_eintr.py
@@ -0,0 +1,236 @@
+# Written to test interrupted system calls interfering with our many buffered
+# IO implementations.  http://bugs.python.org/issue12268
+#
+# It was suggested that this code could be merged into test_io and the tests
+# made to work using the same method as the existing signal tests in test_io.
+# I was unable to get single process tests using alarm or setitimer that way
+# to reproduce the EINTR problems.  This process based test suite reproduces
+# the problems prior to the issue12268 patch reliably on Linux and OSX.
+#  - gregory.p.smith
+
+import os
+import select
+import signal
+import subprocess
+import sys
+from test.support import run_unittest
+import time
+import unittest
+
+# Import all of the things we're about to try testing up front.
+from _io import FileIO
+
+
+@unittest.skipUnless(os.name == 'posix', 'tests require a POSIX system.')
+class TestFileIOSignalInterrupt(unittest.TestCase):
+    def setUp(self):
+        self._process = None
+
+    def tearDown(self):
+        if self._process and self._process.poll() is None:
+            try:
+                self._process.kill()
+            except OSError:
+                pass
+
+    def _generate_infile_setup_code(self):
+        """Returns the infile = ... line of code for the reader process.
+
+        Subclasses should override this to test different IO objects.
+        """
+        return ('import _io ;'
+                'infile = _io.FileIO(sys.stdin.fileno(), "rb")')
+
+    def fail_with_process_info(self, why, stdout=b'', stderr=b'',
+                               communicate=True):
+        """A common way to cleanup and fail with useful debug output.
+
+        Kills the process if it is still running, collects remaining output
+        and fails the test with an error message including the output.
+
+        Args:
+            why: Text to go after "Error from IO process" in the message.
+            stdout, stderr: standard output and error from the process so
+                far to include in the error message.
+            communicate: bool, when True we call communicate() on the process
+                after killing it to gather additional output.
+        """
+        if self._process.poll() is None:
+            time.sleep(0.1)  # give it time to finish printing the error.
+            try:
+                self._process.terminate()  # Ensure it dies.
+            except OSError:
+                pass
+        if communicate:
+            stdout_end, stderr_end = self._process.communicate()
+            stdout += stdout_end
+            stderr += stderr_end
+        self.fail('Error from IO process %s:\nSTDOUT:\n%sSTDERR:\n%s\n' %
+                  (why, stdout.decode(), stderr.decode()))
+
+    def _test_reading(self, data_to_write, read_and_verify_code):
+        """Generic buffered read method test harness to validate EINTR behavior.
+
+        Also validates that Python signal handlers are run during the read.
+
+        Args:
+            data_to_write: String to write to the child process for reading
+                before sending it a signal, confirming the signal was handled,
+                writing a final newline and closing the infile pipe.
+            read_and_verify_code: Single "line" of code to read from a file
+                object named 'infile' and validate the result.  This will be
+                executed as part of a python subprocess fed data_to_write.
+        """
+        infile_setup_code = self._generate_infile_setup_code()
+        # Total pipe IO in this function is smaller than the minimum posix OS
+        # pipe buffer size of 512 bytes.  No writer should block.
+        assert len(data_to_write) < 512, 'data_to_write must fit in pipe buf.'
+
+        # Start a subprocess to call our read method while handling a signal.
+        self._process = subprocess.Popen(
+                [sys.executable, '-u', '-c',
+                 'import signal, sys ;'
+                 'signal.signal(signal.SIGINT, '
+                               'lambda s, f: sys.stderr.write("$\\n")) ;'
+                 + infile_setup_code + ' ;' +
+                 'sys.stderr.write("Worm Sign!\\n") ;'
+                 + read_and_verify_code + ' ;' +
+                 'infile.close()'
+                ],
+                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE)
+
+        # Wait for the signal handler to be installed.
+        worm_sign = self._process.stderr.read(len(b'Worm Sign!\n'))
+        if worm_sign != b'Worm Sign!\n':  # See also, Dune by Frank Herbert.
+            self.fail_with_process_info('while awaiting a sign',
+                                        stderr=worm_sign)
+        self._process.stdin.write(data_to_write)
+
+        signals_sent = 0
+        rlist = []
+        # We don't know when the read_and_verify_code in our child is actually
+        # executing within the read system call we want to interrupt.  This
+        # loop waits for a bit before sending the first signal to increase
+        # the likelihood of that.  Implementations without correct EINTR
+        # and signal handling usually fail this test.
+        while not rlist:
+            rlist, _, _ = select.select([self._process.stderr], (), (), 0.05)
+            self._process.send_signal(signal.SIGINT)
+            signals_sent += 1
+            if signals_sent > 200:
+                self._process.kill()
+                self.fail('reader process failed to handle our signals.')
+        # This assumes anything unexpected that writes to stderr will also
+        # write a newline.  That is true of the traceback printing code.
+        signal_line = self._process.stderr.readline()
+        if signal_line != b'$\n':
+            self.fail_with_process_info('while awaiting signal',
+                                        stderr=signal_line)
+
+        # We append a newline to our input so that a readline call can
+        # end on its own before the EOF is seen and so that we're testing
+        # the read call that was interrupted by a signal before the end of
+        # the data stream has been reached.
+        stdout, stderr = self._process.communicate(input=b'\n')
+        if self._process.returncode:
+            self.fail_with_process_info(
+                    'exited rc=%d' % self._process.returncode,
+                    stdout, stderr, communicate=False)
+        # PASS!
+
+    # String format for the read_and_verify_code used by read methods.
+    _READING_CODE_TEMPLATE = (
+            'got = infile.{read_method_name}() ;'
+            'expected = {expected!r} ;'
+            'assert got == expected, ('
+                    '"{read_method_name} returned wrong data.\\n"'
+                    '"got data %r\\nexpected %r" % (got, expected))'
+            )
+
+    def test_readline(self):
+        """readline() must handle signals and not lose data."""
+        self._test_reading(
+                data_to_write=b'hello, world!',
+                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
+                        read_method_name='readline',
+                        expected=b'hello, world!\n'))
+
+    def test_readlines(self):
+        """readlines() must handle signals and not lose data."""
+        self._test_reading(
+                data_to_write=b'hello\nworld!',
+                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
+                        read_method_name='readlines',
+                        expected=[b'hello\n', b'world!\n']))
+
+    def test_readall(self):
+        """readall() must handle signals and not lose data."""
+        self._test_reading(
+                data_to_write=b'hello\nworld!',
+                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
+                        read_method_name='readall',
+                        expected=b'hello\nworld!\n'))
+        # read() is the same thing as readall().
+        self._test_reading(
+                data_to_write=b'hello\nworld!',
+                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
+                        read_method_name='read',
+                        expected=b'hello\nworld!\n'))
+
+
+class TestBufferedIOSignalInterrupt(TestFileIOSignalInterrupt):
+    def _generate_infile_setup_code(self):
+        """Returns the infile = ... line of code to make a BufferedReader."""
+        return ('infile = open(sys.stdin.fileno(), "rb") ;'
+                'import _io ;assert isinstance(infile, _io.BufferedReader)')
+
+    def test_readall(self):
+        """BufferedReader.read() must handle signals and not lose data."""
+        self._test_reading(
+                data_to_write=b'hello\nworld!',
+                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
+                        read_method_name='read',
+                        expected=b'hello\nworld!\n'))
+
+
+class TestTextIOSignalInterrupt(TestFileIOSignalInterrupt):
+    def _generate_infile_setup_code(self):
+        """Returns the infile = ... line of code to make a TextIOWrapper."""
+        return ('infile = open(sys.stdin.fileno(), "rt", newline=None) ;'
+                'import _io ;assert isinstance(infile, _io.TextIOWrapper)')
+
+    def test_readline(self):
+        """readline() must handle signals and not lose data."""
+        self._test_reading(
+                data_to_write=b'hello, world!',
+                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
+                        read_method_name='readline',
+                        expected='hello, world!\n'))
+
+    def test_readlines(self):
+        """readlines() must handle signals and not lose data."""
+        self._test_reading(
+                data_to_write=b'hello\r\nworld!',
+                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
+                        read_method_name='readlines',
+                        expected=['hello\n', 'world!\n']))
+
+    def test_readall(self):
+        """read() must handle signals and not lose data."""
+        self._test_reading(
+                data_to_write=b'hello\nworld!',
+                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
+                        read_method_name='read',
+                        expected="hello\nworld!\n"))
+
+
+def test_main():
+    test_cases = [
+            tc for tc in globals().values()
+            if isinstance(tc, type) and issubclass(tc, unittest.TestCase)]
+    run_unittest(*test_cases)
+
+
+if __name__ == '__main__':
+    test_main()
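# Illustrative sketch (not part of the patch): the child-side scenario the
# harness above drives from the parent.  A Python-level SIGINT handler is
# installed (the harness uses one that writes "$\n" to stderr) so the blocking
# read is interrupted without killing the process; the buffered read must then
# resume and return everything written, not a truncated result.
import signal
import sys

signal.signal(signal.SIGINT, lambda signum, frame: sys.stderr.write('$\n'))
line = sys.stdin.buffer.readline()   # must not drop data across the signal
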
diff -r 3d0686d90f55 Lib/test/test_filecmp.py
--- a/Lib/test/test_filecmp.py
+++ b/Lib/test/test_filecmp.py
@@ -98,7 +98,10 @@
 
     def test_dircmp(self):
         # Check attributes for comparison of two identical directories
-        d = filecmp.dircmp(self.dir, self.dir_same)
+        left_dir, right_dir = self.dir, self.dir_same
+        d = filecmp.dircmp(left_dir, right_dir)
+        self.assertEqual(d.left, left_dir)
+        self.assertEqual(d.right, right_dir)
         if self.caseinsensitive:
             self.assertEqual([d.left_list, d.right_list],[['file'], ['FiLe']])
         else:
@@ -109,7 +112,10 @@
         self.assertEqual(d.diff_files, [])
 
         # Check attributes for comparison of two different directories
-        d = filecmp.dircmp(self.dir, self.dir_diff)
+        left_dir, right_dir = self.dir, self.dir_diff
+        d = filecmp.dircmp(left_dir, right_dir)
+        self.assertEqual(d.left, left_dir)
+        self.assertEqual(d.right, right_dir)
         self.assertEqual(d.left_list, ['file'])
         self.assertTrue(d.right_list == ['file', 'file2'])
         self.assertEqual(d.common, ['file'])
diff -r 3d0686d90f55 Lib/test/test_fileio.py
--- a/Lib/test/test_fileio.py
+++ b/Lib/test/test_fileio.py
@@ -127,6 +127,14 @@
         else:
             self.fail("Should have raised IOError")
 
+    @unittest.skipIf(os.name == 'nt', "test only works on a POSIX-like system")
+    def testOpenDirFD(self):
+        fd = os.open('.', os.O_RDONLY)
+        with self.assertRaises(IOError) as cm:
+            _FileIO(fd, 'r')
+        os.close(fd)
+        self.assertEqual(cm.exception.errno, errno.EISDIR)
+
     #A set of functions testing that we get expected behaviour if someone has
     #manually closed the internal file descriptor.  First, a decorator:
     def ClosedFD(func):
@@ -403,6 +411,17 @@
             self.assertRaises(ValueError, _FileIO, "/some/invalid/name", "rt")
             self.assertEqual(w.warnings, [])
 
+    def testUnclosedFDOnException(self):
+        class MyException(Exception): pass
+        class MyFileIO(_FileIO):
+            def __setattr__(self, name, value):
+                if name == "name":
+                    raise MyException("blocked setting name")
+                return super(MyFileIO, self).__setattr__(name, value)
+        fd = os.open(__file__, os.O_RDONLY)
+        self.assertRaises(MyException, MyFileIO, fd)
+        os.close(fd)  # should not raise OSError(EBADF)
+
 
 def test_main():
     # Historically, these tests have been sloppy about removing TESTFN.
diff -r 3d0686d90f55 Lib/test/test_fractions.py
--- a/Lib/test/test_fractions.py
+++ b/Lib/test/test_fractions.py
@@ -6,6 +6,7 @@
 import numbers
 import operator
 import fractions
+import sys
 import unittest
 from copy import copy, deepcopy
 from pickle import dumps, loads
@@ -76,6 +77,9 @@
     def __float__(self):
         assert False, "__float__ should not be invoked"
 
+class DummyFraction(fractions.Fraction):
+    """Dummy Fraction subclass for copy and deepcopy testing."""
+
 class GcdTest(unittest.TestCase):
 
     def testMisc(self):
@@ -286,9 +290,14 @@
         self.assertEqual(F(201, 200).limit_denominator(100), F(1))
         self.assertEqual(F(201, 200).limit_denominator(101), F(102, 101))
         self.assertEqual(F(0).limit_denominator(10000), F(0))
+        for i in (0, -1):
+            self.assertRaisesMessage(
+                ValueError, "max_denominator should be at least 1",
+                F(1).limit_denominator, i)
 
     def testConversions(self):
         self.assertTypedEquals(-1, math.trunc(F(-11, 10)))
+        self.assertTypedEquals(1, math.trunc(F(11, 10)))
         self.assertTypedEquals(-2, math.floor(F(-11, 10)))
         self.assertTypedEquals(-1, math.ceil(F(-11, 10)))
         self.assertTypedEquals(-1, math.ceil(F(-10, 10)))
@@ -329,6 +338,7 @@
         self.assertEqual(F(8, 27), F(2, 3) ** F(3))
         self.assertEqual(F(27, 8), F(2, 3) ** F(-3))
         self.assertTypedEquals(2.0, F(4) ** F(1, 2))
+        self.assertEqual(F(1, 1), +F(1, 1))
         z = pow(F(-1), F(1, 2))
         self.assertAlmostEqual(z.real, 0)
         self.assertEqual(z.imag, 1)
@@ -395,6 +405,10 @@
             TypeError,
             "unsupported operand type(s) for +: 'Fraction' and 'Decimal'",
             operator.add, F(3,11), Decimal('3.1415926'))
+        self.assertRaisesMessage(
+            TypeError,
+            "unsupported operand type(s) for +: 'Decimal' and 'Fraction'",
+            operator.add, Decimal('3.1415926'), F(3,11))
 
     def testComparisons(self):
         self.assertTrue(F(1, 2) < F(2, 3))
@@ -538,9 +552,12 @@
         self.assertEqual("7", str(F(7, 1)))
 
     def testHash(self):
+        hmod = sys.hash_info.modulus
+        hinf = sys.hash_info.inf
         self.assertEqual(hash(2.5), hash(F(5, 2)))
         self.assertEqual(hash(10**50), hash(F(10**50)))
         self.assertNotEqual(hash(float(10**23)), hash(F(10**23)))
+        self.assertEqual(hinf, hash(F(1, hmod)))
         # Check that __hash__ produces the same value as hash(), for
         # consistency with int and Decimal.  (See issue #10356.)
         self.assertEqual(hash(F(-1)), F(-1).__hash__())
@@ -574,9 +591,14 @@
 
     def test_copy_deepcopy_pickle(self):
         r = F(13, 7)
+        dr = DummyFraction(13, 7)
         self.assertEqual(r, loads(dumps(r)))
         self.assertEqual(id(r), id(copy(r)))
         self.assertEqual(id(r), id(deepcopy(r)))
+        self.assertNotEqual(id(dr), id(copy(dr)))
+        self.assertNotEqual(id(dr), id(deepcopy(dr)))
+        self.assertTypedEquals(dr, copy(dr))
+        self.assertTypedEquals(dr, deepcopy(dr))
 
     def test_slots(self):
         # Issue 4998
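# Illustrative sketch (not part of the patch): why testHash() above expects
# hash(F(1, hmod)) == hinf.  Rational hashes are computed modulo the prime
# sys.hash_info.modulus; a denominator divisible by that prime has no modular
# inverse, and such values hash to the sentinel sys.hash_info.inf.
import sys
from fractions import Fraction

P = sys.hash_info.modulus
assert hash(Fraction(1, P)) == sys.hash_info.inf
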
diff -r 3d0686d90f55 Lib/test/test_gdb.py
--- a/Lib/test/test_gdb.py
+++ b/Lib/test/test_gdb.py
@@ -32,6 +32,15 @@
 if gdbpy_version == b'':
     raise unittest.SkipTest("gdb not built with embedded python support")
 
+# Verify that "gdb" can load our custom hooks
+p = subprocess.Popen(["gdb", "--batch", cmd,
+                      "--args", sys.executable],
+                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+__, gdbpy_errors = p.communicate()
+if b"auto-loading has been declined" in gdbpy_errors:
+    msg = "gdb security settings prevent use of custom hooks: %s"
+    raise unittest.SkipTest(msg % gdbpy_errors)
+
 def gdb_has_frame_select():
     # Does this build of gdb have gdb.Frame.select ?
     cmd = "--eval-command=python print(dir(gdb.Frame))"
diff -r 3d0686d90f55 Lib/test/test_gzip.py
--- a/Lib/test/test_gzip.py
+++ b/Lib/test/test_gzip.py
@@ -331,6 +331,20 @@
             with gzip.GzipFile(fileobj=f, mode="w") as g:
                 pass
 
+    def test_bytes_filename(self):
+        str_filename = self.filename
+        try:
+            bytes_filename = str_filename.encode("ascii")
+        except UnicodeEncodeError:
+            self.skipTest("Temporary file name needs to be ASCII")
+        with gzip.GzipFile(bytes_filename, "wb") as f:
+            f.write(data1 * 50)
+        with gzip.GzipFile(bytes_filename, "rb") as f:
+            self.assertEqual(f.read(), data1 * 50)
+        # Sanity check that we are actually operating on the right file.
+        with gzip.GzipFile(str_filename, "rb") as f:
+            self.assertEqual(f.read(), data1 * 50)
+
     # Testing compress/decompress shortcut functions
 
     def test_compress(self):
diff -r 3d0686d90f55 Lib/test/test_hashlib.py
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -111,12 +111,8 @@
                             issubset(hashlib.algorithms_available))
 
     def test_unknown_hash(self):
-        try:
-            hashlib.new('spam spam spam spam spam')
-        except ValueError:
-            pass
-        else:
-            self.assertTrue(0 == "hashlib didn't reject bogus hash name")
+        self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam')
+        self.assertRaises(TypeError, hashlib.new, 1)
 
     def test_get_builtin_constructor(self):
         get_builtin_constructor = hashlib.__dict__[
@@ -135,6 +131,7 @@
                 sys.modules['_md5'] = _md5
             else:
                 del sys.modules['_md5']
+        self.assertRaises(TypeError, get_builtin_constructor, 3)
 
     def test_hexdigest(self):
         for name in self.supported_hash_names:
diff -r 3d0686d90f55 Lib/test/test_htmlparser.py
--- a/Lib/test/test_htmlparser.py
+++ b/Lib/test/test_htmlparser.py
@@ -409,6 +409,16 @@
             ('starttag', 'a', [('foo', None), ('=', None), ('bar', None)])
         ]
         self._run_check(html, expected)
+        #see issue #14538
+        html = ('<meta><meta / ><meta // ><meta / / >'
+                '<meta/><meta /><meta //><meta//>')
+        expected = [
+            ('starttag', 'meta', []), ('starttag', 'meta', []),
+            ('starttag', 'meta', []), ('starttag', 'meta', []),
+            ('startendtag', 'meta', []), ('startendtag', 'meta', []),
+            ('startendtag', 'meta', []), ('startendtag', 'meta', []),
+        ]
+        self._run_check(html, expected)
 
     def test_declaration_junk_chars(self):
         self._run_check("<!DOCTYPE foo $ >", [('decl', 'DOCTYPE foo $ ')])
diff -r 3d0686d90f55 Lib/test/test_http_cookies.py
--- a/Lib/test/test_http_cookies.py
+++ b/Lib/test/test_http_cookies.py
@@ -86,13 +86,13 @@
 
         # loading 'expires'
         C = cookies.SimpleCookie()
-        C.load('Customer="W"; expires=Wed, 01-Jan-2010 00:00:00 GMT')
+        C.load('Customer="W"; expires=Wed, 01 Jan 2010 00:00:00 GMT')
         self.assertEqual(C['Customer']['expires'],
-                         'Wed, 01-Jan-2010 00:00:00 GMT')
+                         'Wed, 01 Jan 2010 00:00:00 GMT')
         C = cookies.SimpleCookie()
-        C.load('Customer="W"; expires=Wed, 01-Jan-98 00:00:00 GMT')
+        C.load('Customer="W"; expires=Wed, 01 Jan 98 00:00:00 GMT')
         self.assertEqual(C['Customer']['expires'],
-                         'Wed, 01-Jan-98 00:00:00 GMT')
+                         'Wed, 01 Jan 98 00:00:00 GMT')
 
         # 'max-age'
         C = cookies.SimpleCookie('Customer="WILE_E_COYOTE"')
diff -r 3d0686d90f55 Lib/test/test_httplib.py
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -99,6 +99,34 @@
                 conn.request('POST', '/', body, headers)
                 self.assertEqual(conn._buffer.count[header.lower()], 1)
 
+    def test_content_length_0(self):
+
+        class ContentLengthChecker(list):
+            def __init__(self):
+                list.__init__(self)
+                self.content_length = None
+            def append(self, item):
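+                # Capture the value of any Content-Length header as the
+                # request is buffered, so the assertions below can check it.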
+                kv = item.split(b':', 1)
+                if len(kv) > 1 and kv[0].lower() == b'content-length':
+                    self.content_length = kv[1].strip()
+                list.append(self, item)
+
+        # POST with empty body
+        conn = client.HTTPConnection('example.com')
+        conn.sock = FakeSocket(None)
+        conn._buffer = ContentLengthChecker()
+        conn.request('POST', '/', '')
+        self.assertEqual(conn._buffer.content_length, b'0',
+                        'Header Content-Length not set')
+
+        # PUT request with empty body
+        conn = client.HTTPConnection('example.com')
+        conn.sock = FakeSocket(None)
+        conn._buffer = ContentLengthChecker()
+        conn.request('PUT', '/', '')
+        self.assertEqual(conn._buffer.content_length, b'0',
+                        'Header Content-Length not set')
+
     def test_putheader(self):
         conn = client.HTTPConnection('example.com')
         conn.sock = FakeSocket(None)
@@ -369,6 +397,15 @@
         resp.begin()
         self.assertRaises(client.LineTooLong, resp.read)
 
+    def test_early_eof(self):
+        # Test HTTPResponse with no \r\n termination.
+        body = "HTTP/1.1 200 Ok"
+        sock = FakeSocket(body)
+        resp = client.HTTPResponse(sock)
+        resp.begin()
+        self.assertEqual(resp.read(), b'')
+        self.assertTrue(resp.isclosed())
+
 class OfflineTest(TestCase):
     def test_responses(self):
         self.assertEqual(client.responses[client.NOT_FOUND], "Not Found")
diff -r 3d0686d90f55 Lib/test/test_httpservers.py
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -313,6 +313,8 @@
     class request_handler(NoLogRequestHandler, CGIHTTPRequestHandler):
         pass
 
+    linesep = os.linesep.encode('ascii')
+
     def setUp(self):
         BaseTestCase.setUp(self)
         self.cwd = os.getcwd()
@@ -366,48 +368,51 @@
         finally:
             BaseTestCase.tearDown(self)
 
-    def test_url_collapse_path_split(self):
+    def test_url_collapse_path(self):
+        # verify that paths are collapsed to the expected form on proper urls
         test_vectors = {
-            '': ('/', ''),
+            '': '//',
             '..': IndexError,
             '/.//..': IndexError,
-            '/': ('/', ''),
-            '//': ('/', ''),
-            '/\\': ('/', '\\'),
-            '/.//': ('/', ''),
-            'cgi-bin/file1.py': ('/cgi-bin', 'file1.py'),
-            '/cgi-bin/file1.py': ('/cgi-bin', 'file1.py'),
-            'a': ('/', 'a'),
-            '/a': ('/', 'a'),
-            '//a': ('/', 'a'),
-            './a': ('/', 'a'),
-            './C:/': ('/C:', ''),
-            '/a/b': ('/a', 'b'),
-            '/a/b/': ('/a/b', ''),
-            '/a/b/c/..': ('/a/b', ''),
-            '/a/b/c/../d': ('/a/b', 'd'),
-            '/a/b/c/../d/e/../f': ('/a/b/d', 'f'),
-            '/a/b/c/../d/e/../../f': ('/a/b', 'f'),
-            '/a/b/c/../d/e/.././././..//f': ('/a/b', 'f'),
+            '/': '//',
+            '//': '//',
+            '/\\': '//\\',
+            '/.//': '//',
+            'cgi-bin/file1.py': '/cgi-bin/file1.py',
+            '/cgi-bin/file1.py': '/cgi-bin/file1.py',
+            'a': '//a',
+            '/a': '//a',
+            '//a': '//a',
+            './a': '//a',
+            './C:/': '/C:/',
+            '/a/b': '/a/b',
+            '/a/b/': '/a/b/',
+            '/a/b/.': '/a/b/',
+            '/a/b/c/..': '/a/b/',
+            '/a/b/c/../d': '/a/b/d',
+            '/a/b/c/../d/e/../f': '/a/b/d/f',
+            '/a/b/c/../d/e/../../f': '/a/b/f',
+            '/a/b/c/../d/e/.././././..//f': '/a/b/f',
             '../a/b/c/../d/e/.././././..//f': IndexError,
-            '/a/b/c/../d/e/../../../f': ('/a', 'f'),
-            '/a/b/c/../d/e/../../../../f': ('/', 'f'),
+            '/a/b/c/../d/e/../../../f': '/a/f',
+            '/a/b/c/../d/e/../../../../f': '//f',
             '/a/b/c/../d/e/../../../../../f': IndexError,
-            '/a/b/c/../d/e/../../../../f/..': ('/', ''),
+            '/a/b/c/../d/e/../../../../f/..': '//',
+            '/a/b/c/../d/e/../../../../f/../.': '//',
         }
         for path, expected in test_vectors.items():
             if isinstance(expected, type) and issubclass(expected, Exception):
                 self.assertRaises(expected,
-                                  server._url_collapse_path_split, path)
+                                  server._url_collapse_path, path)
             else:
-                actual = server._url_collapse_path_split(path)
+                actual = server._url_collapse_path(path)
                 self.assertEqual(expected, actual,
                                  msg='path = %r\nGot:    %r\nWanted: %r' %
                                  (path, actual, expected))
 
     def test_headers_and_content(self):
         res = self.request('/cgi-bin/file1.py')
-        self.assertEqual((b'Hello World\n', 'text/html', 200),
+        self.assertEqual((b'Hello World' + self.linesep, 'text/html', 200),
             (res.read(), res.getheader('Content-type'), res.status))
 
     def test_post(self):
@@ -416,7 +421,7 @@
         headers = {'Content-type' : 'application/x-www-form-urlencoded'}
         res = self.request('/cgi-bin/file2.py', 'POST', params, headers)
 
-        self.assertEqual(res.read(), b'1, python, 123456\n')
+        self.assertEqual(res.read(), b'1, python, 123456' + self.linesep)
 
     def test_invaliduri(self):
         res = self.request('/cgi-bin/invalid')
@@ -427,20 +432,20 @@
         headers = {b'Authorization' : b'Basic ' +
                    base64.b64encode(b'username:pass')}
         res = self.request('/cgi-bin/file1.py', 'GET', headers=headers)
-        self.assertEqual((b'Hello World\n', 'text/html', 200),
+        self.assertEqual((b'Hello World' + self.linesep, 'text/html', 200),
                 (res.read(), res.getheader('Content-type'), res.status))
 
     def test_no_leading_slash(self):
         # http://bugs.python.org/issue2254
         res = self.request('cgi-bin/file1.py')
-        self.assertEqual((b'Hello World\n', 'text/html', 200),
+        self.assertEqual((b'Hello World' + self.linesep, 'text/html', 200),
              (res.read(), res.getheader('Content-type'), res.status))
 
     def test_os_environ_is_not_altered(self):
         signature = "Test CGI Server"
         os.environ['SERVER_SOFTWARE'] = signature
         res = self.request('/cgi-bin/file1.py')
-        self.assertEqual((b'Hello World\n', 'text/html', 200),
+        self.assertEqual((b'Hello World' + self.linesep, 'text/html', 200),
                 (res.read(), res.getheader('Content-type'), res.status))
         self.assertEqual(os.environ['SERVER_SOFTWARE'], signature)
 
diff -r 3d0686d90f55 Lib/test/test_imaplib.py
--- a/Lib/test/test_imaplib.py
+++ b/Lib/test/test_imaplib.py
@@ -11,7 +11,7 @@
 import time
 import calendar
 
-from test.support import reap_threads, verbose, transient_internet
+from test.support import reap_threads, verbose, transient_internet, run_with_tz
 import unittest
 
 try:
@@ -36,6 +36,13 @@
             b'25 (INTERNALDATE "31-Dec-1999 12:30:00 -1130")')
         self.assertEqual(time.mktime(tt), t0)
 
+    @run_with_tz('MST+07MDT,M4.1.0,M10.5.0')
+    def test_Internaldate2tuple_issue10941(self):
+        self.assertNotEqual(imaplib.Internaldate2tuple(
+            b'25 (INTERNALDATE "02-Apr-2000 02:30:00 +0000")'),
+                            imaplib.Internaldate2tuple(
+            b'25 (INTERNALDATE "02-Apr-2000 03:30:00 +0000")'))
+
     def test_that_Time2Internaldate_returns_a_result(self):
         # We can check only that it successfully produces a result,
         # not the correctness of the result itself, since the result
@@ -226,8 +233,8 @@
         with transient_internet(self.host):
             for cap in self.server.capabilities:
                 self.assertIsInstance(cap, str)
-            self.assertTrue('LOGINDISABLED' in self.server.capabilities)
-            self.assertTrue('AUTH=ANONYMOUS' in self.server.capabilities)
+            self.assertIn('LOGINDISABLED', self.server.capabilities)
+            self.assertIn('AUTH=ANONYMOUS', self.server.capabilities)
             rs = self.server.login(self.username, self.password)
             self.assertEqual(rs[0], 'OK')
 
@@ -250,7 +257,7 @@
     def test_logincapa(self):
         for cap in self.server.capabilities:
             self.assertIsInstance(cap, str)
-        self.assertFalse('LOGINDISABLED' in self.server.capabilities)
+        self.assertNotIn('LOGINDISABLED', self.server.capabilities)
 
 
 @unittest.skipUnless(ssl, "SSL not available")
@@ -261,8 +268,8 @@
     def test_logincapa(self):
         for cap in self.server.capabilities:
             self.assertIsInstance(cap, str)
-        self.assertFalse('LOGINDISABLED' in self.server.capabilities)
-        self.assertTrue('AUTH=PLAIN' in self.server.capabilities)
+        self.assertNotIn('LOGINDISABLED', self.server.capabilities)
+        self.assertIn('AUTH=PLAIN', self.server.capabilities)
 
 
 def test_main():
diff -r 3d0686d90f55 Lib/test/test_import.py
--- a/Lib/test/test_import.py
+++ b/Lib/test/test_import.py
@@ -461,6 +461,13 @@
         drive = path[0]
         unc = "\\\\%s\\%s$"%(hn, drive)
         unc += path[2:]
+        try:
+            os.listdir(unc)
+        except OSError as e:
+            if e.errno in (errno.EPERM, errno.EACCES):
+                # See issue #15338
+                self.skipTest("cannot access administrative share %r" % (unc,))
+            raise
         sys.path.append(path)
         mod = __import__("test_trailing_slash")
         self.assertEqual(mod.testdata, 'test_trailing_slash')
diff -r 3d0686d90f55 Lib/test/test_io.py
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -634,6 +634,19 @@
         for obj in test:
             self.assertTrue(hasattr(obj, "__dict__"))
 
+    def test_fileio_closefd(self):
+        # Issue #4841
+        with self.open(__file__, 'rb') as f1, \
+             self.open(__file__, 'rb') as f2:
+            fileio = self.FileIO(f1.fileno(), closefd=False)
+            # .__init__() must not close f1
+            fileio.__init__(f2.fileno(), closefd=False)
+            f1.readline()
+            # .close() must not close f2
+            fileio.close()
+            f2.readline()
+
+
 class CIOTest(IOTest):
 
     def test_IOBase_finalize(self):
@@ -779,6 +792,20 @@
             buf.raw = x
 
 
+class SizeofTest:
+
+    @support.cpython_only
+    def test_sizeof(self):
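+        # Create two buffered objects that differ only in buffer size and
+        # check that the reported size grows by exactly that difference.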
+        bufsize1 = 4096
+        bufsize2 = 8192
+        rawio = self.MockRawIO()
+        bufio = self.tp(rawio, buffer_size=bufsize1)
+        size = sys.getsizeof(bufio) - bufsize1
+        rawio = self.MockRawIO()
+        bufio = self.tp(rawio, buffer_size=bufsize2)
+        self.assertEqual(sys.getsizeof(bufio), size + bufsize2)
+
+
 class BufferedReaderTest(unittest.TestCase, CommonBufferedTests):
     read_mode = "rb"
 
@@ -970,7 +997,7 @@
                              "failed for {}: {} != 0".format(n, rawio._extraneous_reads))
 
 
-class CBufferedReaderTest(BufferedReaderTest):
+class CBufferedReaderTest(BufferedReaderTest, SizeofTest):
     tp = io.BufferedReader
 
     def test_constructor(self):
@@ -1232,7 +1259,7 @@
             self.tp(self.MockRawIO(), 8, 12)
 
 
-class CBufferedWriterTest(BufferedWriterTest):
+class CBufferedWriterTest(BufferedWriterTest, SizeofTest):
     tp = io.BufferedWriter
 
     def test_constructor(self):
@@ -1623,7 +1650,7 @@
     # You can't construct a BufferedRandom over a non-seekable stream.
     test_unseekable = None
 
-class CBufferedRandomTest(BufferedRandomTest):
+class CBufferedRandomTest(BufferedRandomTest, SizeofTest):
     tp = io.BufferedRandom
 
     def test_constructor(self):
diff -r 3d0686d90f55 Lib/test/test_keywordonlyarg.py
--- a/Lib/test/test_keywordonlyarg.py
+++ b/Lib/test/test_keywordonlyarg.py
@@ -170,6 +170,12 @@
         # used to fail with a SystemError.
         lambda *, k1=unittest: None
 
+    def test_mangling(self):
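+        # The default for the mangled keyword-only parameter __a must still
+        # be applied when f() is called with no arguments.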
+        class X:
+            def f(self, *, __a=42):
+                return __a
+        self.assertEqual(X().f(), 42)
+
 def test_main():
     run_unittest(KeywordOnlyArgTestCase)
 
diff -r 3d0686d90f55 Lib/test/test_list.py
--- a/Lib/test/test_list.py
+++ b/Lib/test/test_list.py
@@ -70,6 +70,14 @@
         check(1000000)
 
 
+    def test_no_comdat_folding(self):
+        # Issue 8847: In the PGO build, the MSVC linker's COMDAT folding
+        # optimization causes failures in code that relies on distinct
+        # function addresses.
+        class L(list): pass
+        with self.assertRaises(TypeError):
+            (3,) + L([1,2])
+
 def test_main(verbose=None):
     support.run_unittest(ListTest)
 
diff -r 3d0686d90f55 Lib/test/test_logging.py
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2001-2011 by Vinay Sajip. All Rights Reserved.
+# Copyright 2001-2012 by Vinay Sajip. All Rights Reserved.
 #
 # Permission to use, copy, modify, and distribute this software and its
 # documentation for any purpose and without fee is hereby granted,
@@ -18,7 +18,7 @@
 
 """Test harness for the logging module. Run all tests.
 
-Copyright (C) 2001-2011 Vinay Sajip. All Rights Reserved.
+Copyright (C) 2001-2012 Vinay Sajip. All Rights Reserved.
 """
 
 import logging
@@ -33,6 +33,7 @@
 import json
 import os
 import queue
+import random
 import re
 import select
 import socket
@@ -43,6 +44,7 @@
 from test.support import captured_stdout, run_with_locale, run_unittest
 from test.support import TestHandler, Matcher
 import textwrap
+import time
 import unittest
 import warnings
 import weakref
@@ -2301,7 +2303,6 @@
             # Failures occur on some systems for MIDNIGHT and W0.
             # Print detailed calculation for MIDNIGHT so we can try to see
             # what's going on
-            import time
             if when == 'MIDNIGHT':
                 try:
                     if rh.utc:
@@ -2328,6 +2329,44 @@
         rh.close()
     setattr(TimedRotatingFileHandlerTest, "test_compute_rollover_%s" % when, test_compute_rollover)
 
+class HandlerTest(BaseTest):
+
+    @unittest.skipIf(os.name == 'nt', 'WatchedFileHandler not appropriate for Windows.')
+    @unittest.skipUnless(threading, 'Threading required for this test.')
+    def test_race(self):
+        # Issue #14632 refers.
+        def remove_loop(fname, tries):
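+            # Repeatedly delete the log file out from under the handler so
+            # it has to notice the change and reopen the file.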
+            for _ in range(tries):
+                try:
+                    os.unlink(fname)
+                except OSError:
+                    pass
+                time.sleep(0.004 * random.randint(0, 4))
+
+        del_count = 500
+        log_count = 500
+
+        for delay in (False, True):
+            fd, fn = tempfile.mkstemp('.log', 'test_logging-3-')
+            os.close(fd)
+            remover = threading.Thread(target=remove_loop, args=(fn, del_count))
+            remover.daemon = True
+            remover.start()
+            h = logging.handlers.WatchedFileHandler(fn, delay=delay)
+            f = logging.Formatter('%(asctime)s: %(levelname)s: %(message)s')
+            h.setFormatter(f)
+            try:
+                for _ in range(log_count):
+                    time.sleep(0.005)
+                    r = logging.makeLogRecord({'msg': 'testing' })
+                    h.handle(r)
+            finally:
+                remover.join()
+                h.close()
+                if os.path.exists(fn):
+                    os.unlink(fn)
+
+
 # Set the locale to the platform-dependent default.  I have no idea
 # why the test does this, but in any case we save the current locale
 # first and restore it at the end.
@@ -2341,7 +2380,7 @@
                  LogRecordFactoryTest, ChildLoggerTest, QueueHandlerTest,
                  RotatingFileHandlerTest,
                  LastResortTest,
-                 TimedRotatingFileHandlerTest
+                 TimedRotatingFileHandlerTest, HandlerTest,
                 )
 
 if __name__ == "__main__":
diff -r 3d0686d90f55 Lib/test/test_long.py
--- a/Lib/test/test_long.py
+++ b/Lib/test/test_long.py
@@ -1148,6 +1148,20 @@
         self.assertRaises(TypeError, myint.from_bytes, 0, 'big')
         self.assertRaises(TypeError, int.from_bytes, 0, 'big', True)
 
+    def test_access_to_nonexistent_digit_0(self):
+        # http://bugs.python.org/issue14630: A bug in _PyLong_Copy meant that
+        # ob_digit[0] was being incorrectly accessed for instances of a
+        # subclass of int, with value 0.
+        class Integer(int):
+            def __new__(cls, value=0):
+                self = int.__new__(cls, value)
+                self.foo = 'foo'
+                return self
+
+        integers = [Integer(0) for i in range(1000)]
+        for n in map(int, integers):
+            self.assertEqual(n, 0)
+
 
 def test_main():
     support.run_unittest(LongTest)
diff -r 3d0686d90f55 Lib/test/test_mailbox.py
--- a/Lib/test/test_mailbox.py
+++ b/Lib/test/test_mailbox.py
@@ -7,6 +7,7 @@
 import email.message
 import re
 import io
+import shutil
 import tempfile
 from test import support
 import unittest
@@ -19,7 +20,7 @@
     pass
 
 
-class TestBase(unittest.TestCase):
+class TestBase:
 
     def _check_sample(self, msg):
         # Inspect a mailbox.Message representation of the sample message
@@ -38,12 +39,7 @@
     def _delete_recursively(self, target):
         # Delete a file or delete a directory recursively
         if os.path.isdir(target):
-            for path, dirs, files in os.walk(target, topdown=False):
-                for name in files:
-                    os.remove(os.path.join(path, name))
-                for name in dirs:
-                    os.rmdir(os.path.join(path, name))
-            os.rmdir(target)
+            shutil.rmtree(target)
         elif os.path.exists(target):
             os.remove(target)
 
@@ -115,10 +111,10 @@
         self.assertMailboxEmpty()
 
     def test_add_that_raises_leaves_mailbox_empty(self):
-        # XXX This test will start failing when Message learns to handle
-        # non-ASCII string headers, and a different internal failure will
-        # need to be found or manufactured.
-        with self.assertRaises(ValueError):
+        def raiser(*args, **kw):
+            raise Exception("a fake error")
+        support.patch(self, email.generator.BytesGenerator, 'flatten', raiser)
+        with self.assertRaises(Exception):
             self._box.add(email.message_from_string("From: Alphöso"))
         self.assertEqual(len(self._box), 0)
         self._box.close()
@@ -152,20 +148,16 @@
             f.write(_bytes_sample_message)
             f.seek(0)
             key = self._box.add(f)
-        # See issue 11062
-        if not isinstance(self._box, mailbox.Babyl):
-            self.assertEqual(self._box.get_bytes(key).split(b'\n'),
-                _bytes_sample_message.split(b'\n'))
+        self.assertEqual(self._box.get_bytes(key).split(b'\n'),
+            _bytes_sample_message.split(b'\n'))
 
     def test_add_binary_nonascii_file(self):
         with tempfile.TemporaryFile('wb+') as f:
             f.write(self._non_latin_bin_msg)
             f.seek(0)
             key = self._box.add(f)
-        # See issue 11062
-        if not isinstance(self._box, mailbox.Babyl):
-            self.assertEqual(self._box.get_bytes(key).split(b'\n'),
-                self._non_latin_bin_msg.split(b'\n'))
+        self.assertEqual(self._box.get_bytes(key).split(b'\n'),
+            self._non_latin_bin_msg.split(b'\n'))
 
     def test_add_text_file_warns(self):
         with tempfile.TemporaryFile('w+') as f:
@@ -173,10 +165,8 @@
             f.seek(0)
             with self.assertWarns(DeprecationWarning):
                 key = self._box.add(f)
-        # See issue 11062
-        if not isinstance(self._box, mailbox.Babyl):
-            self.assertEqual(self._box.get_bytes(key).split(b'\n'),
-                _bytes_sample_message.split(b'\n'))
+        self.assertEqual(self._box.get_bytes(key).split(b'\n'),
+            _bytes_sample_message.split(b'\n'))
 
     def test_add_StringIO_warns(self):
         with self.assertWarns(DeprecationWarning):
@@ -504,6 +494,17 @@
         # Write changes to disk
         self._test_flush_or_close(self._box.flush, True)
 
+    def test_popitem_and_flush_twice(self):
+        # See #15036.
+        self._box.add(self._template % 0)
+        self._box.add(self._template % 1)
+        self._box.flush()
+
+        self._box.popitem()
+        self._box.flush()
+        self._box.popitem()
+        self._box.flush()
+
     def test_lock_unlock(self):
         # Lock and unlock the mailbox
         self.assertFalse(os.path.exists(self._get_lock_path()))
@@ -549,7 +550,7 @@
         return self._path + '.lock'
 
 
-class TestMailboxSuperclass(TestBase):
+class TestMailboxSuperclass(TestBase, unittest.TestCase):
 
     def test_notimplemented(self):
         # Test that all Mailbox methods raise NotImplementedException.
@@ -585,7 +586,7 @@
         self.assertRaises(NotImplementedError, lambda: box.close())
 
 
-class TestMaildir(TestMailbox):
+class TestMaildir(TestMailbox, unittest.TestCase):
 
     _factory = lambda self, path, factory=None: mailbox.Maildir(path, factory)
 
@@ -935,7 +936,49 @@
         self._box._refresh()
         self.assertTrue(refreshed())
 
-class _TestMboxMMDF(TestMailbox):
+
+class _TestSingleFile(TestMailbox):
+    '''Common tests for single-file mailboxes'''
+
+    def test_add_doesnt_rewrite(self):
+        # When only adding messages, flush() should not rewrite the
+        # mailbox file. See issue #9559.
+
+        # The inode number changes if the contents are written to another
+        # file which is then renamed over the original file, so we check
+        # that the inode number stays the same.
+        inode_before = os.stat(self._path).st_ino
+
+        self._box.add(self._template % 0)
+        self._box.flush()
+
+        inode_after = os.stat(self._path).st_ino
+        self.assertEqual(inode_before, inode_after)
+
+        # Make sure the message was really added
+        self._box.close()
+        self._box = self._factory(self._path)
+        self.assertEqual(len(self._box), 1)
+
+    def test_permissions_after_flush(self):
+        # See issue #5346
+
+        # Make the mailbox world writable. It's unlikely that the new
+        # mailbox file would have these permissions after flush(),
+        # because umask usually prevents it.
+        mode = os.stat(self._path).st_mode | 0o666
+        os.chmod(self._path, mode)
+
+        self._box.add(self._template % 0)
+        i = self._box.add(self._template % 1)
+        # Need to remove one message to make flush() create a new file
+        self._box.remove(i)
+        self._box.flush()
+
+        self.assertEqual(os.stat(self._path).st_mode, mode)
+
+
+class _TestMboxMMDF(_TestSingleFile):
 
     def tearDown(self):
         super().tearDown()
@@ -1047,7 +1090,7 @@
         self._box.close()
 
 
-class TestMbox(_TestMboxMMDF):
+class TestMbox(_TestMboxMMDF, unittest.TestCase):
 
     _factory = lambda self, path, factory=None: mailbox.mbox(path, factory)
 
@@ -1070,12 +1113,12 @@
             perms = st.st_mode
             self.assertFalse((perms & 0o111)) # Execute bits should all be off.
 
-class TestMMDF(_TestMboxMMDF):
+class TestMMDF(_TestMboxMMDF, unittest.TestCase):
 
     _factory = lambda self, path, factory=None: mailbox.MMDF(path, factory)
 
 
-class TestMH(TestMailbox):
+class TestMH(TestMailbox, unittest.TestCase):
 
     _factory = lambda self, path, factory=None: mailbox.MH(path, factory)
 
@@ -1210,7 +1253,7 @@
         return os.path.join(self._path, '.mh_sequences.lock')
 
 
-class TestBabyl(TestMailbox):
+class TestBabyl(_TestSingleFile, unittest.TestCase):
 
     _factory = lambda self, path, factory=None: mailbox.Babyl(path, factory)
 
@@ -1275,7 +1318,7 @@
             self.assertTrue(box.files[i].closed)
 
 
-class TestMessage(TestBase):
+class TestMessage(TestBase, unittest.TestCase):
 
     _factory = mailbox.Message      # Overridden by subclasses to reuse tests
 
@@ -1355,7 +1398,7 @@
         pass
 
 
-class TestMaildirMessage(TestMessage):
+class TestMaildirMessage(TestMessage, unittest.TestCase):
 
     _factory = mailbox.MaildirMessage
 
@@ -1429,7 +1472,7 @@
         self._check_sample(msg)
 
 
-class _TestMboxMMDFMessage(TestMessage):
+class _TestMboxMMDFMessage:
 
     _factory = mailbox._mboxMMDFMessage
 
@@ -1476,12 +1519,12 @@
                               r"\d{2} \d{4}", msg.get_from()) is not None)
 
 
-class TestMboxMessage(_TestMboxMMDFMessage):
+class TestMboxMessage(_TestMboxMMDFMessage, TestMessage):
 
     _factory = mailbox.mboxMessage
 
 
-class TestMHMessage(TestMessage):
+class TestMHMessage(TestMessage, unittest.TestCase):
 
     _factory = mailbox.MHMessage
 
@@ -1512,7 +1555,7 @@
         self.assertEqual(msg.get_sequences(), ['foobar', 'replied'])
 
 
-class TestBabylMessage(TestMessage):
+class TestBabylMessage(TestMessage, unittest.TestCase):
 
     _factory = mailbox.BabylMessage
 
@@ -1567,12 +1610,12 @@
             self.assertEqual(visible[header], msg[header])
 
 
-class TestMMDFMessage(_TestMboxMMDFMessage):
+class TestMMDFMessage(_TestMboxMMDFMessage, TestMessage):
 
     _factory = mailbox.MMDFMessage
 
 
-class TestMessageConversion(TestBase):
+class TestMessageConversion(TestBase, unittest.TestCase):
 
     def test_plain_to_x(self):
         # Convert Message to all formats
@@ -1913,7 +1956,7 @@
         self.assertTrue(proxy.closed)
 
 
-class TestProxyFile(TestProxyFileBase):
+class TestProxyFile(TestProxyFileBase, unittest.TestCase):
 
     def setUp(self):
         self._path = support.TESTFN
@@ -1962,7 +2005,7 @@
         self._test_close(mailbox._ProxyFile(self._file))
 
 
-class TestPartialFile(TestProxyFileBase):
+class TestPartialFile(TestProxyFileBase, unittest.TestCase):
 
     def setUp(self):
         self._path = support.TESTFN
@@ -2029,6 +2072,10 @@
     def setUp(self):
         # create a new maildir mailbox to work with:
         self._dir = support.TESTFN
+        if os.path.isdir(self._dir):
+            shutil.rmtree(self._dir)
+        elif os.path.isfile(self._dir):
+            os.unlink(self._dir)
         os.mkdir(self._dir)
         os.mkdir(os.path.join(self._dir, "cur"))
         os.mkdir(os.path.join(self._dir, "tmp"))
diff -r 3d0686d90f55 Lib/test/test_marshal.py
--- a/Lib/test/test_marshal.py
+++ b/Lib/test/test_marshal.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 
 from test import support
+import array
 import marshal
 import sys
 import unittest
@@ -137,6 +138,27 @@
         for constructor in (set, frozenset):
             self.helper(constructor(self.d.keys()))
 
+
+class BufferTestCase(unittest.TestCase, HelperMixin):
+
+    def test_bytearray(self):
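+        # Buffer objects are marshalled as plain bytes, so they round-trip
+        # back as bytes rather than as their original type.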
+        b = bytearray(b"abc")
+        self.helper(b)
+        new = marshal.loads(marshal.dumps(b))
+        self.assertEqual(type(new), bytes)
+
+    def test_memoryview(self):
+        b = memoryview(b"abc")
+        self.helper(b)
+        new = marshal.loads(marshal.dumps(b))
+        self.assertEqual(type(new), bytes)
+
+    def test_array(self):
+        a = array.array('B', b"abc")
+        new = marshal.loads(marshal.dumps(a))
+        self.assertEqual(new, b"abc")
+
+
 class BugsTestCase(unittest.TestCase):
     def test_bug_5888452(self):
         # Simple-minded check for SF 588452: Debug build crashes
@@ -162,7 +184,7 @@
                 pass
 
     def test_loads_recursion(self):
-        s = 'c' + ('X' * 4*4) + '{' * 2**20
+        s = b'c' + (b'X' * 4*4) + b'{' * 2**20
         self.assertRaises(ValueError, marshal.loads, s)
 
     def test_recursion_limit(self):
@@ -235,6 +257,11 @@
             finally:
                 support.unlink(support.TESTFN)
 
+    def test_loads_reject_unicode_strings(self):
+        # Issue #14177: marshal.loads() should not accept unicode strings
+        unicode_string = 'T'
+        self.assertRaises(TypeError, marshal.loads, unicode_string)
+
 
 def test_main():
     support.run_unittest(IntTestCase,
@@ -243,6 +270,7 @@
                               CodeTestCase,
                               ContainerTestCase,
                               ExceptionTestCase,
+                              BufferTestCase,
                               BugsTestCase)
 
 if __name__ == "__main__":
diff -r 3d0686d90f55 Lib/test/test_memoryio.py
--- a/Lib/test/test_memoryio.py
+++ b/Lib/test/test_memoryio.py
@@ -654,6 +654,17 @@
         memio.close()
         self.assertRaises(ValueError, memio.__setstate__, (b"closed", 0, None))
 
+    check_sizeof = support.check_sizeof
+
+    @support.cpython_only
+    def test_sizeof(self):
+        basesize = support.calcobjsize('P2PP2PP')
+        check = self.check_sizeof
+        self.assertEqual(object.__sizeof__(io.BytesIO()), basesize)
+        check(io.BytesIO(), basesize)
+        check(io.BytesIO(b'a'), basesize + 1 + 1)
+        check(io.BytesIO(b'a' * 1000), basesize + 1000 + 1)
+
 
 class CStringIOTest(PyStringIOTest):
     ioclass = io.StringIO
diff -r 3d0686d90f55 Lib/test/test_minidom.py
--- a/Lib/test/test_minidom.py
+++ b/Lib/test/test_minidom.py
@@ -350,13 +350,31 @@
     def testGetAttrList(self):
         pass
 
-    def testGetAttrValues(self): pass
+    def testGetAttrValues(self):
+        pass
 
-    def testGetAttrLength(self): pass
+    def testGetAttrLength(self):
+        pass
 
-    def testGetAttribute(self): pass
+    def testGetAttribute(self):
+        dom = Document()
+        child = dom.appendChild(
+            dom.createElementNS("http://www.python.org", "python:abc"))
+        self.assertEqual(child.getAttribute('missing'), '')
 
-    def testGetAttributeNS(self): pass
+    def testGetAttributeNS(self):
+        dom = Document()
+        child = dom.appendChild(
+                dom.createElementNS("http://www.python.org", "python:abc"))
+        child.setAttributeNS("http://www.w3.org", "xmlns:python",
+                                                "http://www.python.org")
+        self.assertEqual(child.getAttributeNS("http://www.w3.org", "python"),
+            'http://www.python.org')
+        self.assertEqual(child.getAttributeNS("http://www.w3.org", "other"),
+            '')
+        child2 = child.appendChild(dom.createElement('abc'))
+        self.assertEqual(child2.getAttributeNS("http://www.python.org", "missing"),
+                         '')
 
     def testGetAttributeNode(self): pass
 
diff -r 3d0686d90f55 Lib/test/test_multiprocessing.py
--- a/Lib/test/test_multiprocessing.py
+++ b/Lib/test/test_multiprocessing.py
@@ -18,6 +18,7 @@
 import random
 import logging
 import test.support
+import test.script_helper
 
 
 # Skip tests if _multiprocessing wasn't built.
@@ -390,6 +391,36 @@
         1/0 # MARKER
 
 
+    @classmethod
+    def _test_sys_exit(cls, reason, testfn):
+        sys.stderr = open(testfn, 'w')
+        sys.exit(reason)
+
+    def test_sys_exit(self):
+        # See Issue 13854
+        if self.TYPE == 'threads':
+            return
+
+        testfn = test.support.TESTFN
+        self.addCleanup(test.support.unlink, testfn)
+
+        for reason, code in (([1, 2, 3], 1), ('ignore this', 0)):
+            p = self.Process(target=self._test_sys_exit, args=(reason, testfn))
+            p.daemon = True
+            p.start()
+            p.join(5)
+            self.assertEqual(p.exitcode, code)
+
+            with open(testfn, 'r') as f:
+                self.assertEqual(f.read().rstrip(), str(reason))
+
+        for reason in (True, False, 8):
+            p = self.Process(target=sys.exit, args=(reason,))
+            p.daemon = True
+            p.start()
+            p.join(5)
+            self.assertEqual(p.exitcode, reason)
+
 #
 #
 #
@@ -1178,6 +1209,18 @@
         join()
         self.assertLess(join.elapsed, 0.5)
 
+    def test_empty_iterable(self):
+        # See Issue 12157
+        p = self.Pool(1)
+
+        self.assertEqual(p.map(sqr, []), [])
+        self.assertEqual(list(p.imap(sqr, [])), [])
+        self.assertEqual(list(p.imap_unordered(sqr, [])), [])
+        self.assertEqual(p.map_async(sqr, []).get(), [])
+
+        p.close()
+        p.join()
+
 def raising():
     raise KeyError("key")
 
@@ -1732,6 +1775,23 @@
             self.assertEqual(conn.recv(), 'hello')
             p.join()
             l.close()
+
+    def test_issue14725(self):
+        l = self.connection.Listener()
+        p = self.Process(target=self._test, args=(l.address,))
+        p.daemon = True
+        p.start()
+        time.sleep(1)
+        # On Windows the client process should by now have connected,
+        # written data and closed the pipe handle.  This causes
+        # ConnectNamedPipe() to fail with ERROR_NO_DATA.  See Issue
+        # 14725.
+        conn = l.accept()
+        self.assertEqual(conn.recv(), 'hello')
+        conn.close()
+        p.join()
+        l.close()
+
 #
 # Test of sending connection and socket objects between processes
 #
@@ -2159,7 +2219,7 @@
         'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore',
         'Condition', 'Event', 'Value', 'Array', 'RawValue',
         'RawArray', 'current_process', 'active_children', 'Pipe',
-        'connection', 'JoinableQueue'
+        'connection', 'JoinableQueue', 'Pool'
         )))
 
 testcases_processes = create_test_cases(ProcessesMixin, type='processes')
@@ -2173,7 +2233,7 @@
     locals().update(get_attributes(manager, (
         'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore',
        'Condition', 'Event', 'Value', 'Array', 'list', 'dict',
-        'Namespace', 'JoinableQueue'
+        'Namespace', 'JoinableQueue', 'Pool'
         )))
 
 testcases_manager = create_test_cases(ManagerMixin, type='manager')
@@ -2187,7 +2247,7 @@
         'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore',
         'Condition', 'Event', 'Value', 'Array', 'current_process',
         'active_children', 'Pipe', 'connection', 'dict', 'list',
-        'Namespace', 'JoinableQueue'
+        'Namespace', 'JoinableQueue', 'Pool'
         )))
 
 testcases_threads = create_test_cases(ThreadsMixin, type='threads')
@@ -2319,8 +2379,80 @@
         flike.flush()
         assert sio.getvalue() == 'foo'
 
+
+#
+# Issue 14151: Test invalid family on invalid environment
+#
+
+class TestInvalidFamily(unittest.TestCase):
+
+    @unittest.skipIf(WIN32, "skipped on Windows")
+    def test_invalid_family(self):
+        with self.assertRaises(ValueError):
+            multiprocessing.connection.Listener(r'\\.\test')
+
+    @unittest.skipUnless(WIN32, "skipped on non-Windows platforms")
+    def test_invalid_family_win32(self):
+        with self.assertRaises(ValueError):
+            multiprocessing.connection.Listener('/var/test.pipe')
+
+#
+# Test interaction with socket timeouts - see Issue #6056
+#
+
+class TestTimeouts(unittest.TestCase):
+    @classmethod
+    def _test_timeout(cls, child, address):
+        time.sleep(1)
+        child.send(123)
+        child.close()
+        conn = multiprocessing.connection.Client(address)
+        conn.send(456)
+        conn.close()
+
+    def test_timeout(self):
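+        # Exercise both a Pipe and a Listener/Client connection while a very
+        # small global default socket timeout is in effect.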
+        old_timeout = socket.getdefaulttimeout()
+        try:
+            socket.setdefaulttimeout(0.1)
+            parent, child = multiprocessing.Pipe(duplex=True)
+            l = multiprocessing.connection.Listener(family='AF_INET')
+            p = multiprocessing.Process(target=self._test_timeout,
+                                        args=(child, l.address))
+            p.start()
+            child.close()
+            self.assertEqual(parent.recv(), 123)
+            parent.close()
+            conn = l.accept()
+            self.assertEqual(conn.recv(), 456)
+            conn.close()
+            l.close()
+            p.join(10)
+        finally:
+            socket.setdefaulttimeout(old_timeout)
+
+#
+# Test what happens with no "if __name__ == '__main__'"
+#
+
+class TestNoForkBomb(unittest.TestCase):
+    def test_noforkbomb(self):
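+        # On Windows the child process re-imports the parent's main module,
+        # so a script without the __main__ guard must fail with RuntimeError;
+        # on other platforms it simply runs and prints its output.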
+        name = os.path.join(os.path.dirname(__file__), 'mp_fork_bomb.py')
+        if WIN32:
+            rc, out, err = test.script_helper.assert_python_failure(name)
+            self.assertEqual('', out.decode('ascii'))
+            self.assertIn('RuntimeError', err.decode('ascii'))
+        else:
+            rc, out, err = test.script_helper.assert_python_ok(name)
+            self.assertEqual('123', out.decode('ascii').rstrip())
+            self.assertEqual('', err.decode('ascii'))
+
+#
+#
+#
+
 testcases_other = [OtherTest, TestInvalidHandle, TestInitializers,
-                   TestStdinBadfiledescriptor]
+                   TestStdinBadfiledescriptor, TestInvalidFamily,
+                   TestTimeouts, TestNoForkBomb]
 
 #
 #
diff -r 3d0686d90f55 Lib/test/test_nntplib.py
--- a/Lib/test/test_nntplib.py
+++ b/Lib/test/test_nntplib.py
@@ -176,7 +176,13 @@
         resp, article = self.server.article(art_num)
         self.assertTrue(resp.startswith("220 "), resp)
         self.check_article_resp(resp, article, art_num)
-        self.assertEqual(article.lines, head.lines + [b''] + body.lines)
+        # Tolerate running the tests from behind an NNTP virus checker
+        blacklist = lambda line: line.startswith(b'X-Antivirus')
+        filtered_head_lines = [line for line in head.lines
+                               if not blacklist(line)]
+        filtered_lines = [line for line in article.lines
+                          if not blacklist(line)]
+        self.assertEqual(filtered_lines, filtered_head_lines + [b''] + body.lines)
 
     def test_capabilities(self):
         # The server under test implements NNTP version 2 and has a
diff -r 3d0686d90f55 Lib/test/test_os.py
--- a/Lib/test/test_os.py
+++ b/Lib/test/test_os.py
@@ -15,6 +15,7 @@
 import contextlib
 import mmap
 import uuid
+import stat
 from test.script_helper import assert_python_ok
 
 # Detect whether we're on a Linux system that uses the (now outdated
@@ -574,12 +575,42 @@
         path = os.path.join(support.TESTFN, 'dir1')
         mode = 0o777
         old_mask = os.umask(0o022)
-        os.makedirs(path, mode)
-        self.assertRaises(OSError, os.makedirs, path, mode)
-        self.assertRaises(OSError, os.makedirs, path, mode, exist_ok=False)
-        self.assertRaises(OSError, os.makedirs, path, 0o776, exist_ok=True)
-        os.makedirs(path, mode=mode, exist_ok=True)
-        os.umask(old_mask)
+        try:
+            os.makedirs(path, mode)
+            self.assertRaises(OSError, os.makedirs, path, mode)
+            self.assertRaises(OSError, os.makedirs, path, mode, exist_ok=False)
+            self.assertRaises(OSError, os.makedirs, path, 0o776, exist_ok=True)
+            os.makedirs(path, mode=mode, exist_ok=True)
+        finally:
+            os.umask(old_mask)
+
+    def test_exist_ok_s_isgid_directory(self):
+        path = os.path.join(support.TESTFN, 'dir1')
+        S_ISGID = stat.S_ISGID
+        mode = 0o777
+        old_mask = os.umask(0o022)
+        try:
+            existing_testfn_mode = stat.S_IMODE(
+                    os.lstat(support.TESTFN).st_mode)
+            try:
+                os.chmod(support.TESTFN, existing_testfn_mode | S_ISGID)
+            except OSError:
+                raise unittest.SkipTest('Cannot set S_ISGID for dir.')
+            if (os.lstat(support.TESTFN).st_mode & S_ISGID != S_ISGID):
+                raise unittest.SkipTest('No support for S_ISGID dir mode.')
+            # The OS should apply S_ISGID from the parent dir for us, but
+            # this test need not depend on that behavior.  Be explicit.
+            os.makedirs(path, mode | S_ISGID)
+            # http://bugs.python.org/issue14992
+            # Should not fail when the bit is already set.
+            os.makedirs(path, mode, exist_ok=True)
+            # remove the bit.
+            os.chmod(path, stat.S_IMODE(os.lstat(path).st_mode) & ~S_ISGID)
+            with self.assertRaises(OSError):
+                # Should fail when the bit is not already set when demanded.
+                os.makedirs(path, mode | S_ISGID, exist_ok=True)
+        finally:
+            os.umask(old_mask)
 
     def test_exist_ok_existing_regular_file(self):
         base = support.TESTFN
diff -r 3d0686d90f55 Lib/test/test_parser.py
--- a/Lib/test/test_parser.py
+++ b/Lib/test/test_parser.py
@@ -2,6 +2,7 @@
 import unittest
 import sys
 import operator
+import struct
 from test import support
 
 #
@@ -57,6 +58,16 @@
                          "    if (yield):\n"
                          "        yield x\n")
 
+    def test_nonlocal_statement(self):
+        self.check_suite("def f():\n"
+                         "    x = 0\n"
+                         "    def g():\n"
+                         "        nonlocal x\n")
+        self.check_suite("def f():\n"
+                         "    x = y = 0\n"
+                         "    def g():\n"
+                         "        nonlocal x, y\n")
+
     def test_expressions(self):
         self.check_expr("foo(1)")
         self.check_expr("[1, 2, 3]")
@@ -96,6 +107,8 @@
         self.check_expr("lambda x, *y, **z: 0")
         self.check_expr("(x for x in range(10))")
         self.check_expr("foo(x for x in range(10))")
+        self.check_expr("...")
+        self.check_expr("a[...]")
 
     def test_simple_expression(self):
         # expr_stmt
@@ -146,6 +159,27 @@
         self.check_suite("@funcattrs()\n"
                          "def f(): pass")
 
+        # keyword-only arguments
+        self.check_suite("def f(*, a): pass")
+        self.check_suite("def f(*, a = 5): pass")
+        self.check_suite("def f(*, a = 5, b): pass")
+        self.check_suite("def f(*, a, b = 5): pass")
+        self.check_suite("def f(*, a, b = 5, **kwds): pass")
+        self.check_suite("def f(*args, a): pass")
+        self.check_suite("def f(*args, a = 5): pass")
+        self.check_suite("def f(*args, a = 5, b): pass")
+        self.check_suite("def f(*args, a, b = 5): pass")
+        self.check_suite("def f(*args, a, b = 5, **kwds): pass")
+
+        # function annotations
+        self.check_suite("def f(a: int): pass")
+        self.check_suite("def f(a: int = 5): pass")
+        self.check_suite("def f(*args: list): pass")
+        self.check_suite("def f(**kwds: dict): pass")
+        self.check_suite("def f(*, a: int): pass")
+        self.check_suite("def f(*, a: int = 5): pass")
+        self.check_suite("def f() -> int: pass")
+
     def test_class_defs(self):
         self.check_suite("class foo():pass")
         self.check_suite("class foo(object):pass")
@@ -266,6 +300,37 @@
         self.check_suite("[*a, *b] = y")
         self.check_suite("for [*x, b] in x: pass")
 
+    def test_raise_statement(self):
+        self.check_suite("raise\n")
+        self.check_suite("raise e\n")
+        self.check_suite("try:\n"
+                         "    suite\n"
+                         "except Exception as e:\n"
+                         "    raise ValueError from e\n")
+
+    def test_set_displays(self):
+        self.check_expr('{2}')
+        self.check_expr('{2,}')
+        self.check_expr('{2, 3}')
+        self.check_expr('{2, 3,}')
+
+    def test_dict_displays(self):
+        self.check_expr('{}')
+        self.check_expr('{a:b}')
+        self.check_expr('{a:b,}')
+        self.check_expr('{a:b, c:d}')
+        self.check_expr('{a:b, c:d,}')
+
+    def test_set_comprehensions(self):
+        self.check_expr('{x for x in seq}')
+        self.check_expr('{f(x) for x in seq}')
+        self.check_expr('{f(x) for x in seq if condition(x)}')
+
+    def test_dict_comprehensions(self):
+        self.check_expr('{x:x for x in seq}')
+        self.check_expr('{x**2:x[3] for x in seq if condition(x)}')
+        self.check_expr('{x:x for x in seq1 for y in seq2 if condition(x, y)}')
+
 
 #
 #  Second, we take *invalid* trees and make sure we get ParserError
@@ -611,6 +676,44 @@
         self.assertRaises(TypeError, operator.lt, st1, 1815)
         self.assertRaises(TypeError, operator.gt, b'waterloo', st2)
 
+    check_sizeof = support.check_sizeof
+
+    @support.cpython_only
+    def test_sizeof(self):
+        def XXXROUNDUP(n):
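+            # Round up the child count the same way the parser allocates its
+            # child arrays, so sizeofchildren() can estimate the real size.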
+            if n <= 1:
+                return n
+            if n <= 128:
+                return (n + 3) & ~3
+            return 1 << (n - 1).bit_length()
+
+        basesize = support.calcobjsize('Pii')
+        nodesize = struct.calcsize('hP3iP0h')
+        def sizeofchildren(node):
+            if node is None:
+                return 0
+            res = 0
+            hasstr = len(node) > 1 and isinstance(node[-1], str)
+            if hasstr:
+                res += len(node[-1]) + 1
+            children = node[1:-1] if hasstr else node[1:]
+            if children:
+                res += XXXROUNDUP(len(children)) * nodesize
+                for child in children:
+                    res += sizeofchildren(child)
+            return res
+
+        def check_st_sizeof(st):
+            self.check_sizeof(st, basesize + nodesize +
+                                  sizeofchildren(st.totuple()))
+
+        check_st_sizeof(parser.expr('2 + 3'))
+        check_st_sizeof(parser.expr('2 + 3 + 4'))
+        check_st_sizeof(parser.suite('x = 2 + 3'))
+        check_st_sizeof(parser.suite(''))
+        check_st_sizeof(parser.suite('# -*- coding: utf-8 -*-'))
+        check_st_sizeof(parser.expr('[' + '2,' * 1000 + ']'))
+
 
     # XXX tests for pickling and unpickling of ST objects should go here
 
diff -r 3d0686d90f55 Lib/test/test_pdb.py
--- a/Lib/test/test_pdb.py
+++ b/Lib/test/test_pdb.py
@@ -5,6 +5,7 @@
 import sys
 import unittest
 import subprocess
+import textwrap
 
 from test import support
 # This little helper class is essential for testing pdb under doctest.
@@ -595,6 +596,23 @@
 
 class PdbTestCase(unittest.TestCase):
 
+    def run_pdb(self, script, commands):
+        """Run 'script' lines with pdb and the pdb 'commands'."""
+        filename = 'main.py'
+        with open(filename, 'w') as f:
+            f.write(textwrap.dedent(script))
+        self.addCleanup(support.unlink, filename)
+        cmd = [sys.executable, '-m', 'pdb', filename]
+        stdout = stderr = None
+        with subprocess.Popen(cmd, stdout=subprocess.PIPE,
+                                   stdin=subprocess.PIPE,
+                                   stderr=subprocess.STDOUT,
+                                   ) as proc:
+            stdout, stderr = proc.communicate(str.encode(commands))
+        stdout = stdout and bytes.decode(stdout)
+        stderr = stderr and bytes.decode(stderr)
+        return stdout, stderr
+
     def test_issue7964(self):
         # open the file as binary so we can force \r\n newline
         with open(support.TESTFN, 'wb') as f:
@@ -610,6 +628,42 @@
         self.assertNotIn(b'SyntaxError', stdout,
                          "Got a syntax error running test script under PDB")
 
+    def test_issue13183(self):
+        script = """
+            from bar import bar
+
+            def foo():
+                bar()
+
+            def nope():
+                pass
+
+            def foobar():
+                foo()
+                nope()
+
+            foobar()
+        """
+        commands = """
+            from bar import bar
+            break bar
+            continue
+            step
+            step
+            quit
+        """
+        bar = """
+            def bar():
+                pass
+        """
+        with open('bar.py', 'w') as f:
+            f.write(textwrap.dedent(bar))
+        self.addCleanup(support.unlink, 'bar.py')
+        stdout, stderr = self.run_pdb(script, commands)
+        self.assertTrue(
+            any('main.py(5)foo()->None' in l for l in stdout.splitlines()),
+            'Failed to step into the caller after a return')
+
     def tearDown(self):
         support.unlink(support.TESTFN)
 
diff -r 3d0686d90f55 Lib/test/test_pkgutil.py
--- a/Lib/test/test_pkgutil.py
+++ b/Lib/test/test_pkgutil.py
@@ -1,4 +1,4 @@
-from test.support import run_unittest
+from test.support import run_unittest, unload
 import unittest
 import sys
 import imp
@@ -137,8 +137,99 @@
         self.assertEqual(foo.loads, 1)
         del sys.modules['foo']
 
+
+class ExtendPathTests(unittest.TestCase):
+    def create_init(self, pkgname):
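+        # Create a package directory whose __init__ extends __path__ via
+        # pkgutil.extend_path, and put its containing directory on sys.path.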
+        dirname = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, dirname)
+        sys.path.insert(0, dirname)
+
+        pkgdir = os.path.join(dirname, pkgname)
+        os.mkdir(pkgdir)
+        with open(os.path.join(pkgdir, '__init__.py'), 'w') as fl:
+            fl.write('from pkgutil import extend_path\n__path__ = extend_path(__path__, __name__)\n')
+
+        return dirname
+
+    def create_submodule(self, dirname, pkgname, submodule_name, value):
+        module_name = os.path.join(dirname, pkgname, submodule_name + '.py')
+        with open(module_name, 'w') as fl:
+            print('value={}'.format(value), file=fl)
+
+    def setUp(self):
+        # Create 2 directories on sys.path
+        self.pkgname = 'foo'
+        self.dirname_0 = self.create_init(self.pkgname)
+        self.dirname_1 = self.create_init(self.pkgname)
+
+    def tearDown(self):
+        del sys.path[0]
+        del sys.path[0]
+        del sys.modules['foo']
+        del sys.modules['foo.bar']
+        del sys.modules['foo.baz']
+
+    def test_simple(self):
+        self.create_submodule(self.dirname_0, self.pkgname, 'bar', 0)
+        self.create_submodule(self.dirname_1, self.pkgname, 'baz', 1)
+        import foo.bar
+        import foo.baz
+        # Ensure we read the expected values
+        self.assertEqual(foo.bar.value, 0)
+        self.assertEqual(foo.baz.value, 1)
+
+        # Ensure the path is set up correctly
+        self.assertEqual(sorted(foo.__path__),
+                         sorted([os.path.join(self.dirname_0, self.pkgname),
+                                 os.path.join(self.dirname_1, self.pkgname)]))
+
+    # XXX: test .pkg files
+
+
+class NestedNamespacePackageTest(unittest.TestCase):
+
+    def setUp(self):
+        self.basedir = tempfile.mkdtemp()
+        self.old_path = sys.path[:]
+
+    def tearDown(self):
+        sys.path[:] = self.old_path
+        shutil.rmtree(self.basedir)
+
+    def create_module(self, name, contents):
+        base, final = name.rsplit('.', 1)
+        base_path = os.path.join(self.basedir, base.replace('.', os.path.sep))
+        os.makedirs(base_path, exist_ok=True)
+        with open(os.path.join(base_path, final + ".py"), 'w') as f:
+            f.write(contents)
+
+    def test_nested(self):
+        pkgutil_boilerplate = (
+            'import pkgutil; '
+            '__path__ = pkgutil.extend_path(__path__, __name__)')
+        self.create_module('a.pkg.__init__', pkgutil_boilerplate)
+        self.create_module('b.pkg.__init__', pkgutil_boilerplate)
+        self.create_module('a.pkg.subpkg.__init__', pkgutil_boilerplate)
+        self.create_module('b.pkg.subpkg.__init__', pkgutil_boilerplate)
+        self.create_module('a.pkg.subpkg.c', 'c = 1')
+        self.create_module('b.pkg.subpkg.d', 'd = 2')
+        sys.path.insert(0, os.path.join(self.basedir, 'a'))
+        sys.path.insert(0, os.path.join(self.basedir, 'b'))
+        import pkg
+        self.addCleanup(unload, 'pkg')
+        self.assertEqual(len(pkg.__path__), 2)
+        import pkg.subpkg
+        self.addCleanup(unload, 'pkg.subpkg')
+        self.assertEqual(len(pkg.subpkg.__path__), 2)
+        from pkg.subpkg.c import c
+        from pkg.subpkg.d import d
+        self.assertEqual(c, 1)
+        self.assertEqual(d, 2)
+
+
 def test_main():
-    run_unittest(PkgutilTests, PkgutilPEP302Tests)
+    run_unittest(PkgutilTests, PkgutilPEP302Tests, ExtendPathTests,
+                 NestedNamespacePackageTest)
     # this is necessary if test is run repeated (like when finding leaks)
     import zipimport
     zipimport._zip_directory_cache.clear()
diff -r 3d0686d90f55 Lib/test/test_posix.py
--- a/Lib/test/test_posix.py
+++ b/Lib/test/test_posix.py
@@ -9,6 +9,7 @@
 import sys
 import time
 import os
+import platform
 import pwd
 import shutil
 import stat
@@ -106,7 +107,11 @@
         # If a non-privileged user invokes it, it should fail with OSError
         # EPERM.
         if os.getuid() != 0:
-            name = pwd.getpwuid(posix.getuid()).pw_name
+            try:
+                name = pwd.getpwuid(posix.getuid()).pw_name
+            except KeyError:
+                # the current UID may not have a pwd entry
+                raise unittest.SkipTest("need a pwd entry")
             try:
                 posix.initgroups(name, 13)
             except OSError as e:
@@ -229,6 +234,9 @@
 
     def _test_all_chown_common(self, chown_func, first_param):
         """Common code for chown, fchown and lchown tests."""
+        # test a successful chown call
+        chown_func(first_param, os.getuid(), os.getgid())
+
         if os.getuid() == 0:
             try:
                 # Many linux distros have a nfsnobody user as MAX_UID-2
@@ -240,12 +248,15 @@
                 chown_func(first_param, ent.pw_uid, ent.pw_gid)
             except KeyError:
                 pass
+        elif platform.system() in ('HP-UX', 'SunOS'):
+            # HP-UX and Solaris can allow a non-root user to chown() to root
+            # (issue #5113)
+            raise unittest.SkipTest("Skipping because of non-standard chown() "
+                                    "behavior")
         else:
             # non-root cannot chown to root, raises OSError
             self.assertRaises(OSError, chown_func,
                               first_param, 0, 0)
-        # test a successful chown call
-        chown_func(first_param, os.getuid(), os.getgid())
 
     @unittest.skipUnless(hasattr(posix, 'chown'), "test needs os.chown()")
     def test_chown(self):
@@ -323,7 +334,16 @@
     def _test_chflags_regular_file(self, chflags_func, target_file):
         st = os.stat(target_file)
         self.assertTrue(hasattr(st, 'st_flags'))
-        chflags_func(target_file, st.st_flags | stat.UF_IMMUTABLE)
+
+        # ZFS returns EOPNOTSUPP when attempting to set flag UF_IMMUTABLE.
+        try:
+            chflags_func(target_file, st.st_flags | stat.UF_IMMUTABLE)
+        except OSError as err:
+            if err.errno != errno.EOPNOTSUPP:
+                raise
+            msg = 'chflag UF_IMMUTABLE not supported by underlying fs'
+            self.skipTest(msg)
+
         try:
             new_st = os.stat(target_file)
             self.assertEqual(st.st_flags | stat.UF_IMMUTABLE, new_st.st_flags)
@@ -352,8 +372,16 @@
         self.teardown_files.append(_DUMMY_SYMLINK)
         dummy_symlink_st = os.lstat(_DUMMY_SYMLINK)
 
-        posix.lchflags(_DUMMY_SYMLINK,
-                       dummy_symlink_st.st_flags | stat.UF_IMMUTABLE)
+        # ZFS returns EOPNOTSUPP when attempting to set flag UF_IMMUTABLE.
+        try:
+            posix.lchflags(_DUMMY_SYMLINK,
+                           dummy_symlink_st.st_flags | stat.UF_IMMUTABLE)
+        except OSError as err:
+            if err.errno != errno.EOPNOTSUPP:
+                raise
+            msg = 'chflag UF_IMMUTABLE not supported by underlying fs'
+            self.skipTest(msg)
+
         try:
             new_testfn_st = os.stat(support.TESTFN)
             new_dummy_symlink_st = os.lstat(_DUMMY_SYMLINK)
@@ -414,8 +442,9 @@
     def test_getgroups(self):
         with os.popen('id -G') as idg:
             groups = idg.read().strip()
+            ret = idg.close()
 
-        if not groups:
+        if ret is not None or not groups:
             raise unittest.SkipTest("need working 'id -G'")
 
         # 'id -G' and 'os.getgroups()' should return the same
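# Illustrative sketch (not from the patch): the test_posix.py hunks above skip the
# chflags tests when the filesystem (e.g. ZFS) rejects UF_IMMUTABLE. Assuming a BSD-like
# platform that provides os.chflags, and a hypothetical throwaway scratch file, the
# same guard looks like this outside unittest:
import errno
import os
import stat

path = "scratch.txt"            # hypothetical scratch file
with open(path, "w") as f:
    f.write("x")

st = os.stat(path)
try:
    os.chflags(path, st.st_flags | stat.UF_IMMUTABLE)
except OSError as err:
    if err.errno != errno.EOPNOTSUPP:
        raise
    print("UF_IMMUTABLE not supported by the underlying fs")
else:
    os.chflags(path, st.st_flags)   # clear the flag so the file can be deleted again
os.unlink(path)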
diff -r 3d0686d90f55 Lib/test/test_posixpath.py
--- a/Lib/test/test_posixpath.py
+++ b/Lib/test/test_posixpath.py
@@ -1,6 +1,7 @@
 import unittest
 from test import support, test_genericpath
 
+import itertools
 import posixpath
 import os
 import sys
@@ -56,8 +57,21 @@
         self.assertEqual(posixpath.join(b"/foo/", b"bar/", b"baz/"),
                          b"/foo/bar/baz/")
 
-        self.assertRaises(TypeError, posixpath.join, b"bytes", "str")
-        self.assertRaises(TypeError, posixpath.join, "str", b"bytes")
+        def check_error_msg(list_of_args, msg):
+            """Check posixpath.join raises friendly TypeErrors."""
+            for args in (item for perm in list_of_args
+                              for item in itertools.permutations(perm)):
+                with self.assertRaises(TypeError) as cm:
+                    posixpath.join(*args)
+                self.assertEqual(msg, cm.exception.args[0])
+
+        check_error_msg([[b'bytes', 'str'], [bytearray(b'bytes'), 'str']],
+                        "Can't mix strings and bytes in path components.")
+        # regression, see #15377
+        with self.assertRaises(TypeError) as cm:
+            posixpath.join(None, 'str')
+        self.assertNotEqual("Can't mix strings and bytes in path components.",
+                            cm.exception.args[0])
 
     def test_split(self):
         self.assertEqual(posixpath.split("/foo/bar"), ("/foo", "bar"))
@@ -298,6 +312,7 @@
             with support.EnvironmentVarGuard() as env:
                 env['HOME'] = '/'
                 self.assertEqual(posixpath.expanduser("~"), "/")
+                self.assertEqual(posixpath.expanduser("~/foo"), "/foo")
                 # expanduser should fall back to using the password database
                 del env['HOME']
                 home = pwd.getpwuid(os.getuid()).pw_dir
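# The posixpath.join behaviour asserted above is easy to check interactively on an
# interpreter with this change applied: mixing str and bytes components produces the
# descriptive TypeError, while an unrelated bad type (issue #15377) does not reuse
# that message. A minimal sketch:
import posixpath

print(posixpath.join(b"/foo", b"bar"))          # b'/foo/bar'
print(posixpath.join("/foo", "bar"))            # '/foo/bar'

try:
    posixpath.join(b"bytes", "str")
except TypeError as exc:
    print(exc)      # "Can't mix strings and bytes in path components."

try:
    posixpath.join(None, "str")
except TypeError as exc:
    print(exc)      # a generic TypeError, not the mixed-types message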
diff -r 3d0686d90f55 Lib/test/test_pprint.py
--- a/Lib/test/test_pprint.py
+++ b/Lib/test/test_pprint.py
@@ -462,6 +462,16 @@
         self.assertEqual(clean(pprint.pformat(dict.fromkeys(keys))),
             '{' + ','.join('%r:None' % k for k in skeys) + '}')
 
+        # Issue 10017: TypeError on user-defined types as dict keys.
+        self.assertEqual(pprint.pformat({Unorderable: 0, 1: 0}),
+                         '{1: 0, ' + repr(Unorderable) + ': 0}')
+
+        # Issue 14998: TypeError on tuples with NoneTypes as dict keys.
+        keys = [(1,), (None,)]
+        self.assertEqual(pprint.pformat(dict.fromkeys(keys, 0)),
+                         '{%r: 0, %r: 0}' % tuple(sorted(keys, key=id)))
+
+
 class DottedPrettyPrinter(pprint.PrettyPrinter):
 
     def format(self, object, context, maxlevels, level):
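# With the pprint fix above applied, pformat no longer raises TypeError when dict keys
# cannot be ordered against each other; the exact output ordering is an implementation
# detail. A small sketch of both cases from the new asserts:
import pprint

print(pprint.pformat(dict.fromkeys([(1,), (None,)], 0)))   # e.g. {(1,): 0, (None,): 0}

class Unorderable:
    pass

print(pprint.pformat({Unorderable: 0, 1: 0}))               # no TypeError with the fix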
diff -r 3d0686d90f55 Lib/test/test_pyclbr.py
--- a/Lib/test/test_pyclbr.py
+++ b/Lib/test/test_pyclbr.py
@@ -167,6 +167,11 @@
         cm('email.parser')
         cm('test.test_pyclbr')
 
+    def test_issue_14798(self):
+        # test that ImportError is raised when the first part of a dotted name is
+        # not a package
+        self.assertRaises(ImportError, pyclbr.readmodule_ex, 'asyncore.foo')
+
 
 def test_main():
     run_unittest(PyclbrTest)
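# The new pyclbr test above can be reproduced with a one-off script, assuming the
# 3.2-era stdlib where asyncore is a plain module (not a package):
import pyclbr

try:
    pyclbr.readmodule_ex("asyncore.foo")
except ImportError as exc:
    print("ImportError as expected:", exc)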
diff -r 3d0686d90f55 Lib/test/test_pydoc.py
--- a/Lib/test/test_pydoc.py
+++ b/Lib/test/test_pydoc.py
@@ -282,6 +282,17 @@
         result, doc_loc = get_pydoc_text(xml.etree)
         self.assertEqual(doc_loc, "", "MODULE DOCS incorrectly includes a link")
 
+    def test_non_str_name(self):
+        # issue14638
+        # Treat illegal (non-str) name like no name
+        class A:
+            __name__ = 42
+        class B:
+            pass
+        adoc = pydoc.render_doc(A())
+        bdoc = pydoc.render_doc(B())
+        self.assertEqual(adoc.replace("A", "B"), bdoc)
+
     def test_not_here(self):
         missing_module = "test.i_am_not_here"
         result = str(run_pydoc(missing_module), 'ascii')
diff -r 3d0686d90f55 Lib/test/test_queue.py
--- a/Lib/test/test_queue.py
+++ b/Lib/test/test_queue.py
@@ -82,7 +82,7 @@
                 self.fail("trigger thread ended but event never set")
 
 
-class BaseQueueTest(unittest.TestCase, BlockingTestMixin):
+class BaseQueueTestMixin(BlockingTestMixin):
     def setUp(self):
         self.cum = 0
         self.cumlock = threading.Lock()
@@ -229,13 +229,13 @@
         with self.assertRaises(queue.Full):
             q.put_nowait(4)
 
-class QueueTest(BaseQueueTest):
+class QueueTest(BaseQueueTestMixin, unittest.TestCase):
     type2test = queue.Queue
 
-class LifoQueueTest(BaseQueueTest):
+class LifoQueueTest(BaseQueueTestMixin, unittest.TestCase):
     type2test = queue.LifoQueue
 
-class PriorityQueueTest(BaseQueueTest):
+class PriorityQueueTest(BaseQueueTestMixin, unittest.TestCase):
     type2test = queue.PriorityQueue
 
 
diff -r 3d0686d90f55 Lib/test/test_re.py
--- a/Lib/test/test_re.py
+++ b/Lib/test/test_re.py
@@ -1,4 +1,5 @@
-from test.support import verbose, run_unittest
+from test.support import verbose, run_unittest, gc_collect
+import io
 import re
 from re import Scanner
 import sys
@@ -16,6 +17,17 @@
 
 class ReTests(unittest.TestCase):
 
+    def test_keep_buffer(self):
+        # See bug 14212
+        b = bytearray(b'x')
+        it = re.finditer(b'a', b)
+        with self.assertRaises(BufferError):
+            b.extend(b'x'*400)
+        list(it)
+        del it
+        gc_collect()
+        b.extend(b'x'*400)
+
     def test_weakref(self):
         s = 'QabbbcR'
         x = re.compile('ab+c')
@@ -355,6 +367,32 @@
         self.assertEqual(re.search(r"\d\D\w\W\s\S",
                                    "1aa! a", re.UNICODE).group(0), "1aa! a")
 
+    def test_string_boundaries(self):
+        # See http://bugs.python.org/issue10713
+        self.assertEqual(re.search(r"\b(abc)\b", "abc").group(1),
+                         "abc")
+        # There's a word boundary at the start of a string.
+        self.assertTrue(re.match(r"\b", "abc"))
+        # A non-empty string includes a non-boundary zero-length match.
+        self.assertTrue(re.search(r"\B", "abc"))
+        # There is no non-boundary match at the start of a string.
+        self.assertFalse(re.match(r"\B", "abc"))
+        # However, an empty string contains no word boundaries, and also no
+        # non-boundaries.
+        self.assertEqual(re.search(r"\B", ""), None)
+        # This one is questionable and different from the perlre behaviour,
+        # but describes current behavior.
+        self.assertEqual(re.search(r"\b", ""), None)
+        # A single word-character string has two boundaries, but no
+        # non-boundary gaps.
+        self.assertEqual(len(re.findall(r"\b", "a")), 2)
+        self.assertEqual(len(re.findall(r"\B", "a")), 0)
+        # If there are no words, there are no boundaries
+        self.assertEqual(len(re.findall(r"\b", " ")), 0)
+        self.assertEqual(len(re.findall(r"\b", "   ")), 0)
+        # Can match around the whitespace.
+        self.assertEqual(len(re.findall(r"\B", " ")), 2)
+
     def test_bigcharset(self):
         self.assertEqual(re.match("([\u2222\u2223])",
                                   "\u2222").group(1), "\u2222")
@@ -780,6 +818,16 @@
         self.assertRaises(OverflowError, _sre.compile, "abc", 0, [long_overflow])
         self.assertRaises(TypeError, _sre.compile, {}, 0, [])
 
+    def test_compile(self):
+        # Test return value when given string and pattern as parameter
+        pattern = re.compile('random pattern')
+        self.assertIsInstance(pattern, re._pattern_type)
+        same_pattern = re.compile(pattern)
+        self.assertIsInstance(same_pattern, re._pattern_type)
+        self.assertIs(same_pattern, pattern)
+        # Test behaviour when not given a string or pattern as parameter
+        self.assertRaises(TypeError, re.compile, 0)
+
 def run_re_tests():
     from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR
     if verbose:
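# The \b / \B comments added above describe behaviour that is easy to confirm directly;
# a short sketch of the same boundary checks outside unittest:
import re

print(bool(re.match(r"\b", "abc")))     # True: word boundary at the start of a string
print(bool(re.match(r"\B", "abc")))     # False: no non-boundary at the start
print(re.search(r"\b", ""))             # None: an empty string has no boundaries
print(len(re.findall(r"\b", "a")))      # 2: a boundary on each side of the word
print(len(re.findall(r"\B", " ")))      # 2: non-boundary positions around the space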
diff -r 3d0686d90f55 Lib/test/test_runpy.py
--- a/Lib/test/test_runpy.py
+++ b/Lib/test/test_runpy.py
@@ -10,80 +10,143 @@
 from test.script_helper import (
     make_pkg, make_script, make_zip_pkg, make_zip_script, temp_dir)
 
-
+import runpy
 from runpy import _run_code, _run_module_code, run_module, run_path
 # Note: This module can't safely test _run_module_as_main as it
 # runs its tests in the current process, which would mess with the
 # real __main__ module (usually test.regrtest)
 # See test_cmd_line_script for a test that executes that code path
 
+
 # Set up the test code and expected results
+example_source = """\
+# Check basic code execution
+result = ['Top level assignment']
+def f():
+    result.append('Lower level reference')
+f()
+del f
+# Check the sys module
+import sys
+run_argv0 = sys.argv[0]
+run_name_in_sys_modules = __name__ in sys.modules
+module_in_sys_modules = (run_name_in_sys_modules and
+                         globals() is sys.modules[__name__].__dict__)
+# Check nested operation
+import runpy
+nested = runpy._run_module_code('x=1\\n', mod_name='<run>')
+"""
 
-class RunModuleCodeTest(unittest.TestCase):
+implicit_namespace = {
+    "__name__": None,
+    "__file__": None,
+    "__cached__": None,
+    "__package__": None,
+    "__doc__": None,
+}
+example_namespace =  {
+    "sys": sys,
+    "runpy": runpy,
+    "result": ["Top level assignment", "Lower level reference"],
+    "run_argv0": sys.argv[0],
+    "run_name_in_sys_modules": False,
+    "module_in_sys_modules": False,
+    "nested": dict(implicit_namespace,
+                   x=1, __name__="<run>", __loader__=None),
+}
+example_namespace.update(implicit_namespace)
+
+class CodeExecutionMixin:
+    # Issue #15230 (run_path not handling run_name correctly) highlighted a
+    # problem with the way arguments were being passed from higher level APIs
+    # down to lower level code. This mixin makes it easier to ensure full
+    # testing occurs at those upper layers as well, not just at the utility
+    # layer
+
+    def assertNamespaceMatches(self, result_ns, expected_ns):
+        """Check two namespaces match.
+
+           Ignores any unspecified interpreter created names
+        """
+        # Impls are permitted to add extra names, so filter them out
+        for k in list(result_ns):
+            if k.startswith("__") and k.endswith("__"):
+                if k not in expected_ns:
+                    result_ns.pop(k)
+                if k not in expected_ns["nested"]:
+                    result_ns["nested"].pop(k)
+        # Don't use direct dict comparison - the diffs are too hard to debug
+        self.assertEqual(set(result_ns), set(expected_ns))
+        for k in result_ns:
+            actual = (k, result_ns[k])
+            expected = (k, expected_ns[k])
+            self.assertEqual(actual, expected)
+
+    def check_code_execution(self, create_namespace, expected_namespace):
+        """Check that an interface runs the example code correctly
+
+           First argument is a callable accepting the initial globals and
+           using them to create the actual namespace
+           Second argument is the expected result
+        """
+        sentinel = object()
+        expected_ns = expected_namespace.copy()
+        run_name = expected_ns["__name__"]
+        saved_argv0 = sys.argv[0]
+        saved_mod = sys.modules.get(run_name, sentinel)
+        # Check without initial globals
+        result_ns = create_namespace(None)
+        self.assertNamespaceMatches(result_ns, expected_ns)
+        self.assertIs(sys.argv[0], saved_argv0)
+        self.assertIs(sys.modules.get(run_name, sentinel), saved_mod)
+        # And then with initial globals
+        initial_ns = {"sentinel": sentinel}
+        expected_ns["sentinel"] = sentinel
+        result_ns = create_namespace(initial_ns)
+        self.assertIsNot(result_ns, initial_ns)
+        self.assertNamespaceMatches(result_ns, expected_ns)
+        self.assertIs(sys.argv[0], saved_argv0)
+        self.assertIs(sys.modules.get(run_name, sentinel), saved_mod)
+
+
+class ExecutionLayerTestCase(unittest.TestCase, CodeExecutionMixin):
     """Unit tests for runpy._run_code and runpy._run_module_code"""
 
-    expected_result = ["Top level assignment", "Lower level reference"]
-    test_source = (
-        "# Check basic code execution\n"
-        "result = ['Top level assignment']\n"
-        "def f():\n"
-        "    result.append('Lower level reference')\n"
-        "f()\n"
-        "# Check the sys module\n"
-        "import sys\n"
-        "run_argv0 = sys.argv[0]\n"
-        "run_name_in_sys_modules = __name__ in sys.modules\n"
-        "if run_name_in_sys_modules:\n"
-        "   module_in_sys_modules = globals() is sys.modules[__name__].__dict__\n"
-        "# Check nested operation\n"
-        "import runpy\n"
-        "nested = runpy._run_module_code('x=1\\n', mod_name='<run>')\n"
-    )
-
     def test_run_code(self):
-        saved_argv0 = sys.argv[0]
-        d = _run_code(self.test_source, {})
-        self.assertEqual(d["result"], self.expected_result)
-        self.assertIs(d["__name__"], None)
-        self.assertIs(d["__file__"], None)
-        self.assertIs(d["__cached__"], None)
-        self.assertIs(d["__loader__"], None)
-        self.assertIs(d["__package__"], None)
-        self.assertIs(d["run_argv0"], saved_argv0)
-        self.assertNotIn("run_name", d)
-        self.assertIs(sys.argv[0], saved_argv0)
+        expected_ns = example_namespace.copy()
+        expected_ns.update({
+            "__loader__": None,
+        })
+        def create_ns(init_globals):
+            return _run_code(example_source, {}, init_globals)
+        self.check_code_execution(create_ns, expected_ns)
 
     def test_run_module_code(self):
-        initial = object()
-        name = "<Nonsense>"
-        file = "Some other nonsense"
-        loader = "Now you're just being silly"
-        package = '' # Treat as a top level module
-        d1 = dict(initial=initial)
-        saved_argv0 = sys.argv[0]
-        d2 = _run_module_code(self.test_source,
-                              d1,
-                              name,
-                              file,
-                              loader,
-                              package)
-        self.assertNotIn("result", d1)
-        self.assertIs(d2["initial"], initial)
-        self.assertEqual(d2["result"], self.expected_result)
-        self.assertEqual(d2["nested"]["x"], 1)
-        self.assertIs(d2["__name__"], name)
-        self.assertTrue(d2["run_name_in_sys_modules"])
-        self.assertTrue(d2["module_in_sys_modules"])
-        self.assertIs(d2["__file__"], file)
-        self.assertIs(d2["__cached__"], None)
-        self.assertIs(d2["run_argv0"], file)
-        self.assertIs(d2["__loader__"], loader)
-        self.assertIs(d2["__package__"], package)
-        self.assertIs(sys.argv[0], saved_argv0)
-        self.assertNotIn(name, sys.modules)
+        mod_name = "<Nonsense>"
+        mod_fname = "Some other nonsense"
+        mod_loader = "Now you're just being silly"
+        mod_package = '' # Treat as a top level module
+        expected_ns = example_namespace.copy()
+        expected_ns.update({
+            "__name__": mod_name,
+            "__file__": mod_fname,
+            "__loader__": mod_loader,
+            "__package__": mod_package,
+            "run_argv0": mod_fname,
+            "run_name_in_sys_modules": True,
+            "module_in_sys_modules": True,
+        })
+        def create_ns(init_globals):
+            return _run_module_code(example_source,
+                                    init_globals,
+                                    mod_name,
+                                    mod_fname,
+                                    mod_loader,
+                                    mod_package)
+        self.check_code_execution(create_ns, expected_ns)
 
 
-class RunModuleTest(unittest.TestCase):
+class RunModuleTestCase(unittest.TestCase, CodeExecutionMixin):
     """Unit tests for runpy.run_module"""
 
     def expect_import_error(self, mod_name):
@@ -107,7 +170,7 @@
         self.expect_import_error("multiprocessing")
 
     def test_library_module(self):
-        run_module("runpy")
+        self.assertEqual(run_module("runpy")["__name__"], "runpy")
 
     def _add_pkg_dir(self, pkg_dir):
         os.mkdir(pkg_dir)
@@ -119,20 +182,20 @@
     def _make_pkg(self, source, depth, mod_base="runpy_test"):
         pkg_name = "__runpy_pkg__"
         test_fname = mod_base+os.extsep+"py"
-        pkg_dir = sub_dir = tempfile.mkdtemp()
-        if verbose: print("  Package tree in:", sub_dir)
+        pkg_dir = sub_dir = os.path.realpath(tempfile.mkdtemp())
+        if verbose > 1: print("  Package tree in:", sub_dir)
         sys.path.insert(0, pkg_dir)
-        if verbose: print("  Updated sys.path:", sys.path[0])
+        if verbose > 1: print("  Updated sys.path:", sys.path[0])
         for i in range(depth):
             sub_dir = os.path.join(sub_dir, pkg_name)
             pkg_fname = self._add_pkg_dir(sub_dir)
-            if verbose: print("  Next level in:", sub_dir)
-            if verbose: print("  Created:", pkg_fname)
+            if verbose > 1: print("  Next level in:", sub_dir)
+            if verbose > 1: print("  Created:", pkg_fname)
         mod_fname = os.path.join(sub_dir, test_fname)
         mod_file = open(mod_fname, "w")
         mod_file.write(source)
         mod_file.close()
-        if verbose: print("  Created:", mod_fname)
+        if verbose > 1: print("  Created:", mod_fname)
         mod_name = (pkg_name+".")*depth + mod_base
         return pkg_dir, mod_fname, mod_name
 
@@ -140,73 +203,98 @@
         for entry in list(sys.modules):
             if entry.startswith("__runpy_pkg__"):
                 del sys.modules[entry]
-        if verbose: print("  Removed sys.modules entries")
+        if verbose > 1: print("  Removed sys.modules entries")
         del sys.path[0]
-        if verbose: print("  Removed sys.path entry")
+        if verbose > 1: print("  Removed sys.path entry")
         for root, dirs, files in os.walk(top, topdown=False):
             for name in files:
                 try:
                     os.remove(os.path.join(root, name))
                 except OSError as ex:
-                    if verbose: print(ex) # Persist with cleaning up
+                    if verbose > 1: print(ex) # Persist with cleaning up
             for name in dirs:
                 fullname = os.path.join(root, name)
                 try:
                     os.rmdir(fullname)
                 except OSError as ex:
-                    if verbose: print(ex) # Persist with cleaning up
+                    if verbose > 1: print(ex) # Persist with cleaning up
         try:
             os.rmdir(top)
-            if verbose: print("  Removed package tree")
+            if verbose > 1: print("  Removed package tree")
         except OSError as ex:
-            if verbose: print(ex) # Persist with cleaning up
+            if verbose > 1: print(ex) # Persist with cleaning up
 
-    def _check_module(self, depth):
+    def _fix_ns_for_legacy_pyc(self, ns, alter_sys):
+        char_to_add = "c" if __debug__ else "o"
+        ns["__file__"] += char_to_add
+        if alter_sys:
+            ns["run_argv0"] += char_to_add
+
+
+    def _check_module(self, depth, alter_sys=False):
         pkg_dir, mod_fname, mod_name = (
-               self._make_pkg("x=1\n", depth))
+               self._make_pkg(example_source, depth))
         forget(mod_name)
+        expected_ns = example_namespace.copy()
+        expected_ns.update({
+            "__name__": mod_name,
+            "__file__": mod_fname,
+            "__package__": mod_name.rpartition(".")[0],
+        })
+        if alter_sys:
+            expected_ns.update({
+                "run_argv0": mod_fname,
+                "run_name_in_sys_modules": True,
+                "module_in_sys_modules": True,
+            })
+        def create_ns(init_globals):
+            return run_module(mod_name, init_globals, alter_sys=alter_sys)
         try:
-            if verbose: print("Running from source:", mod_name)
-            d1 = run_module(mod_name) # Read from source
-            self.assertIn("x", d1)
-            self.assertEqual(d1["x"], 1)
-            del d1 # Ensure __loader__ entry doesn't keep file open
+            if verbose > 1: print("Running from source:", mod_name)
+            self.check_code_execution(create_ns, expected_ns)
             __import__(mod_name)
             os.remove(mod_fname)
             make_legacy_pyc(mod_fname)
             unload(mod_name)  # In case loader caches paths
-            if verbose: print("Running from compiled:", mod_name)
-            d2 = run_module(mod_name) # Read from bytecode
-            self.assertIn("x", d2)
-            self.assertEqual(d2["x"], 1)
-            del d2 # Ensure __loader__ entry doesn't keep file open
+            if verbose > 1: print("Running from compiled:", mod_name)
+            self._fix_ns_for_legacy_pyc(expected_ns, alter_sys)
+            self.check_code_execution(create_ns, expected_ns)
         finally:
             self._del_pkg(pkg_dir, depth, mod_name)
-        if verbose: print("Module executed successfully")
+        if verbose > 1: print("Module executed successfully")
 
-    def _check_package(self, depth):
+    def _check_package(self, depth, alter_sys=False):
         pkg_dir, mod_fname, mod_name = (
-               self._make_pkg("x=1\n", depth, "__main__"))
-        pkg_name, _, _ = mod_name.rpartition(".")
+               self._make_pkg(example_source, depth, "__main__"))
+        pkg_name = mod_name.rpartition(".")[0]
         forget(mod_name)
+        expected_ns = example_namespace.copy()
+        expected_ns.update({
+            "__name__": mod_name,
+            "__file__": mod_fname,
+            "__package__": pkg_name,
+        })
+        if alter_sys:
+            expected_ns.update({
+                "run_argv0": mod_fname,
+                "run_name_in_sys_modules": True,
+                "module_in_sys_modules": True,
+            })
+        def create_ns(init_globals):
+            return run_module(pkg_name, init_globals, alter_sys=alter_sys)
         try:
-            if verbose: print("Running from source:", pkg_name)
-            d1 = run_module(pkg_name) # Read from source
-            self.assertIn("x", d1)
-            self.assertTrue(d1["x"] == 1)
-            del d1 # Ensure __loader__ entry doesn't keep file open
+            if verbose > 1: print("Running from source:", pkg_name)
+            self.check_code_execution(create_ns, expected_ns)
             __import__(mod_name)
             os.remove(mod_fname)
             make_legacy_pyc(mod_fname)
             unload(mod_name)  # In case loader caches paths
-            if verbose: print("Running from compiled:", pkg_name)
-            d2 = run_module(pkg_name) # Read from bytecode
-            self.assertIn("x", d2)
-            self.assertTrue(d2["x"] == 1)
-            del d2 # Ensure __loader__ entry doesn't keep file open
+            if verbose > 1: print("Running from compiled:", pkg_name)
+            self._fix_ns_for_legacy_pyc(expected_ns, alter_sys)
+            self.check_code_execution(create_ns, expected_ns)
         finally:
             self._del_pkg(pkg_dir, depth, pkg_name)
-        if verbose: print("Package executed successfully")
+        if verbose > 1: print("Package executed successfully")
 
     def _add_relative_modules(self, base_dir, source, depth):
         if depth <= 1:
@@ -220,18 +308,18 @@
         sibling_fname = os.path.join(module_dir, "sibling.py")
         sibling_file = open(sibling_fname, "w")
         sibling_file.close()
-        if verbose: print("  Added sibling module:", sibling_fname)
+        if verbose > 1: print("  Added sibling module:", sibling_fname)
         # Add nephew module
         uncle_dir = os.path.join(parent_dir, "uncle")
         self._add_pkg_dir(uncle_dir)
-        if verbose: print("  Added uncle package:", uncle_dir)
+        if verbose > 1: print("  Added uncle package:", uncle_dir)
         cousin_dir = os.path.join(uncle_dir, "cousin")
         self._add_pkg_dir(cousin_dir)
-        if verbose: print("  Added cousin package:", cousin_dir)
+        if verbose > 1: print("  Added cousin package:", cousin_dir)
         nephew_fname = os.path.join(cousin_dir, "nephew.py")
         nephew_file = open(nephew_fname, "w")
         nephew_file.close()
-        if verbose: print("  Added nephew module:", nephew_fname)
+        if verbose > 1: print("  Added nephew module:", nephew_fname)
 
     def _check_relative_imports(self, depth, run_name=None):
         contents = r"""\
@@ -241,13 +329,17 @@
 """
         pkg_dir, mod_fname, mod_name = (
                self._make_pkg(contents, depth))
+        if run_name is None:
+            expected_name = mod_name
+        else:
+            expected_name = run_name
         try:
             self._add_relative_modules(pkg_dir, contents, depth)
             pkg_name = mod_name.rpartition('.')[0]
-            if verbose: print("Running from source:", mod_name)
+            if verbose > 1: print("Running from source:", mod_name)
             d1 = run_module(mod_name, run_name=run_name) # Read from source
-            self.assertIn("__package__", d1)
-            self.assertTrue(d1["__package__"] == pkg_name)
+            self.assertEqual(d1["__name__"], expected_name)
+            self.assertEqual(d1["__package__"], pkg_name)
             self.assertIn("sibling", d1)
             self.assertIn("nephew", d1)
             del d1 # Ensure __loader__ entry doesn't keep file open
@@ -255,77 +347,97 @@
             os.remove(mod_fname)
             make_legacy_pyc(mod_fname)
             unload(mod_name)  # In case the loader caches paths
-            if verbose: print("Running from compiled:", mod_name)
+            if verbose > 1: print("Running from compiled:", mod_name)
             d2 = run_module(mod_name, run_name=run_name) # Read from bytecode
-            self.assertIn("__package__", d2)
-            self.assertTrue(d2["__package__"] == pkg_name)
+            self.assertEqual(d2["__name__"], expected_name)
+            self.assertEqual(d2["__package__"], pkg_name)
             self.assertIn("sibling", d2)
             self.assertIn("nephew", d2)
             del d2 # Ensure __loader__ entry doesn't keep file open
         finally:
             self._del_pkg(pkg_dir, depth, mod_name)
-        if verbose: print("Module executed successfully")
+        if verbose > 1: print("Module executed successfully")
 
     def test_run_module(self):
         for depth in range(4):
-            if verbose: print("Testing package depth:", depth)
+            if verbose > 1: print("Testing package depth:", depth)
             self._check_module(depth)
 
     def test_run_package(self):
         for depth in range(1, 4):
-            if verbose: print("Testing package depth:", depth)
+            if verbose > 1: print("Testing package depth:", depth)
             self._check_package(depth)
 
+    def test_run_module_alter_sys(self):
+        for depth in range(4):
+            if verbose > 1: print("Testing package depth:", depth)
+            self._check_module(depth, alter_sys=True)
+
+    def test_run_package_alter_sys(self):
+        for depth in range(1, 4):
+            if verbose > 1: print("Testing package depth:", depth)
+            self._check_package(depth, alter_sys=True)
+
     def test_explicit_relative_import(self):
         for depth in range(2, 5):
-            if verbose: print("Testing relative imports at depth:", depth)
+            if verbose > 1: print("Testing relative imports at depth:", depth)
             self._check_relative_imports(depth)
 
     def test_main_relative_import(self):
         for depth in range(2, 5):
-            if verbose: print("Testing main relative imports at depth:", depth)
+            if verbose > 1: print("Testing main relative imports at depth:", depth)
             self._check_relative_imports(depth, "__main__")
 
+    def test_run_name(self):
+        depth = 1
+        run_name = "And now for something completely different"
+        pkg_dir, mod_fname, mod_name = (
+               self._make_pkg(example_source, depth))
+        forget(mod_name)
+        expected_ns = example_namespace.copy()
+        expected_ns.update({
+            "__name__": run_name,
+            "__file__": mod_fname,
+            "__package__": mod_name.rpartition(".")[0],
+        })
+        def create_ns(init_globals):
+            return run_module(mod_name, init_globals, run_name)
+        try:
+            self.check_code_execution(create_ns, expected_ns)
+        finally:
+            self._del_pkg(pkg_dir, depth, mod_name)
 
-class RunPathTest(unittest.TestCase):
+
+class RunPathTestCase(unittest.TestCase, CodeExecutionMixin):
     """Unit tests for runpy.run_path"""
-    # Based on corresponding tests in test_cmd_line_script
-
-    test_source = """\
-# Script may be run with optimisation enabled, so don't rely on assert
-# statements being executed
-def assertEqual(lhs, rhs):
-    if lhs != rhs:
-        raise AssertionError('%r != %r' % (lhs, rhs))
-def assertIs(lhs, rhs):
-    if lhs is not rhs:
-        raise AssertionError('%r is not %r' % (lhs, rhs))
-# Check basic code execution
-result = ['Top level assignment']
-def f():
-    result.append('Lower level reference')
-f()
-assertEqual(result, ['Top level assignment', 'Lower level reference'])
-# Check the sys module
-import sys
-assertIs(globals(), sys.modules[__name__].__dict__)
-argv0 = sys.argv[0]
-"""
 
     def _make_test_script(self, script_dir, script_basename, source=None):
         if source is None:
-            source = self.test_source
+            source = example_source
         return make_script(script_dir, script_basename, source)
 
     def _check_script(self, script_name, expected_name, expected_file,
-                            expected_argv0, expected_package):
-        result = run_path(script_name)
-        self.assertEqual(result["__name__"], expected_name)
-        self.assertEqual(result["__file__"], expected_file)
-        self.assertEqual(result["__cached__"], None)
-        self.assertIn("argv0", result)
-        self.assertEqual(result["argv0"], expected_argv0)
-        self.assertEqual(result["__package__"], expected_package)
+                            expected_argv0):
+        # First check is without run_name
+        def create_ns(init_globals):
+            return run_path(script_name, init_globals)
+        expected_ns = example_namespace.copy()
+        expected_ns.update({
+            "__name__": expected_name,
+            "__file__": expected_file,
+            "__package__": "",
+            "run_argv0": expected_argv0,
+            "run_name_in_sys_modules": True,
+            "module_in_sys_modules": True,
+        })
+        self.check_code_execution(create_ns, expected_ns)
+        # Second check makes sure run_name works in all cases
+        run_name = "prove.issue15230.is.fixed"
+        def create_ns(init_globals):
+            return run_path(script_name, init_globals, run_name)
+        expected_ns["__name__"] = run_name
+        expected_ns["__package__"] = run_name.rpartition(".")[0]
+        self.check_code_execution(create_ns, expected_ns)
 
     def _check_import_error(self, script_name, msg):
         msg = re.escape(msg)
@@ -336,7 +448,7 @@
             mod_name = 'script'
             script_name = self._make_test_script(script_dir, mod_name)
             self._check_script(script_name, "<run_path>", script_name,
-                               script_name, None)
+                               script_name)
 
     def test_script_compiled(self):
         with temp_dir() as script_dir:
@@ -345,14 +457,14 @@
             compiled_name = py_compile.compile(script_name, doraise=True)
             os.remove(script_name)
             self._check_script(compiled_name, "<run_path>", compiled_name,
-                               compiled_name, None)
+                               compiled_name)
 
     def test_directory(self):
         with temp_dir() as script_dir:
             mod_name = '__main__'
             script_name = self._make_test_script(script_dir, mod_name)
             self._check_script(script_dir, "<run_path>", script_name,
-                               script_dir, '')
+                               script_dir)
 
     def test_directory_compiled(self):
         with temp_dir() as script_dir:
@@ -362,7 +474,7 @@
             os.remove(script_name)
             legacy_pyc = make_legacy_pyc(script_name)
             self._check_script(script_dir, "<run_path>", legacy_pyc,
-                               script_dir, '')
+                               script_dir)
 
     def test_directory_error(self):
         with temp_dir() as script_dir:
@@ -376,7 +488,7 @@
             mod_name = '__main__'
             script_name = self._make_test_script(script_dir, mod_name)
             zip_name, fname = make_zip_script(script_dir, 'test_zip', script_name)
-            self._check_script(zip_name, "<run_path>", fname, zip_name, '')
+            self._check_script(zip_name, "<run_path>", fname, zip_name)
 
     def test_zipfile_compiled(self):
         with temp_dir() as script_dir:
@@ -385,7 +497,7 @@
             compiled_name = py_compile.compile(script_name, doraise=True)
             zip_name, fname = make_zip_script(script_dir, 'test_zip',
                                               compiled_name)
-            self._check_script(zip_name, "<run_path>", fname, zip_name, '')
+            self._check_script(zip_name, "<run_path>", fname, zip_name)
 
     def test_zipfile_error(self):
         with temp_dir() as script_dir:
@@ -419,9 +531,9 @@
 
 def test_main():
     run_unittest(
-                 RunModuleCodeTest,
-                 RunModuleTest,
-                 RunPathTest
+                 ExecutionLayerTestCase,
+                 RunModuleTestCase,
+                 RunPathTestCase
                  )
 
 if __name__ == "__main__":
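# Issue #15230, exercised at length above, is about run_name flowing through the
# high-level runpy APIs. A minimal sketch, assuming a throwaway script file in the
# current directory:
import runpy

with open("demo_script.py", "w") as f:
    f.write("result = 'ran as ' + __name__\n")

ns = runpy.run_path("demo_script.py")
print(ns["result"])                     # default run_name is '<run_path>'

ns = runpy.run_path("demo_script.py", run_name="prove.issue15230.is.fixed")
print(ns["__name__"], ns["__package__"])   # with the fix, __package__ is the parent part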
diff -r 3d0686d90f55 Lib/test/test_shutil.py
--- a/Lib/test/test_shutil.py
+++ b/Lib/test/test_shutil.py
@@ -8,6 +8,7 @@
 import os
 import os.path
 import functools
+import errno
 from test import support
 from test.support import TESTFN
 from os.path import splitdrive
@@ -307,6 +308,35 @@
         finally:
             shutil.rmtree(TESTFN, ignore_errors=True)
 
+    @unittest.skipUnless(hasattr(os, 'chflags') and
+                         hasattr(errno, 'EOPNOTSUPP') and
+                         hasattr(errno, 'ENOTSUP'),
+                         "requires os.chflags, EOPNOTSUPP & ENOTSUP")
+    def test_copystat_handles_harmless_chflags_errors(self):
+        tmpdir = self.mkdtemp()
+        file1 = os.path.join(tmpdir, 'file1')
+        file2 = os.path.join(tmpdir, 'file2')
+        self.write_file(file1, 'xxx')
+        self.write_file(file2, 'xxx')
+
+        def make_chflags_raiser(err):
+            ex = OSError()
+
+            def _chflags_raiser(path, flags):
+                ex.errno = err
+                raise ex
+            return _chflags_raiser
+        old_chflags = os.chflags
+        try:
+            for err in errno.EOPNOTSUPP, errno.ENOTSUP:
+                os.chflags = make_chflags_raiser(err)
+                shutil.copystat(file1, file2)
+            # assert other errors break it
+            os.chflags = make_chflags_raiser(errno.EOPNOTSUPP + errno.ENOTSUP)
+            self.assertRaises(OSError, shutil.copystat, file1, file2)
+        finally:
+            os.chflags = old_chflags
+
     @support.skip_unless_symlink
     def test_dont_copy_file_onto_symlink_to_itself(self):
         # bug 851123.
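# The copystat test above works by monkey-patching os.chflags; the same trick can be
# used in a quick script. Note that the chflags branch in copystat only runs on platforms
# whose stat results carry st_flags (e.g. BSD/OS X), so this sketch is a no-op elsewhere:
import errno
import os
import shutil

for name in ("copystat_a.txt", "copystat_b.txt"):       # hypothetical scratch files
    with open(name, "w") as f:
        f.write("x")

def fake_chflags(path, flags):
    # Simulate a filesystem that refuses flag changes.
    raise OSError(errno.EOPNOTSUPP, os.strerror(errno.EOPNOTSUPP), path)

old_chflags = getattr(os, "chflags", None)
os.chflags = fake_chflags
try:
    shutil.copystat("copystat_a.txt", "copystat_b.txt")  # EOPNOTSUPP is swallowed
finally:
    if old_chflags is not None:
        os.chflags = old_chflags
    else:
        del os.chflags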
diff -r 3d0686d90f55 Lib/test/test_socket.py
--- a/Lib/test/test_socket.py
+++ b/Lib/test/test_socket.py
@@ -951,6 +951,7 @@
         f = self.cli_conn.detach()
         self.assertEqual(f, fileno)
         # cli_conn cannot be used anymore...
+        self.assertTrue(self.cli_conn._closed)
         self.assertRaises(socket.error, self.cli_conn.recv, 1024)
         self.cli_conn.close()
         # ...but we can create another socket using the (still open)
diff -r 3d0686d90f55 Lib/test/test_socketserver.py
--- a/Lib/test/test_socketserver.py
+++ b/Lib/test/test_socketserver.py
@@ -8,6 +8,8 @@
 import select
 import signal
 import socket
+import select
+import errno
 import tempfile
 import unittest
 import socketserver
@@ -32,8 +34,11 @@
     if hasattr(signal, 'alarm'):
         signal.alarm(n)
 
+# Remember real select() to avoid interference with mocking
+_real_select = select.select
+
 def receive(sock, n, timeout=20):
-    r, w, x = select.select([sock], [], [], timeout)
+    r, w, x = _real_select([sock], [], [], timeout)
     if sock in r:
         return sock.recv(n)
     else:
@@ -226,6 +231,38 @@
                                 socketserver.DatagramRequestHandler,
                                 self.dgram_examine)
 
+    @contextlib.contextmanager
+    def mocked_select_module(self):
+        """Mocks the select.select() call to raise EINTR for first call"""
+        old_select = select.select
+
+        class MockSelect:
+            def __init__(self):
+                self.called = 0
+
+            def __call__(self, *args):
+                self.called += 1
+                if self.called == 1:
+                    # raise the exception on first call
+                    raise select.error(errno.EINTR, os.strerror(errno.EINTR))
+                else:
+                    # Return real select value for subsequent calls
+                    return old_select(*args)
+
+        select.select = MockSelect()
+        try:
+            yield select.select
+        finally:
+            select.select = old_select
+
+    def test_InterruptServerSelectCall(self):
+        with self.mocked_select_module() as mock_select:
+            pid = self.run_server(socketserver.TCPServer,
+                                  socketserver.StreamRequestHandler,
+                                  self.stream_examine)
+            # Make sure select was called again:
+            self.assertGreater(mock_select.called, 1)
+
     # Alas, on Linux (at least) recvfrom() doesn't return a meaningful
     # client address so this cannot work:
 
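# The MockSelect helper above simulates an interrupted system call; the retry logic it
# exercises (issue #7978) can be sketched on its own, assuming a POSIX select():
import errno
import os
import select

_real_select = select.select
_failed_once = [False]

def flaky_select(*args):
    # Fail exactly once with EINTR, then defer to the real select().
    if not _failed_once[0]:
        _failed_once[0] = True
        raise select.error(errno.EINTR, os.strerror(errno.EINTR))
    return _real_select(*args)

def retrying_select(*args):
    while True:
        try:
            return flaky_select(*args)
        except select.error as err:
            if err.args[0] != errno.EINTR:
                raise

print(retrying_select([], [], [], 0))   # ([], [], []) after one silent retry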
diff -r 3d0686d90f55 Lib/test/test_ssl.py
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -713,13 +713,18 @@
         # SHA256 was added in OpenSSL 0.9.8
         if ssl.OPENSSL_VERSION_INFO < (0, 9, 8, 0, 15):
             self.skipTest("SHA256 not available on %r" % ssl.OPENSSL_VERSION)
+        # sha256.tbs-internet.com needs SNI to use the correct certificate
+        if not ssl.HAS_SNI:
+            self.skipTest("SNI needed for this test")
         # https://sha2.hboeck.de/ was used until 2011-01-08 (no route to host)
         remote = ("sha256.tbs-internet.com", 443)
         sha256_cert = os.path.join(os.path.dirname(__file__), "sha256.pem")
         with support.transient_internet("sha256.tbs-internet.com"):
-            s = ssl.wrap_socket(socket.socket(socket.AF_INET),
-                                cert_reqs=ssl.CERT_REQUIRED,
-                                ca_certs=sha256_cert,)
+            ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+            ctx.verify_mode = ssl.CERT_REQUIRED
+            ctx.load_verify_locations(sha256_cert)
+            s = ctx.wrap_socket(socket.socket(socket.AF_INET),
+                                server_hostname="sha256.tbs-internet.com")
             try:
                 s.connect(remote)
                 if support.verbose:
diff -r 3d0686d90f55 Lib/test/test_string.py
--- a/Lib/test/test_string.py
+++ b/Lib/test/test_string.py
@@ -26,6 +26,18 @@
         self.assertEqual(string.capwords('\taBc\tDeF\t'), 'Abc Def')
         self.assertEqual(string.capwords('\taBc\tDeF\t', '\t'), '\tAbc\tDef\t')
 
+    def test_conversion_specifiers(self):
+        fmt = string.Formatter()
+        self.assertEqual(fmt.format("-{arg!r}-", arg='test'), "-'test'-")
+        self.assertEqual(fmt.format("{0!s}", 'test'), 'test')
+        self.assertRaises(ValueError, fmt.format, "{0!h}", 'test')
+        # issue13579
+        self.assertEqual(fmt.format("{0!a}", 42), '42')
+        self.assertEqual(fmt.format("{0!a}",  string.ascii_letters),
+            "'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'")
+        self.assertEqual(fmt.format("{0!a}",  chr(255)), "'\\xff'")
+        self.assertEqual(fmt.format("{0!a}",  chr(256)), "'\\u0100'")
+
     def test_formatter(self):
         fmt = string.Formatter()
         self.assertEqual(fmt.format("foo"), "foo")
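# The Formatter assertions above map onto the three supported conversion flags
# (!s, !r and, since issue #13579, !a); unknown flags raise ValueError. A quick sketch:
import string

fmt = string.Formatter()
print(fmt.format("-{arg!r}-", arg="test"))      # -'test'-
print(fmt.format("{0!s}", "test"))              # test
print(fmt.format("{0!a}", chr(255)))            # '\xff'  (ascii() of the argument)
try:
    fmt.format("{0!h}", "test")
except ValueError as exc:
    print(exc)                                  # unknown conversion specifier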
diff -r 3d0686d90f55 Lib/test/test_strptime.py
--- a/Lib/test/test_strptime.py
+++ b/Lib/test/test_strptime.py
@@ -38,9 +38,9 @@
         comparison = testing[self.time_tuple[tuple_position]]
         self.assertIn(strftime_output, testing,
                       "%s: not found in tuple" % error_msg)
-        self.assertTrue(comparison == strftime_output,
-                        "%s: position within tuple incorrect; %s != %s" %
-                        (error_msg, comparison, strftime_output))
+        self.assertEqual(comparison, strftime_output,
+                         "%s: position within tuple incorrect; %s != %s" %
+                         (error_msg, comparison, strftime_output))
 
     def test_weekday(self):
         # Make sure that full and abbreviated weekday names are correct in
@@ -65,8 +65,8 @@
                       "AM/PM representation not in tuple")
         if self.time_tuple[3] < 12: position = 0
         else: position = 1
-        self.assertTrue(strftime_output == self.LT_ins.am_pm[position],
-                        "AM/PM representation in the wrong position within the tuple")
+        self.assertEqual(self.LT_ins.am_pm[position], strftime_output,
+                         "AM/PM representation in the wrong position within the tuple")
 
     def test_timezone(self):
         # Make sure timezone is correct
@@ -86,17 +86,14 @@
         #  output.
         magic_date = (1999, 3, 17, 22, 44, 55, 2, 76, 0)
         strftime_output = time.strftime("%c", magic_date)
-        self.assertTrue(strftime_output == time.strftime(self.LT_ins.LC_date_time,
-                                                         magic_date),
-                        "LC_date_time incorrect")
+        self.assertEqual(time.strftime(self.LT_ins.LC_date_time, magic_date),
+                         strftime_output, "LC_date_time incorrect")
         strftime_output = time.strftime("%x", magic_date)
-        self.assertTrue(strftime_output == time.strftime(self.LT_ins.LC_date,
-                                                         magic_date),
-                        "LC_date incorrect")
+        self.assertEqual(time.strftime(self.LT_ins.LC_date, magic_date),
+                         strftime_output, "LC_date incorrect")
         strftime_output = time.strftime("%X", magic_date)
-        self.assertTrue(strftime_output == time.strftime(self.LT_ins.LC_time,
-                                                         magic_date),
-                        "LC_time incorrect")
+        self.assertEqual(time.strftime(self.LT_ins.LC_time, magic_date),
+                         strftime_output, "LC_time incorrect")
         LT = _strptime.LocaleTime()
         LT.am_pm = ('', '')
         self.assertTrue(LT.LC_time, "LocaleTime's LC directives cannot handle "
@@ -168,8 +165,8 @@
         # Fixes bug #661354
         test_locale = _strptime.LocaleTime()
         test_locale.timezone = (frozenset(), frozenset())
-        self.assertTrue(_strptime.TimeRE(test_locale).pattern("%Z") == '',
-                        "with timezone == ('',''), TimeRE().pattern('%Z') != ''")
+        self.assertEqual(_strptime.TimeRE(test_locale).pattern("%Z"), '',
+                         "with timezone == ('',''), TimeRE().pattern('%Z') != ''")
 
     def test_matching_with_escapes(self):
         # Make sure a format that requires escaping of characters works
@@ -195,7 +192,7 @@
         # so as to not allow to subpatterns to end up next to each other and
         # "steal" characters from each other.
         pattern = self.time_re.pattern('%j %H')
-        self.assertTrue(not re.match(pattern, "180"))
+        self.assertFalse(re.match(pattern, "180"))
         self.assertTrue(re.match(pattern, "18 0"))
 
 
@@ -381,6 +378,14 @@
         need_escaping = ".^$*+?{}\[]|)("
         self.assertTrue(_strptime._strptime_time(need_escaping, need_escaping))
 
+    def test_feb29_on_leap_year_without_year(self):
+        time.strptime("Feb 29", "%b %d")
+
+    def test_mar1_comes_after_feb29_even_when_omitting_the_year(self):
+        self.assertLess(
+                time.strptime("Feb 29", "%b %d"),
+                time.strptime("Mar 1", "%b %d"))
+
 class Strptime12AMPMTests(unittest.TestCase):
     """Test a _strptime regression in '%I %p' at 12 noon (12 PM)"""
 
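# The two new strptime tests cover issue #14157 (parsing "Feb 29" with no year given).
# Assuming a C/English locale for the month abbreviations, the behaviour is easy to
# confirm; Mar 1 still compares after Feb 29 because the month field is compared first:
import time

feb29 = time.strptime("Feb 29", "%b %d")    # raised ValueError before the fix
mar1 = time.strptime("Mar 1", "%b %d")
print(feb29.tm_mon, feb29.tm_mday)           # 2 29
print(feb29 < mar1)                          # True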
diff -r 3d0686d90f55 Lib/test/test_struct.py
--- a/Lib/test/test_struct.py
+++ b/Lib/test/test_struct.py
@@ -3,7 +3,7 @@
 import struct
 import sys
 
-from test.support import run_unittest
+from test import support
 
 ISBIGENDIAN = sys.byteorder == "big"
 IS32BIT = sys.maxsize == 0x7fffffff
@@ -30,6 +30,7 @@
         return string_reverse(value)
 
 class StructTest(unittest.TestCase):
+
     def test_isbigendian(self):
         self.assertEqual((struct.pack('=i', 1)[0] == 0), ISBIGENDIAN)
 
@@ -556,8 +557,29 @@
         s = struct.Struct('i')
         s.__init__('ii')
 
+    def check_sizeof(self, format_str, number_of_codes):
+        # The size of 'PyStructObject'
+        totalsize = support.calcobjsize('5P')
+        # The size taken up by the 'formatcode' dynamic array
+        totalsize += struct.calcsize('3P') * (number_of_codes + 1)
+        support.check_sizeof(self, struct.Struct(format_str), totalsize)
+
+    @support.cpython_only
+    def test__sizeof__(self):
+        for code in integer_codes:
+            self.check_sizeof(code, 1)
+        self.check_sizeof('BHILfdspP', 9)
+        self.check_sizeof('B' * 1234, 1234)
+        self.check_sizeof('fd', 2)
+        self.check_sizeof('xxxxxxxxxxxxxx', 0)
+        self.check_sizeof('100H', 100)
+        self.check_sizeof('187s', 1)
+        self.check_sizeof('20p', 1)
+        self.check_sizeof('0s', 1)
+        self.check_sizeof('0c', 0)
+
 def test_main():
-    run_unittest(StructTest)
+    support.run_unittest(StructTest)
 
 if __name__ == '__main__':
     test_main()
diff -r 3d0686d90f55 Lib/test/test_subprocess.py
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -4,6 +4,7 @@
 import sys
 import signal
 import io
+import locale
 import os
 import errno
 import tempfile
@@ -476,21 +477,22 @@
     def test_universal_newlines(self):
         p = subprocess.Popen([sys.executable, "-c",
                               'import sys,os;' + SETBINARY +
-                              'sys.stdout.write(sys.stdin.readline());'
-                              'sys.stdout.flush();'
-                              'sys.stdout.write("line2\\n");'
-                              'sys.stdout.flush();'
-                              'sys.stdout.write(sys.stdin.read());'
-                              'sys.stdout.flush();'
-                              'sys.stdout.write("line4\\n");'
-                              'sys.stdout.flush();'
-                              'sys.stdout.write("line5\\r\\n");'
-                              'sys.stdout.flush();'
-                              'sys.stdout.write("line6\\r");'
-                              'sys.stdout.flush();'
-                              'sys.stdout.write("\\nline7");'
-                              'sys.stdout.flush();'
-                              'sys.stdout.write("\\nline8");'],
+                              'buf = sys.stdout.buffer;'
+                              'buf.write(sys.stdin.readline().encode());'
+                              'buf.flush();'
+                              'buf.write(b"line2\\n");'
+                              'buf.flush();'
+                              'buf.write(sys.stdin.read().encode());'
+                              'buf.flush();'
+                              'buf.write(b"line4\\n");'
+                              'buf.flush();'
+                              'buf.write(b"line5\\r\\n");'
+                              'buf.flush();'
+                              'buf.write(b"line6\\r");'
+                              'buf.flush();'
+                              'buf.write(b"\\nline7");'
+                              'buf.flush();'
+                              'buf.write(b"\\nline8");'],
                              stdin=subprocess.PIPE,
                              stdout=subprocess.PIPE,
                              universal_newlines=1)
@@ -510,17 +512,18 @@
         # universal newlines through communicate()
         p = subprocess.Popen([sys.executable, "-c",
                               'import sys,os;' + SETBINARY +
-                              'sys.stdout.write("line2\\n");'
-                              'sys.stdout.flush();'
-                              'sys.stdout.write("line4\\n");'
-                              'sys.stdout.flush();'
-                              'sys.stdout.write("line5\\r\\n");'
-                              'sys.stdout.flush();'
-                              'sys.stdout.write("line6\\r");'
-                              'sys.stdout.flush();'
-                              'sys.stdout.write("\\nline7");'
-                              'sys.stdout.flush();'
-                              'sys.stdout.write("\\nline8");'],
+                              'buf = sys.stdout.buffer;'
+                              'buf.write(b"line2\\n");'
+                              'buf.flush();'
+                              'buf.write(b"line4\\n");'
+                              'buf.flush();'
+                              'buf.write(b"line5\\r\\n");'
+                              'buf.flush();'
+                              'buf.write(b"line6\\r");'
+                              'buf.flush();'
+                              'buf.write(b"\\nline7");'
+                              'buf.flush();'
+                              'buf.write(b"\\nline8");'],
                              stderr=subprocess.PIPE,
                              stdout=subprocess.PIPE,
                              universal_newlines=1)
@@ -546,6 +549,50 @@
         (stdout, stderr) = p.communicate("line1\nline3\n")
         self.assertEqual(p.returncode, 0)
 
+    def test_universal_newlines_communicate_input_none(self):
+        # Test communicate(input=None) with universal newlines.
+        #
+        # We set stdout to PIPE because, as of this writing, a different
+        # code path is tested when the number of pipes is zero or one.
+        p = subprocess.Popen([sys.executable, "-c", "pass"],
+                             stdin=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             universal_newlines=True)
+        p.communicate()
+        self.assertEqual(p.returncode, 0)
+
+    def test_universal_newlines_communicate_encodings(self):
+        # Check that universal newlines mode works for various encodings,
+        # in particular for encodings in the UTF-16 and UTF-32 families.
+        # See issue #15595.
+        #
+        # UTF-16 and UTF-32-BE are sufficient to check both with BOM and
+        # without, and UTF-16 and UTF-32.
+        for encoding in ['utf-16', 'utf-32-be']:
+            old_getpreferredencoding = locale.getpreferredencoding
+            # Indirectly via io.TextIOWrapper, Popen() defaults to
+            # locale.getpreferredencoding(False) and earlier in Python 3.2 to
+            # locale.getpreferredencoding().
+            def getpreferredencoding(do_setlocale=True):
+                return encoding
+            code = ("import sys; "
+                    r"sys.stdout.buffer.write('1\r\n2\r3\n4'.encode('%s'))" %
+                    encoding)
+            args = [sys.executable, '-c', code]
+            try:
+                locale.getpreferredencoding = getpreferredencoding
+                # We set stdin to be non-None because, as of this writing,
+                # a different code path is used when the number of pipes is
+                # zero or one.
+                popen = subprocess.Popen(args, universal_newlines=True,
+                                         stdin=subprocess.PIPE,
+                                         stdout=subprocess.PIPE)
+                stdout, stderr = popen.communicate(input='')
+            finally:
+                locale.getpreferredencoding = old_getpreferredencoding
+
+            self.assertEqual(stdout, '1\n2\n3\n4')
+
     def test_no_leaking(self):
         # Make sure we leak no resources
         if not mswindows:
@@ -989,6 +1036,27 @@
         getattr(p, method)(*args)
         return p
 
+    def _kill_dead_process(self, method, *args):
+        # Do not inherit file handles from the parent.
+        # It should fix failures on some platforms.
+        p = subprocess.Popen([sys.executable, "-c", """if 1:
+                             import sys, time
+                             sys.stdout.write('x\\n')
+                             sys.stdout.flush()
+                             """],
+                             close_fds=True,
+                             stdin=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        # Wait for the interpreter to be completely initialized before
+        # sending any signal.
+        p.stdout.read(1)
+        # The process should end after this
+        time.sleep(1)
+        # This shouldn't raise even though the child is now dead
+        getattr(p, method)(*args)
+        p.communicate()
+
     def test_send_signal(self):
         p = self._kill_process('send_signal', signal.SIGINT)
         _, stderr = p.communicate()
@@ -1007,6 +1075,18 @@
         self.assertStderrEqual(stderr, b'')
         self.assertEqual(p.wait(), -signal.SIGTERM)
 
+    def test_send_signal_dead(self):
+        # Sending a signal to a dead process
+        self._kill_dead_process('send_signal', signal.SIGINT)
+
+    def test_kill_dead(self):
+        # Killing a dead process
+        self._kill_dead_process('kill')
+
+    def test_terminate_dead(self):
+        # Terminating a dead process
+        self._kill_dead_process('terminate')
+
     def check_close_std_fds(self, fds):
         # Issue #9905: test that subprocess pipes still work properly with
         # some standard fds closed
@@ -1568,6 +1648,31 @@
         returncode = p.wait()
         self.assertNotEqual(returncode, 0)
 
+    def _kill_dead_process(self, method, *args):
+        p = subprocess.Popen([sys.executable, "-c", """if 1:
+                             import sys, time
+                             sys.stdout.write('x\\n')
+                             sys.stdout.flush()
+                             sys.exit(42)
+                             """],
+                             stdin=subprocess.PIPE,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        self.addCleanup(p.stdout.close)
+        self.addCleanup(p.stderr.close)
+        self.addCleanup(p.stdin.close)
+        # Wait for the interpreter to be completely initialized before
+        # sending any signal.
+        p.stdout.read(1)
+        # The process should end after this
+        time.sleep(1)
+        # This shouldn't raise even though the child is now dead
+        getattr(p, method)(*args)
+        _, stderr = p.communicate()
+        self.assertStderrEqual(stderr, b'')
+        rc = p.wait()
+        self.assertEqual(rc, 42)
+
     def test_send_signal(self):
         self._kill_process('send_signal', signal.SIGTERM)
 
@@ -1577,6 +1682,15 @@
     def test_terminate(self):
         self._kill_process('terminate')
 
+    def test_send_signal_dead(self):
+        self._kill_dead_process('send_signal', signal.SIGTERM)
+
+    def test_kill_dead(self):
+        self._kill_dead_process('kill')
+
+    def test_terminate_dead(self):
+        self._kill_dead_process('terminate')
+
 
 # The module says:
 #   "NB This only works (and is only relevant) for UNIX."
diff -r 3d0686d90f55 Lib/test/test_sys.py
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -523,7 +523,8 @@
         p = subprocess.Popen([sys.executable, "-c", 'print(chr(0xa2))'],
                              stdout = subprocess.PIPE, env=env)
         out = p.communicate()[0].strip()
-        self.assertEqual(out, "\xa2\n".encode("cp424"))
+        expected = ("\xa2" + os.linesep).encode("cp424")
+        self.assertEqual(out, expected)
 
         env["PYTHONIOENCODING"] = "ascii:replace"
         p = subprocess.Popen([sys.executable, "-c", 'print(chr(0xa2))'],
@@ -570,22 +571,8 @@
 
 class SizeofTest(unittest.TestCase):
 
-    TPFLAGS_HAVE_GC = 1<<14
-    TPFLAGS_HEAPTYPE = 1<<9
-
     def setUp(self):
-        self.c = len(struct.pack('c', b' '))
-        self.H = len(struct.pack('H', 0))
-        self.i = len(struct.pack('i', 0))
-        self.l = len(struct.pack('l', 0))
-        self.P = len(struct.pack('P', 0))
-        # due to missing size_t information from struct, it is assumed that
-        # sizeof(Py_ssize_t) = sizeof(void*)
-        self.header = 'PP'
-        self.vheader = self.header + 'P'
-        if hasattr(sys, "gettotalrefcount"):
-            self.header += '2P'
-            self.vheader += '2P'
+        self.P = struct.calcsize('P')
         self.longdigit = sys.int_info.sizeof_digit
         import _testcapi
         self.gc_headsize = _testcapi.SIZEOF_PYGC_HEAD
@@ -595,123 +582,102 @@
         self.file.close()
         test.support.unlink(test.support.TESTFN)
 
-    def check_sizeof(self, o, size):
-        result = sys.getsizeof(o)
-        # add GC header size
-        if ((type(o) == type) and (o.__flags__ & self.TPFLAGS_HEAPTYPE) or\
-           ((type(o) != type) and (type(o).__flags__ & self.TPFLAGS_HAVE_GC))):
-            size += self.gc_headsize
-        msg = 'wrong size for %s: got %d, expected %d' \
-              % (type(o), result, size)
-        self.assertEqual(result, size, msg)
-
-    def calcsize(self, fmt):
-        """Wrapper around struct.calcsize which enforces the alignment of the
-        end of a structure to the alignment requirement of pointer.
-
-        Note: This wrapper should only be used if a pointer member is included
-        and no member with a size larger than a pointer exists.
-        """
-        return struct.calcsize(fmt + '0P')
+    check_sizeof = test.support.check_sizeof
 
     def test_gc_head_size(self):
         # Check that the gc header size is added to objects tracked by the gc.
-        h = self.header
-        vh = self.vheader
-        size = self.calcsize
+        vsize = test.support.calcvobjsize
         gc_header_size = self.gc_headsize
         # bool objects are not gc tracked
-        self.assertEqual(sys.getsizeof(True), size(vh) + self.longdigit)
+        self.assertEqual(sys.getsizeof(True), vsize('') + self.longdigit)
         # but lists are
-        self.assertEqual(sys.getsizeof([]), size(vh + 'PP') + gc_header_size)
+        self.assertEqual(sys.getsizeof([]), vsize('PP') + gc_header_size)
 
     def test_default(self):
-        h = self.header
-        vh = self.vheader
-        size = self.calcsize
-        self.assertEqual(sys.getsizeof(True), size(vh) + self.longdigit)
-        self.assertEqual(sys.getsizeof(True, -1), size(vh) + self.longdigit)
+        vsize = test.support.calcvobjsize
+        self.assertEqual(sys.getsizeof(True), vsize('') + self.longdigit)
+        self.assertEqual(sys.getsizeof(True, -1), vsize('') + self.longdigit)
 
     def test_objecttypes(self):
         # check all types defined in Objects/
-        h = self.header
-        vh = self.vheader
-        size = self.calcsize
+        size = test.support.calcobjsize
+        vsize = test.support.calcvobjsize
         check = self.check_sizeof
         # bool
-        check(True, size(vh) + self.longdigit)
+        check(True, vsize('') + self.longdigit)
         # buffer
         # XXX
         # builtin_function_or_method
-        check(len, size(h + '3P'))
+        check(len, size('3P')) # XXX check layout
         # bytearray
         samples = [b'', b'u'*100000]
         for sample in samples:
             x = bytearray(sample)
-            check(x, size(vh + 'iPP') + x.__alloc__() * self.c)
+            check(x, vsize('iPP') + x.__alloc__())
         # bytearray_iterator
-        check(iter(bytearray()), size(h + 'PP'))
+        check(iter(bytearray()), size('PP'))
         # cell
         def get_cell():
             x = 42
             def inner():
                 return x
             return inner
-        check(get_cell().__closure__[0], size(h + 'P'))
+        check(get_cell().__closure__[0], size('P'))
         # code
-        check(get_cell().__code__, size(h + '5i8Pi3P'))
+        check(get_cell().__code__, size('5i8Pi3P'))
         # complex
-        check(complex(0,1), size(h + '2d'))
+        check(complex(0,1), size('2d'))
         # method_descriptor (descriptor object)
-        check(str.lower, size(h + '2PP'))
+        check(str.lower, size('2PP'))
         # classmethod_descriptor (descriptor object)
         # XXX
         # member_descriptor (descriptor object)
         import datetime
-        check(datetime.timedelta.days, size(h + '2PP'))
+        check(datetime.timedelta.days, size('2PP'))
         # getset_descriptor (descriptor object)
         import collections
-        check(collections.defaultdict.default_factory, size(h + '2PP'))
+        check(collections.defaultdict.default_factory, size('2PP'))
         # wrapper_descriptor (descriptor object)
-        check(int.__add__, size(h + '2P2P'))
+        check(int.__add__, size('2P2P'))
         # method-wrapper (descriptor object)
-        check({}.__iter__, size(h + '2P'))
+        check({}.__iter__, size('2P'))
         # dict
-        check({}, size(h + '3P2P' + 8*'P2P'))
+        check({}, size('3P2P' + 8*'P2P'))
         longdict = {1:1, 2:2, 3:3, 4:4, 5:5, 6:6, 7:7, 8:8}
-        check(longdict, size(h + '3P2P' + 8*'P2P') + 16*size('P2P'))
+        check(longdict, size('3P2P' + 8*'P2P') + 16*struct.calcsize('P2P'))
         # dictionary-keyiterator
-        check({}.keys(), size(h + 'P'))
+        check({}.keys(), size('P'))
         # dictionary-valueiterator
-        check({}.values(), size(h + 'P'))
+        check({}.values(), size('P'))
         # dictionary-itemiterator
-        check({}.items(), size(h + 'P'))
+        check({}.items(), size('P'))
+        # dictionary iterator
+        check(iter({}), size('P2PPP'))
         # dictproxy
         class C(object): pass
-        check(C.__dict__, size(h + 'P'))
+        check(C.__dict__, size('P'))
         # BaseException
-        check(BaseException(), size(h + '5P'))
+        check(BaseException(), size('5P'))
         # UnicodeEncodeError
-        check(UnicodeEncodeError("", "", 0, 0, ""), size(h + '5P 2P2PP'))
+        check(UnicodeEncodeError("", "", 0, 0, ""), size('5P 2P2PP'))
         # UnicodeDecodeError
-        # XXX
-#        check(UnicodeDecodeError("", "", 0, 0, ""), size(h + '5P2PP'))
+        check(UnicodeDecodeError("", b"", 0, 0, ""), size('5P 2P2PP'))
         # UnicodeTranslateError
-        check(UnicodeTranslateError("", 0, 1, ""), size(h + '5P 2P2PP'))
+        check(UnicodeTranslateError("", 0, 1, ""), size('5P 2P2PP'))
         # ellipses
-        check(Ellipsis, size(h + ''))
+        check(Ellipsis, size(''))
         # EncodingMap
         import codecs, encodings.iso8859_3
         x = codecs.charmap_build(encodings.iso8859_3.decoding_table)
-        check(x, size(h + '32B2iB'))
+        check(x, size('32B2iB'))
         # enumerate
-        check(enumerate([]), size(h + 'l3P'))
+        check(enumerate([]), size('l3P'))
         # reverse
-        check(reversed(''), size(h + 'PP'))
+        check(reversed(''), size('PP'))
         # float
-        check(float(0), size(h + 'd'))
+        check(float(0), size('d'))
         # sys.floatinfo
-        check(sys.float_info, size(vh) + self.P * len(sys.float_info))
+        check(sys.float_info, vsize('') + self.P * len(sys.float_info))
         # frame
         import inspect
         CO_MAXBLOCKS = 20
@@ -720,10 +686,10 @@
         nfrees = len(x.f_code.co_freevars)
         extras = x.f_code.co_stacksize + x.f_code.co_nlocals +\
                   ncells + nfrees - 1
-        check(x, size(vh + '12P3i' + CO_MAXBLOCKS*'3i' + 'P' + extras*'P'))
+        check(x, vsize('12P3i' + CO_MAXBLOCKS*'3i' + 'P' + extras*'P'))
         # function
         def func(): pass
-        check(func, size(h + '11P'))
+        check(func, size('11P'))
         class c():
             @staticmethod
             def foo():
@@ -732,68 +698,68 @@
             def bar(cls):
                 pass
             # staticmethod
-            check(foo, size(h + 'P'))
+            check(foo, size('P'))
             # classmethod
-            check(bar, size(h + 'P'))
+            check(bar, size('P'))
         # generator
         def get_gen(): yield 1
-        check(get_gen(), size(h + 'Pi2P'))
+        check(get_gen(), size('Pi2P'))
         # iterator
-        check(iter('abc'), size(h + 'lP'))
+        check(iter('abc'), size('lP'))
         # callable-iterator
         import re
-        check(re.finditer('',''), size(h + '2P'))
+        check(re.finditer('',''), size('2P'))
         # list
         samples = [[], [1,2,3], ['1', '2', '3']]
         for sample in samples:
-            check(sample, size(vh + 'PP') + len(sample)*self.P)
+            check(sample, vsize('PP') + len(sample)*self.P)
         # sortwrapper (list)
         # XXX
         # cmpwrapper (list)
         # XXX
         # listiterator (list)
-        check(iter([]), size(h + 'lP'))
+        check(iter([]), size('lP'))
         # listreverseiterator (list)
-        check(reversed([]), size(h + 'lP'))
+        check(reversed([]), size('lP'))
         # long
-        check(0, size(vh))
-        check(1, size(vh) + self.longdigit)
-        check(-1, size(vh) + self.longdigit)
+        check(0, vsize(''))
+        check(1, vsize('') + self.longdigit)
+        check(-1, vsize('') + self.longdigit)
         PyLong_BASE = 2**sys.int_info.bits_per_digit
-        check(int(PyLong_BASE), size(vh) + 2*self.longdigit)
-        check(int(PyLong_BASE**2-1), size(vh) + 2*self.longdigit)
-        check(int(PyLong_BASE**2), size(vh) + 3*self.longdigit)
-        # memory
-        check(memoryview(b''), size(h + 'PP2P2i7P'))
+        check(int(PyLong_BASE), vsize('') + 2*self.longdigit)
+        check(int(PyLong_BASE**2-1), vsize('') + 2*self.longdigit)
+        check(int(PyLong_BASE**2), vsize('') + 3*self.longdigit)
+        # memoryview
+        check(memoryview(b''), size('PP2P2i7P'))
         # module
-        check(unittest, size(h + '3P'))
+        check(unittest, size('3P'))
         # None
-        check(None, size(h + ''))
+        check(None, size(''))
         # NotImplementedType
-        check(NotImplemented, size(h))
+        check(NotImplemented, size(''))
         # object
-        check(object(), size(h + ''))
+        check(object(), size(''))
         # property (descriptor object)
         class C(object):
             def getx(self): return self.__x
             def setx(self, value): self.__x = value
             def delx(self): del self.__x
             x = property(getx, setx, delx, "")
-            check(x, size(h + '4Pi'))
+            check(x, size('4Pi'))
         # PyCapsule
         # XXX
         # rangeiterator
-        check(iter(range(1)), size(h + '4l'))
+        check(iter(range(1)), size('4l'))
         # reverse
-        check(reversed(''), size(h + 'PP'))
+        check(reversed(''), size('PP'))
         # range
-        check(range(1), size(h + '4P'))
-        check(range(66000), size(h + '4P'))
+        check(range(1), size('4P'))
+        check(range(66000), size('4P'))
         # set
         # frozenset
         PySet_MINSIZE = 8
         samples = [[], range(10), range(50)]
-        s = size(h + '3P2P' + PySet_MINSIZE*'lP' + 'lP')
+        s = size('3P2P' + PySet_MINSIZE*'PP' + 'PP')
         for sample in samples:
             minused = len(sample)
             if minused == 0: tmp = 1
@@ -810,18 +776,18 @@
                 check(set(sample), s + newsize*struct.calcsize('lP'))
                 check(frozenset(sample), s + newsize*struct.calcsize('lP'))
         # setiterator
-        check(iter(set()), size(h + 'P3P'))
+        check(iter(set()), size('P3P'))
         # slice
-        check(slice(0), size(h + '3P'))
+        check(slice(0), size('3P'))
         # super
-        check(super(int), size(h + '3P'))
+        check(super(int), size('3P'))
         # tuple
-        check((), size(vh))
-        check((1,2,3), size(vh) + 3*self.P)
+        check((), vsize(''))
+        check((1,2,3), vsize('') + 3*self.P)
         # type
         # (PyTypeObject + PyNumberMethods + PyMappingMethods +
         #  PySequenceMethods + PyBufferProcs)
-        s = size(vh + 'P2P15Pl4PP9PP11PI') + size('16Pi17P 3P 10P 2P 2P')
+        s = vsize('P2P15Pl4PP9PP11PI') + struct.calcsize('16Pi17P 3P 10P 2P 2P')
         check(int, s)
         # class
         class newstyleclass(object): pass
@@ -832,39 +798,38 @@
         # we need to test for both sizes, because we don't know if the string
         # has been cached
         for s in samples:
-            basicsize =  size(h + 'PPPiP') + usize * (len(s) + 1)
+            basicsize = size('PPPiP') + usize * (len(s) + 1)
             check(s, basicsize)
         # weakref
         import weakref
-        check(weakref.ref(int), size(h + '2Pl2P'))
+        check(weakref.ref(int), size('2Pl2P'))
         # weakproxy
         # XXX
         # weakcallableproxy
-        check(weakref.proxy(int), size(h + '2Pl2P'))
+        check(weakref.proxy(int), size('2Pl2P'))
 
     def test_pythontypes(self):
         # check all types defined in Python/
-        h = self.header
-        vh = self.vheader
-        size = self.calcsize
+        size = test.support.calcobjsize
+        vsize = test.support.calcvobjsize
         check = self.check_sizeof
         # _ast.AST
         import _ast
-        check(_ast.AST(), size(h + ''))
+        check(_ast.AST(), size(''))
         # imp.NullImporter
         import imp
-        check(imp.NullImporter(self.file.name), size(h + ''))
+        check(imp.NullImporter(self.file.name), size(''))
         try:
             raise TypeError
         except TypeError:
             tb = sys.exc_info()[2]
             # traceback
             if tb != None:
-                check(tb, size(h + '2P2i'))
+                check(tb, size('2P2i'))
         # symtable entry
         # XXX
         # sys.flags
-        check(sys.flags, size(vh) + self.P * len(sys.flags))
+        check(sys.flags, vsize('') + self.P * len(sys.flags))
 
 
 def test_main():
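# The SizeofTest rewrite above drops the per-test header bookkeeping in favour of
# test.support.calcobjsize(), calcvobjsize() and check_sizeof(); check_sizeof()
# compensates for the GC header of gc-tracked objects, as the removed method did.
# A rough sketch of what the two size helpers might look like (assuming, as the
# removed comment noted, that sizeof(Py_ssize_t) == sizeof(void*)):

import struct, sys

# Fixed-size object header: refcount + type pointer, plus two extra pointers in a
# Py_TRACE_REFS (debug) build; var-sized objects add an ob_size field.
_header = 'PP'
_vheader = _header + 'P'
if hasattr(sys, 'gettotalrefcount'):
    _header = '2P' + _header
    _vheader = '2P' + _vheader

def calcobjsize(fmt):
    # The trailing '0P' pads the struct to pointer alignment, as the old wrapper did.
    return struct.calcsize(_header + fmt + '0P')

def calcvobjsize(fmt):
    return struct.calcsize(_vheader + fmt + '0P')
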
diff -r 3d0686d90f55 Lib/test/test_sys_settrace.py
--- a/Lib/test/test_sys_settrace.py
+++ b/Lib/test/test_sys_settrace.py
@@ -671,6 +671,14 @@
 no_jump_to_non_integers.jump = (2, "Spam")
 no_jump_to_non_integers.output = [True]
 
+def jump_across_with(output):
+    with open(support.TESTFN, "wb") as fp:
+        pass
+    with open(support.TESTFN, "wb") as fp:
+        pass
+jump_across_with.jump = (1, 3)
+jump_across_with.output = []
+
 # This verifies that you can't set f_lineno via _getframe or similar
 # trickery.
 def no_jump_without_trace_function():
@@ -740,6 +748,9 @@
         self.run_test(no_jump_to_non_integers)
     def test_19_no_jump_without_trace_function(self):
         no_jump_without_trace_function()
+    def test_jump_across_with(self):
+        self.addCleanup(support.unlink, support.TESTFN)
+        self.run_test(jump_across_with)
 
     def test_20_large_function(self):
         d = {}
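# Background for jump_across_with: the tests in this module drive line jumps by
# assigning to frame.f_lineno from inside a trace function, and the new case checks
# that such a jump is allowed across the setup of a with block.  A self-contained
# sketch of the mechanism (the offset arithmetic is illustrative, not the harness's):

import sys

def run_with_jump(func, jump_from, jump_to):
    """Run func() under a tracer that jumps from one relative line to another."""
    first = func.__code__.co_firstlineno

    def tracer(frame, event, arg):
        # f_lineno may only be assigned from a trace function, on a 'line' event.
        if event == 'line' and frame.f_lineno == first + jump_from:
            frame.f_lineno = first + jump_to
        return tracer

    sys.settrace(tracer)
    try:
        func()
    finally:
        sys.settrace(None)
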
diff -r 3d0686d90f55 Lib/test/test_tarfile.py
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -166,6 +166,9 @@
     def test_fileobj_symlink2(self):
         self._test_fileobj_link("./ustar/linktest2/symtype", "ustar/linktest1/regtype")
 
+    def test_issue14160(self):
+        self._test_fileobj_link("symtype2", "ustar/regtype")
+
 
 class CommonReadTest(ReadTest):
 
diff -r 3d0686d90f55 Lib/test/test_telnetlib.py
--- a/Lib/test/test_telnetlib.py
+++ b/Lib/test/test_telnetlib.py
@@ -4,6 +4,7 @@
 import time
 import contextlib
 
+import unittest
 from unittest import TestCase
 from test import support
 threading = support.import_module('threading')
@@ -75,8 +76,8 @@
 
 class SocketStub(object):
     ''' a socket proxy that re-defines sendall() '''
-    def __init__(self, reads=[]):
-        self.reads = reads
+    def __init__(self, reads=()):
+        self.reads = list(reads)  # Intentionally make a copy.
         self.writes = []
         self.block = False
     def sendall(self, data):
@@ -102,7 +103,7 @@
         self._messages += out.getvalue()
         return
 
-def new_select(*s_args):
+def mock_select(*s_args):
     block = False
     for l in s_args:
         for fob in l:
@@ -113,6 +114,30 @@
     else:
         return s_args
 
+class MockPoller(object):
+    test_case = None  # Set during TestCase setUp.
+
+    def __init__(self):
+        self._file_objs = []
+
+    def register(self, fd, eventmask):
+        self.test_case.assertTrue(hasattr(fd, 'fileno'), fd)
+        self.test_case.assertEqual(eventmask, select.POLLIN|select.POLLPRI)
+        self._file_objs.append(fd)
+
+    def poll(self, timeout=None):
+        block = False
+        for fob in self._file_objs:
+            if isinstance(fob, TelnetAlike):
+                block = fob.sock.block
+        if block:
+            return []
+        else:
+            return zip(self._file_objs, [select.POLLIN]*len(self._file_objs))
+
+    def unregister(self, fd):
+        self._file_objs.remove(fd)
+
 @contextlib.contextmanager
 def test_socket(reads):
     def new_conn(*ignored):
@@ -125,7 +150,7 @@
         socket.create_connection = old_conn
     return
 
-def test_telnet(reads=[], cls=TelnetAlike):
+def test_telnet(reads=(), cls=TelnetAlike, use_poll=None):
     ''' return a telnetlib.Telnet object that uses a SocketStub with
         reads queued up to be read '''
     for x in reads:
@@ -133,15 +158,31 @@
     with test_socket(reads):
         telnet = cls('dummy', 0)
         telnet._messages = '' # debuglevel output
+        if use_poll is not None:
+            if use_poll and not telnet._has_poll:
+                raise unittest.SkipTest('select.poll() required.')
+            telnet._has_poll = use_poll
     return telnet
 
-class ReadTests(TestCase):
+
+class ExpectAndReadTestCase(TestCase):
     def setUp(self):
         self.old_select = select.select
-        select.select = new_select
+        select.select = mock_select
+        self.old_poll = False
+        if hasattr(select, 'poll'):
+            self.old_poll = select.poll
+            select.poll = MockPoller
+            MockPoller.test_case = self
+
     def tearDown(self):
+        if self.old_poll:
+            MockPoller.test_case = None
+            select.poll = self.old_poll
         select.select = self.old_select
 
+
+class ReadTests(ExpectAndReadTestCase):
     def test_read_until(self):
         """
         read_until(expected, timeout=None)
@@ -158,6 +199,22 @@
         data = telnet.read_until(b'match')
         self.assertEqual(data, expect)
 
+    def test_read_until_with_poll(self):
+        """Use select.poll() to implement telnet.read_until()."""
+        want = [b'x' * 10, b'match', b'y' * 10]
+        telnet = test_telnet(want, use_poll=True)
+        select.select = lambda *_: self.fail('unexpected select() call.')
+        data = telnet.read_until(b'match')
+        self.assertEqual(data, b''.join(want[:-1]))
+
+    def test_read_until_with_select(self):
+        """Use select.select() to implement telnet.read_until()."""
+        want = [b'x' * 10, b'match', b'y' * 10]
+        telnet = test_telnet(want, use_poll=False)
+        if self.old_poll:
+            select.poll = lambda *_: self.fail('unexpected poll() call.')
+        data = telnet.read_until(b'match')
+        self.assertEqual(data, b''.join(want[:-1]))
 
     def test_read_all(self):
         """
@@ -349,8 +406,39 @@
         self.assertRegex(telnet._messages, r'0.*test')
 
 
+class ExpectTests(ExpectAndReadTestCase):
+    def test_expect(self):
+        """
+        expect(expected, [timeout])
+          Read until the expected string has been seen, or a timeout is
+          hit (default is no timeout); may block.
+        """
+        want = [b'x' * 10, b'match', b'y' * 10]
+        telnet = test_telnet(want)
+        (_,_,data) = telnet.expect([b'match'])
+        self.assertEqual(data, b''.join(want[:-1]))
+
+    def test_expect_with_poll(self):
+        """Use select.poll() to implement telnet.expect()."""
+        want = [b'x' * 10, b'match', b'y' * 10]
+        telnet = test_telnet(want, use_poll=True)
+        select.select = lambda *_: self.fail('unexpected select() call.')
+        (_,_,data) = telnet.expect([b'match'])
+        self.assertEqual(data, b''.join(want[:-1]))
+
+    def test_expect_with_select(self):
+        """Use select.select() to implement telnet.expect()."""
+        want = [b'x' * 10, b'match', b'y' * 10]
+        telnet = test_telnet(want, use_poll=False)
+        if self.old_poll:
+            select.poll = lambda *_: self.fail('unexpected poll() call.')
+        (_,_,data) = telnet.expect([b'match'])
+        self.assertEqual(data, b''.join(want[:-1]))
+
+
 def test_main(verbose=None):
-    support.run_unittest(GeneralTests, ReadTests, WriteTests, OptionTests)
+    support.run_unittest(GeneralTests, ReadTests, WriteTests, OptionTests,
+                         ExpectTests)
 
 if __name__ == '__main__':
     test_main()
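# The MockPoller class and mock_select() above exist because telnetlib.Telnet now
# prefers select.poll() when it is available (poll() is not limited by FD_SETSIZE)
# and falls back to select.select(); the tests force each path through the _has_poll
# attribute.  A self-contained sketch of that kind of readiness wait, using the
# hypothetical name wait_readable():

import select

def wait_readable(fileobj, timeout, has_poll=hasattr(select, 'poll')):
    """Return True when fileobj is ready for reading, preferring poll()."""
    if has_poll:
        poller = select.poll()
        poller.register(fileobj, select.POLLIN | select.POLLPRI)
        try:
            # poll() takes milliseconds; None means wait forever.
            ready = poller.poll(None if timeout is None else timeout * 1000)
        finally:
            poller.unregister(fileobj)
        return bool(ready)
    ready, _, _ = select.select([fileobj], [], [], timeout)
    return bool(ready)
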
diff -r 3d0686d90f55 Lib/test/test_thread.py
--- a/Lib/test/test_thread.py
+++ b/Lib/test/test_thread.py
@@ -128,6 +128,29 @@
             time.sleep(0.01)
         self.assertEqual(thread._count(), orig)
 
+    def test_save_exception_state_on_error(self):
+        # See issue #14474
+        def task():
+            started.release()
+            raise SyntaxError
+        def mywrite(self, *args):
+            try:
+                raise ValueError
+            except ValueError:
+                pass
+            real_write(self, *args)
+        c = thread._count()
+        started = thread.allocate_lock()
+        with support.captured_output("stderr") as stderr:
+            real_write = stderr.write
+            stderr.write = mywrite
+            started.acquire()
+            thread.start_new_thread(task, ())
+            started.acquire()
+            while thread._count() > c:
+                time.sleep(0.01)
+        self.assertIn("Traceback", stderr.getvalue())
+
 
 class Barrier:
     def __init__(self, num_threads):
diff -r 3d0686d90f55 Lib/test/test_threading.py
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -2,6 +2,8 @@
 
 import test.support
 from test.support import verbose, strip_python_stderr, import_module
+from test.script_helper import assert_python_ok
+
 import random
 import re
 import sys
@@ -407,6 +409,33 @@
         t.daemon = True
         self.assertTrue('daemon' in repr(t))
 
+    @unittest.skipUnless(hasattr(os, 'fork'), 'test needs fork()')
+    def test_dummy_thread_after_fork(self):
+        # Issue #14308: a dummy thread in the active list doesn't mess up
+        # the after-fork mechanism.
+        code = """if 1:
+            import _thread, threading, os, time
+
+            def background_thread(evt):
+                # Creates and registers the _DummyThread instance
+                threading.current_thread()
+                evt.set()
+                time.sleep(10)
+
+            evt = threading.Event()
+            _thread.start_new_thread(background_thread, (evt,))
+            evt.wait()
+            assert threading.active_count() == 2, threading.active_count()
+            if os.fork() == 0:
+                assert threading.active_count() == 1, threading.active_count()
+                os._exit(0)
+            else:
+                os.wait()
+        """
+        _, out, err = assert_python_ok("-c", code)
+        self.assertEqual(out, b'')
+        self.assertEqual(err, b'')
+
 
 class ThreadJoinOnShutdown(BaseTestCase):
 
@@ -624,6 +653,7 @@
         output = "end of worker thread\nend of main thread\n"
         self.assertScriptHasOutput(script, output)
 
+    @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug")
     def test_6_daemon_threads(self):
         # Check that a daemon thread cannot crash the interpreter on shutdown
         # by manipulating internal structures that are being disposed of in
@@ -738,10 +768,10 @@
             """
         expected_output = "end of main thread\n"
         p = subprocess.Popen([sys.executable, "-c", script],
-                             stdout=subprocess.PIPE)
+                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         stdout, stderr = p.communicate()
         data = stdout.decode().replace('\r', '')
-        self.assertEqual(p.returncode, 0, "Unexpected error")
+        self.assertEqual(p.returncode, 0, "Unexpected error: " + stderr.decode())
         self.assertEqual(data, expected_output)
 
 class LockTests(lock_tests.LockTests):
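# What test_dummy_thread_after_fork checks: in the child process,
# threading._after_fork() stops every thread except the one that survived the fork,
# and a _DummyThread (registered when current_thread() is called from a thread the
# threading module did not start) must tolerate that; the companion change to
# Lib/threading.py later in this patch gives _DummyThread a no-op _stop() for exactly
# this reason.  Paraphrased sketch of the relevant loop (not the patched source):

def _after_fork_sketch(active_threads, current):
    """Roughly what threading._after_fork() does in the child process."""
    surviving = {}
    for thread in active_threads.values():
        if thread is current:
            # The thread that called fork() carries on in the child.
            surviving[thread.ident] = thread
        else:
            # Every other thread no longer exists in the child; mark it stopped.
            # This is the call a _DummyThread must tolerate.
            thread._stop()
    return surviving
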
diff -r 3d0686d90f55 Lib/test/test_timeout.py
--- a/Lib/test/test_timeout.py
+++ b/Lib/test/test_timeout.py
@@ -138,14 +138,88 @@
         self.sock.close()
 
     def testConnectTimeout(self):
-        # Choose a private address that is unlikely to exist to prevent
-        # failures due to the connect succeeding before the timeout.
-        # Use a dotted IP address to avoid including the DNS lookup time
-        # with the connect time.  This avoids failing the assertion that
-        # the timeout occurred fast enough.
-        addr = ('10.0.0.0', 12345)
-        with support.transient_internet(addr[0]):
-            self._sock_operation(1, 0.001, 'connect', addr)
+        # Testing connect timeout is tricky: we need to have IP connectivity
+        # to a host that silently drops our packets.  We can't simulate this
+        # from Python because it's a function of the underlying TCP/IP stack.
+        # So, the following Snakebite host has been defined:
+        blackhole = ('blackhole.snakebite.net', 56666)
+
+        # Blackhole has been configured to silently drop any incoming packets.
+        # No RSTs (for TCP) or ICMP UNREACH (for UDP/ICMP) will be sent back
+        # to hosts that attempt to connect to this address: which is exactly
+        # what we need to confidently test connect timeout.
+
+        # However, we want to prevent false positives.  It's not unreasonable
+        # to expect certain hosts may not be able to reach the blackhole, due
+        # to firewalling or general network configuration.  In order to improve
+        # our confidence in testing the blackhole, a corresponding 'whitehole'
+        # has also been set up using one port higher:
+        whitehole = ('whitehole.snakebite.net', 56667)
+
+        # This address has been configured to immediately drop any incoming
+        # packets as well, but it does so respectfully with regard to the
+        # incoming protocol.  RSTs are sent for TCP packets, and ICMP UNREACH
+        # is sent for UDP/ICMP packets.  This means our attempts to connect to
+        # it should be met immediately with ECONNREFUSED.  The test case has
+        # been structured around this premise: if we get an ECONNREFUSED from
+        # the whitehole, we proceed with testing connect timeout against the
+        # blackhole.  If we don't, we skip the test (with a message about not
+        # getting the required RST from the whitehole within the required
+        # timeframe).
+
+        # For the record, the whitehole/blackhole configuration has been set
+        # up using the 'pf' firewall (available on BSDs), using the following:
+        #
+        #   ext_if="bge0"
+        #
+        #   blackhole_ip="35.8.247.6"
+        #   whitehole_ip="35.8.247.6"
+        #   blackhole_port="56666"
+        #   whitehole_port="56667"
+        #
+        #   block return in log quick on $ext_if proto { tcp udp } \
+        #       from any to $whitehole_ip port $whitehole_port
+        #   block drop in log quick on $ext_if proto { tcp udp } \
+        #       from any to $blackhole_ip port $blackhole_port
+        #
+
+        skip = True
+        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        # Use a timeout of 3 seconds.  Why 3?  Because it's more than 1, and
+        # less than 5.  i.e. no particular reason.  Feel free to tweak it if
+        # you feel a different value would be more appropriate.
+        timeout = 3
+        sock.settimeout(timeout)
+        try:
+            sock.connect((whitehole))
+        except socket.timeout:
+            pass
+        except IOError as err:
+            if err.errno == errno.ECONNREFUSED:
+                skip = False
+        finally:
+            sock.close()
+            del sock
+
+        if skip:
+            self.skipTest(
+                "We didn't receive a connection reset (RST) packet from "
+                "{}:{} within {} seconds, so we're unable to test connect "
+                "timeout against the corresponding {}:{} (which is "
+                "configured to silently drop packets)."
+                    .format(
+                        whitehole[0],
+                        whitehole[1],
+                        timeout,
+                        blackhole[0],
+                        blackhole[1],
+                    )
+            )
+
+        # All that hard work just to test if connect times out in 0.001s ;-)
+        self.addr_remote = blackhole
+        with support.transient_internet(self.addr_remote[0]):
+            self._sock_operation(1, 0.001, 'connect', self.addr_remote)
 
     def testRecvTimeout(self):
         # Test recv() timeout
diff -r 3d0686d90f55 Lib/test/test_tokenize.py
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -674,6 +674,10 @@
         f = 'tokenize_tests-utf8-coding-cookie-and-utf8-bom-sig.txt'
         self.assertTrue(self._testFile(f))
 
+    def test_bad_coding_cookie(self):
+        self.assertRaises(SyntaxError, self._testFile, 'bad_coding.py')
+        self.assertRaises(SyntaxError, self._testFile, 'bad_coding2.py')
+
 
 class Test_Tokenize(TestCase):
 
@@ -825,6 +829,16 @@
                 found, consumed_lines = detect_encoding(rl)
                 self.assertEqual(found, "iso-8859-1")
 
+    def test_syntaxerror_latin1(self):
+        # Issue 14629: need to raise SyntaxError if the first
+        # line(s) have non-UTF-8 characters
+        lines = (
+            b'print("\xdf")', # Latin-1: LATIN SMALL LETTER SHARP S
+            )
+        readline = self.get_readline(lines)
+        self.assertRaises(SyntaxError, detect_encoding, readline)
+
+
     def test_utf8_normalization(self):
         # See get_normal_name() in tokenizer.c.
         encodings = ("utf-8", "utf-8-mac", "utf-8-unix")
diff -r 3d0686d90f55 Lib/test/test_tools.py
--- /dev/null
+++ b/Lib/test/test_tools.py
@@ -0,0 +1,108 @@
+"""Tests for scripts in the Tools directory.
+
+This file contains regression tests for some of the scripts found in the
+Tools directory of a Python checkout or tarball, such as reindent.py.
+"""
+
+import os
+import sys
+import imp
+import unittest
+import sysconfig
+import tempfile
+from test import support
+from test.script_helper import assert_python_ok
+
+if not sysconfig.is_python_build():
+    # XXX some installers do contain the tools, should we detect that
+    # and run the tests in that case too?
+    raise unittest.SkipTest('test irrelevant for an installed Python')
+
+srcdir = sysconfig.get_config_var('projectbase')
+basepath = os.path.join(os.getcwd(), srcdir, 'Tools')
+scriptsdir = os.path.join(basepath, 'scripts')
+
+
+class ReindentTests(unittest.TestCase):
+    script = os.path.join(scriptsdir, 'reindent.py')
+
+    def test_noargs(self):
+        assert_python_ok(self.script)
+
+    def test_help(self):
+        rc, out, err = assert_python_ok(self.script, '-h')
+        self.assertEqual(out, b'')
+        self.assertGreater(err, b'')
+
+
+class TestSundryScripts(unittest.TestCase):
+    # At least make sure the rest don't have syntax errors.  When a script
+    # gains tests of its own, it should be added to the whitelist below.
+
+    # scripts that have independent tests.
+    whitelist = ['reindent.py']
+    # scripts that can't be imported without running
+    blacklist = ['make_ctype.py']
+    # scripts that use windows-only modules
+    windows_only = ['win_add2path.py']
+    # blacklisted for other reasons
+    other = ['analyze_dxp.py']
+
+    skiplist = blacklist + whitelist + windows_only + other
+
+    def setUp(self):
+        cm = support.DirsOnSysPath(scriptsdir)
+        cm.__enter__()
+        self.addCleanup(cm.__exit__)
+
+    def test_sundry(self):
+        for fn in os.listdir(scriptsdir):
+            if fn.endswith('.py') and fn not in self.skiplist:
+                __import__(fn[:-3])
+
+    @unittest.skipIf(sys.platform != "win32", "Windows-only test")
+    def test_sundry_windows(self):
+        for fn in self.windows_only:
+            __import__(fn[:-3])
+
+    @unittest.skipIf(not support.threading, "test requires _thread module")
+    def test_analyze_dxp_import(self):
+        if hasattr(sys, 'getdxp'):
+            import analyze_dxp
+        else:
+            with self.assertRaises(RuntimeError):
+                import analyze_dxp
+
+
+class PdepsTests(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        path = os.path.join(scriptsdir, 'pdeps.py')
+        cls.pdeps = imp.load_source('pdeps', path)
+
+    @classmethod
+    def tearDownClass(cls):
+        if 'pdeps' in sys.modules:
+            del sys.modules['pdeps']
+
+    def test_process_errors(self):
+        # Issue #14492: m_import.match(line) can be None.
+        with tempfile.TemporaryDirectory() as tmpdir:
+            fn = os.path.join(tmpdir, 'foo')
+            with open(fn, 'w') as stream:
+                stream.write("#!/this/will/fail")
+            self.pdeps.process(fn, {})
+
+    def test_inverse_attribute_error(self):
+        # Issue #14492: this used to fail with an AttributeError.
+        self.pdeps.inverse({'a': []})
+
+
+def test_main():
+    support.run_unittest(*[obj for obj in globals().values()
+                               if isinstance(obj, type)])
+
+
+if __name__ == '__main__':
+    unittest.main()
diff -r 3d0686d90f55 Lib/test/test_tuple.py
--- a/Lib/test/test_tuple.py
+++ b/Lib/test/test_tuple.py
@@ -164,6 +164,14 @@
         check(10)       # check our checking code
         check(1000000)
 
+    def test_no_comdat_folding(self):
+        # Issue 8847: In the PGO build, the MSVC linker's COMDAT folding
+        # optimization causes failures in code that relies on distinct
+        # function addresses.
+        class T(tuple): pass
+        with self.assertRaises(TypeError):
+            [3,] + T((1,2))
+
 def test_main():
     support.run_unittest(TupleTest)
 
diff -r 3d0686d90f55 Lib/test/test_unicode.py
--- a/Lib/test/test_unicode.py
+++ b/Lib/test/test_unicode.py
@@ -891,12 +891,15 @@
         self.assertEqual('{foo._x}'.format_map({'foo': C(20)}), '20')
 
         # test various errors
-        self.assertRaises(TypeError, '{'.format_map)
-        self.assertRaises(TypeError, '}'.format_map)
-        self.assertRaises(TypeError, 'a{'.format_map)
-        self.assertRaises(TypeError, 'a}'.format_map)
-        self.assertRaises(TypeError, '{a'.format_map)
-        self.assertRaises(TypeError, '}a'.format_map)
+        self.assertRaises(TypeError, ''.format_map)
+        self.assertRaises(TypeError, 'a'.format_map)
+
+        self.assertRaises(ValueError, '{'.format_map, {})
+        self.assertRaises(ValueError, '}'.format_map, {})
+        self.assertRaises(ValueError, 'a{'.format_map, {})
+        self.assertRaises(ValueError, 'a}'.format_map, {})
+        self.assertRaises(ValueError, '{a'.format_map, {})
+        self.assertRaises(ValueError, '}a'.format_map, {})
 
         # issue #12579: can't supply positional params to format_map
         self.assertRaises(ValueError, '{}'.format_map, {'a' : 2})
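# The distinction drawn by the updated assertions above: omitting the mapping
# argument entirely is a TypeError, while a malformed format string given a mapping
# is a ValueError.  For example:

try:
    '{'.format_map()        # no mapping argument at all
except TypeError:
    pass

try:
    '{'.format_map({})      # mapping given, but the format string is malformed
except ValueError:
    pass
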
diff -r 3d0686d90f55 Lib/test/test_urllib2.py
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -1218,6 +1218,22 @@
     def test_basic_auth_with_single_quoted_realm(self):
         self.test_basic_auth(quote_char="'")
 
+    def test_basic_auth_with_unquoted_realm(self):
+        opener = OpenerDirector()
+        password_manager = MockPasswordManager()
+        auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
+        realm = "ACME Widget Store"
+        http_handler = MockHTTPHandler(
+            401, 'WWW-Authenticate: Basic realm=%s\r\n\r\n' % realm)
+        opener.add_handler(auth_handler)
+        opener.add_handler(http_handler)
+        with self.assertWarns(UserWarning):
+            self._test_basic_auth(opener, auth_handler, "Authorization",
+                                realm, http_handler, password_manager,
+                                "http://acme.example.com/protected",
+                                "http://acme.example.com/protected",
+                                )
+
     def test_proxy_basic_auth(self):
         opener = OpenerDirector()
         ph = urllib.request.ProxyHandler(dict(http="proxy.example.com:3128"))
diff -r 3d0686d90f55 Lib/test/test_urlparse.py
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -524,6 +524,11 @@
         self.assertEqual(p.port, 80)
         self.assertEqual(p.geturl(), url)
 
+        # Verify an illegal port is returned as None
+        url = b"HTTP://WWW.PYTHON.ORG:65536/doc/#frag"
+        p = urllib.parse.urlsplit(url)
+        self.assertEqual(p.port, None)
+
     def test_attributes_bad_port(self):
         """Check handling of non-integer ports."""
         p = urllib.parse.urlsplit("http://www.example.net:foo")
@@ -636,11 +641,20 @@
                          ('s3', 'foo.com', '/stuff', '', '', ''))
         self.assertEqual(urllib.parse.urlparse("x-newscheme://foo.com/stuff"),
                          ('x-newscheme', 'foo.com', '/stuff', '', '', ''))
+        self.assertEqual(urllib.parse.urlparse("x-newscheme://foo.com/stuff?query#fragment"),
+                         ('x-newscheme', 'foo.com', '/stuff', '', 'query', 'fragment'))
+        self.assertEqual(urllib.parse.urlparse("x-newscheme://foo.com/stuff?query"),
+                         ('x-newscheme', 'foo.com', '/stuff', '', 'query', ''))
+
         # And for bytes...
         self.assertEqual(urllib.parse.urlparse(b"s3://foo.com/stuff"),
                          (b's3', b'foo.com', b'/stuff', b'', b'', b''))
         self.assertEqual(urllib.parse.urlparse(b"x-newscheme://foo.com/stuff"),
                          (b'x-newscheme', b'foo.com', b'/stuff', b'', b'', b''))
+        self.assertEqual(urllib.parse.urlparse(b"x-newscheme://foo.com/stuff?query#fragment"),
+                         (b'x-newscheme', b'foo.com', b'/stuff', b'', b'query', b'fragment'))
+        self.assertEqual(urllib.parse.urlparse(b"x-newscheme://foo.com/stuff?query"),
+                         (b'x-newscheme', b'foo.com', b'/stuff', b'', b'query', b''))
 
     def test_mixed_types_rejected(self):
         # Several functions that process either strings or ASCII encoded bytes
@@ -797,6 +811,13 @@
                           encoding='utf-8')
         self.assertRaises(TypeError, urllib.parse.quote, b'foo', errors='strict')
 
+    def test_issue14072(self):
+        p1 = urllib.parse.urlsplit('tel:+31-641044153')
+        self.assertEqual(p1.scheme, 'tel')
+        self.assertEqual(p1.path, '+31-641044153')
+        p2 = urllib.parse.urlsplit('tel:+31641044153')
+        self.assertEqual(p2.scheme, 'tel')
+        self.assertEqual(p2.path, '+31641044153')
 
 def test_main():
     support.run_unittest(UrlParseTestCase)
diff -r 3d0686d90f55 Lib/test/test_weakref.py
--- a/Lib/test/test_weakref.py
+++ b/Lib/test/test_weakref.py
@@ -812,11 +812,71 @@
     def __hash__(self):
         return hash(self.arg)
 
+class RefCycle:
+    def __init__(self):
+        self.cycle = self
+
 
 class MappingTestCase(TestBase):
 
     COUNT = 10
 
+    def check_len_cycles(self, dict_type, cons):
+        N = 20
+        items = [RefCycle() for i in range(N)]
+        dct = dict_type(cons(o) for o in items)
+        # Keep an iterator alive
+        it = dct.items()
+        try:
+            next(it)
+        except StopIteration:
+            pass
+        del items
+        gc.collect()
+        n1 = len(dct)
+        del it
+        gc.collect()
+        n2 = len(dct)
+        # one item may be kept alive inside the iterator
+        self.assertIn(n1, (0, 1))
+        self.assertEqual(n2, 0)
+
+    def test_weak_keyed_len_cycles(self):
+        self.check_len_cycles(weakref.WeakKeyDictionary, lambda k: (k, 1))
+
+    def test_weak_valued_len_cycles(self):
+        self.check_len_cycles(weakref.WeakValueDictionary, lambda k: (1, k))
+
+    def check_len_race(self, dict_type, cons):
+        # Extended sanity checks for len() in the face of cyclic collection
+        self.addCleanup(gc.set_threshold, *gc.get_threshold())
+        for th in range(1, 100):
+            N = 20
+            gc.collect(0)
+            gc.set_threshold(th, th, th)
+            items = [RefCycle() for i in range(N)]
+            dct = dict_type(cons(o) for o in items)
+            del items
+            # All items will be collected at next garbage collection pass
+            it = dct.items()
+            try:
+                next(it)
+            except StopIteration:
+                pass
+            n1 = len(dct)
+            del it
+            n2 = len(dct)
+            self.assertGreaterEqual(n1, 0)
+            self.assertLessEqual(n1, N)
+            self.assertGreaterEqual(n2, 0)
+            self.assertLessEqual(n2, n1)
+
+    def test_weak_keyed_len_race(self):
+        self.check_len_race(weakref.WeakKeyDictionary, lambda k: (k, 1))
+
+    def test_weak_valued_len_race(self):
+        self.check_len_race(weakref.WeakValueDictionary, lambda k: (1, k))
+
     def test_weak_values(self):
         #
         #  This exercises d.copy(), d.items(), d[], del d[], len(d).
diff -r 3d0686d90f55 Lib/test/test_weakset.py
--- a/Lib/test/test_weakset.py
+++ b/Lib/test/test_weakset.py
@@ -17,6 +17,10 @@
 class Foo:
     pass
 
+class RefCycle:
+    def __init__(self):
+        self.cycle = self
+
 
 class TestWeakSet(unittest.TestCase):
 
@@ -24,6 +28,12 @@
         # need to keep references to them
         self.items = [ustr(c) for c in ('a', 'b', 'c')]
         self.items2 = [ustr(c) for c in ('x', 'y', 'z')]
+        self.ab_items = [ustr(c) for c in 'ab']
+        self.abcde_items = [ustr(c) for c in 'abcde']
+        self.def_items = [ustr(c) for c in 'def']
+        self.ab_weakset = WeakSet(self.ab_items)
+        self.abcde_weakset = WeakSet(self.abcde_items)
+        self.def_weakset = WeakSet(self.def_items)
         self.letters = [ustr(c) for c in string.ascii_letters]
         self.s = WeakSet(self.items)
         self.d = dict.fromkeys(self.items)
@@ -67,6 +77,11 @@
             x = WeakSet(self.items + self.items2)
             c = C(self.items2)
             self.assertEqual(self.s.union(c), x)
+            del c
+        self.assertEqual(len(u), len(self.items) + len(self.items2))
+        self.items2.pop()
+        gc.collect()
+        self.assertEqual(len(u), len(self.items) + len(self.items2))
 
     def test_or(self):
         i = self.s.union(self.items2)
@@ -74,14 +89,19 @@
         self.assertEqual(self.s | frozenset(self.items2), i)
 
     def test_intersection(self):
-        i = self.s.intersection(self.items2)
+        s = WeakSet(self.letters)
+        i = s.intersection(self.items2)
         for c in self.letters:
-            self.assertEqual(c in i, c in self.d and c in self.items2)
-        self.assertEqual(self.s, WeakSet(self.items))
+            self.assertEqual(c in i, c in self.items2 and c in self.letters)
+        self.assertEqual(s, WeakSet(self.letters))
         self.assertEqual(type(i), WeakSet)
         for C in set, frozenset, dict.fromkeys, list, tuple:
             x = WeakSet([])
-            self.assertEqual(self.s.intersection(C(self.items2)), x)
+            self.assertEqual(i.intersection(C(self.items)), x)
+        self.assertEqual(len(i), len(self.items2))
+        self.items2.pop()
+        gc.collect()
+        self.assertEqual(len(i), len(self.items2))
 
     def test_isdisjoint(self):
         self.assertTrue(self.s.isdisjoint(WeakSet(self.items2)))
@@ -112,6 +132,10 @@
         self.assertEqual(self.s, WeakSet(self.items))
         self.assertEqual(type(i), WeakSet)
         self.assertRaises(TypeError, self.s.symmetric_difference, [[]])
+        self.assertEqual(len(i), len(self.items) + len(self.items2))
+        self.items2.pop()
+        gc.collect()
+        self.assertEqual(len(i), len(self.items) + len(self.items2))
 
     def test_xor(self):
         i = self.s.symmetric_difference(self.items2)
@@ -119,22 +143,28 @@
         self.assertEqual(self.s ^ frozenset(self.items2), i)
 
     def test_sub_and_super(self):
-        pl, ql, rl = map(lambda s: [ustr(c) for c in s], ['ab', 'abcde', 'def'])
-        p, q, r = map(WeakSet, (pl, ql, rl))
-        self.assertTrue(p < q)
-        self.assertTrue(p <= q)
-        self.assertTrue(q <= q)
-        self.assertTrue(q > p)
-        self.assertTrue(q >= p)
-        self.assertFalse(q < r)
-        self.assertFalse(q <= r)
-        self.assertFalse(q > r)
-        self.assertFalse(q >= r)
+        self.assertTrue(self.ab_weakset <= self.abcde_weakset)
+        self.assertTrue(self.abcde_weakset <= self.abcde_weakset)
+        self.assertTrue(self.abcde_weakset >= self.ab_weakset)
+        self.assertFalse(self.abcde_weakset <= self.def_weakset)
+        self.assertFalse(self.abcde_weakset >= self.def_weakset)
         self.assertTrue(set('a').issubset('abc'))
         self.assertTrue(set('abc').issuperset('a'))
         self.assertFalse(set('a').issubset('cbs'))
         self.assertFalse(set('cbs').issuperset('a'))
 
+    def test_lt(self):
+        self.assertTrue(self.ab_weakset < self.abcde_weakset)
+        self.assertFalse(self.abcde_weakset < self.def_weakset)
+        self.assertFalse(self.ab_weakset < self.ab_weakset)
+        self.assertFalse(WeakSet() < WeakSet())
+
+    def test_gt(self):
+        self.assertTrue(self.abcde_weakset > self.ab_weakset)
+        self.assertFalse(self.abcde_weakset > self.def_weakset)
+        self.assertFalse(self.ab_weakset > self.ab_weakset)
+        self.assertFalse(WeakSet() > WeakSet())
+
     def test_gc(self):
         # Create a nest of cycles to exercise overall ref count check
         s = WeakSet(Foo() for i in range(1000))
@@ -359,6 +389,49 @@
             s.clear()
         self.assertEqual(len(s), 0)
 
+    def test_len_cycles(self):
+        N = 20
+        items = [RefCycle() for i in range(N)]
+        s = WeakSet(items)
+        del items
+        it = iter(s)
+        try:
+            next(it)
+        except StopIteration:
+            pass
+        gc.collect()
+        n1 = len(s)
+        del it
+        gc.collect()
+        n2 = len(s)
+        # one item may be kept alive inside the iterator
+        self.assertIn(n1, (0, 1))
+        self.assertEqual(n2, 0)
+
+    def test_len_race(self):
+        # Extended sanity checks for len() in the face of cyclic collection
+        self.addCleanup(gc.set_threshold, *gc.get_threshold())
+        for th in range(1, 100):
+            N = 20
+            gc.collect(0)
+            gc.set_threshold(th, th, th)
+            items = [RefCycle() for i in range(N)]
+            s = WeakSet(items)
+            del items
+            # All items will be collected at next garbage collection pass
+            it = iter(s)
+            try:
+                next(it)
+            except StopIteration:
+                pass
+            n1 = len(s)
+            del it
+            n2 = len(s)
+            self.assertGreaterEqual(n1, 0)
+            self.assertLessEqual(n1, N)
+            self.assertGreaterEqual(n2, 0)
+            self.assertLessEqual(n2, n1)
+
 
 def test_main(verbose=None):
     support.run_unittest(TestWeakSet)
diff -r 3d0686d90f55 Lib/test/test_winreg.py
--- a/Lib/test/test_winreg.py
+++ b/Lib/test/test_winreg.py
@@ -1,7 +1,7 @@
 # Test the windows specific win32reg module.
 # Only win32reg functions not hit here: FlushKey, LoadKey and SaveKey
 
-import os, sys
+import os, sys, errno
 import unittest
 from test import support
 threading = support.import_module("threading")
@@ -292,7 +292,13 @@
     def test_dynamic_key(self):
         # Issue2810, when the value is dynamically generated, these
         # throw "WindowsError: More data is available" in 2.6 and 3.1
-        EnumValue(HKEY_PERFORMANCE_DATA, 0)
+        try:
+            EnumValue(HKEY_PERFORMANCE_DATA, 0)
+        except OSError as e:
+            if e.errno in (errno.EPERM, errno.EACCES):
+                self.skipTest("access denied to registry key "
+                              "(are you running in a non-interactive session?)")
+            raise
         QueryValueEx(HKEY_PERFORMANCE_DATA, "")
 
     # Reflection requires XP x64/Vista at a minimum. XP doesn't have this stuff
diff -r 3d0686d90f55 Lib/test/test_xmlrpc_net.py
--- a/Lib/test/test_xmlrpc_net.py
+++ b/Lib/test/test_xmlrpc_net.py
@@ -39,7 +39,7 @@
     def test_python_builders(self):
         # Get the list of builders from the XMLRPC buildbot interface at
         # python.org.
-        server = xmlrpclib.ServerProxy("http://www.python.org/dev/buildbot/all/xmlrpc/")
+        server = xmlrpclib.ServerProxy("http://buildbot.python.org/all/xmlrpc/")
         try:
             builders = server.getAllBuilders()
         except socket.error as e:
diff -r 3d0686d90f55 Lib/test/test_zipfile.py
--- a/Lib/test/test_zipfile.py
+++ b/Lib/test/test_zipfile.py
@@ -972,6 +972,46 @@
         with zipfile.ZipFile(TESTFN, mode="r") as zipfr:
             self.assertEqual(zipfr.comment, comment2)
 
+        # check that comments are correctly modified in append mode
+        with zipfile.ZipFile(TESTFN, mode="w") as zipf:
+            zipf.comment = b"original comment"
+            zipf.writestr("foo.txt", "O, for a Muse of Fire!")
+        with zipfile.ZipFile(TESTFN, mode="a") as zipf:
+            zipf.comment = b"an updated comment"
+        with zipfile.ZipFile(TESTFN, mode="r") as zipf:
+            self.assertEqual(zipf.comment, b"an updated comment")
+
+        # check that comments are correctly shortened in append mode
+        with zipfile.ZipFile(TESTFN, mode="w") as zipf:
+            zipf.comment = b"original comment that's longer"
+            zipf.writestr("foo.txt", "O, for a Muse of Fire!")
+        with zipfile.ZipFile(TESTFN, mode="a") as zipf:
+            zipf.comment = b"shorter comment"
+        with zipfile.ZipFile(TESTFN, mode="r") as zipf:
+            self.assertEqual(zipf.comment, b"shorter comment")
+
+    def test_unicode_comment(self):
+        with zipfile.ZipFile(TESTFN, "w", zipfile.ZIP_STORED) as zipf:
+            zipf.writestr("foo.txt", "O, for a Muse of Fire!")
+            with self.assertRaises(TypeError):
+                zipf.comment = "this is an error"
+
+    def test_change_comment_in_empty_archive(self):
+        with zipfile.ZipFile(TESTFN, "a", zipfile.ZIP_STORED) as zipf:
+            self.assertFalse(zipf.filelist)
+            zipf.comment = b"this is a comment"
+        with zipfile.ZipFile(TESTFN, "r") as zipf:
+            self.assertEqual(zipf.comment, b"this is a comment")
+
+    def test_change_comment_in_nonempty_archive(self):
+        with zipfile.ZipFile(TESTFN, "w", zipfile.ZIP_STORED) as zipf:
+            zipf.writestr("foo.txt", "O, for a Muse of Fire!")
+        with zipfile.ZipFile(TESTFN, "a", zipfile.ZIP_STORED) as zipf:
+            self.assertTrue(zipf.filelist)
+            zipf.comment = b"this is a comment"
+        with zipfile.ZipFile(TESTFN, "r") as zipf:
+            self.assertEqual(zipf.comment, b"this is a comment")
+
     def check_testzip_with_bad_crc(self, compression):
         """Tests that files with bad CRCs return their name from testzip."""
         zipdata = self.zips_with_bad_crc[compression]
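# The new archive-comment tests above boil down to: assigning bytes to
# ZipFile.comment, including in append mode, rewrites the end-of-central-directory
# comment when the archive is closed, and assigning str raises TypeError.  Outside
# the test harness (with an illustrative file name):

import zipfile

with zipfile.ZipFile('example.zip', mode='w') as zf:
    zf.writestr('foo.txt', 'O, for a Muse of Fire!')
    zf.comment = b'original comment'

with zipfile.ZipFile('example.zip', mode='a') as zf:
    zf.comment = b'an updated comment'      # replaces the stored comment

with zipfile.ZipFile('example.zip') as zf:
    assert zf.comment == b'an updated comment'
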
diff -r 3d0686d90f55 Lib/test/test_zlib.py
--- a/Lib/test/test_zlib.py
+++ b/Lib/test/test_zlib.py
@@ -66,24 +66,11 @@
 # Issue #10276 - check that inputs >=4GB are handled correctly.
 class ChecksumBigBufferTestCase(unittest.TestCase):
 
-    def setUp(self):
-        with open(support.TESTFN, "wb+") as f:
-            f.seek(_4G)
-            f.write(b"asdf")
-            f.flush()
-            self.mapping = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
-
-    def tearDown(self):
-        self.mapping.close()
-        support.unlink(support.TESTFN)
-
-    @unittest.skipUnless(mmap, "mmap() is not available.")
-    @unittest.skipUnless(sys.maxsize > _4G, "Can't run on a 32-bit system.")
-    @unittest.skipUnless(support.is_resource_enabled("largefile"),
-                         "May use lots of disk space.")
-    def test_big_buffer(self):
-        self.assertEqual(zlib.crc32(self.mapping), 3058686908)
-        self.assertEqual(zlib.adler32(self.mapping), 82837919)
+    @bigmemtest(size=_4G + 4, memuse=1, dry_run=False)
+    def test_big_buffer(self, size):
+        data = b"nyan" * (_1G + 1)
+        self.assertEqual(zlib.crc32(data), 1044521549)
+        self.assertEqual(zlib.adler32(data), 2256789997)
 
 
 class ExceptionTestCase(unittest.TestCase):
diff -r 3d0686d90f55 Lib/test/testtar.tar
Binary file Lib/test/testtar.tar has changed
diff -r 3d0686d90f55 Lib/threading.py
--- a/Lib/threading.py
+++ b/Lib/threading.py
@@ -1007,6 +1007,9 @@
     def _set_daemon(self):
         return True
 
+    def _stop(self):
+        pass
+
     def join(self, timeout=None):
         assert False, "cannot join a dummy thread"
 
diff -r 3d0686d90f55 Lib/tkinter/__init__.py
--- a/Lib/tkinter/__init__.py
+++ b/Lib/tkinter/__init__.py
@@ -526,12 +526,19 @@
 
         The type keyword specifies the form in which the data is
         to be returned and should be an atom name such as STRING
-        or FILE_NAME.  Type defaults to STRING.
+        or FILE_NAME.  Type defaults to STRING, except on X11, where the default
+        is to try UTF8_STRING and fall back to STRING.
 
         This command is equivalent to:
 
         selection_get(CLIPBOARD)
         """
+        if 'type' not in kw and self._windowingsystem == 'x11':
+            try:
+                kw['type'] = 'UTF8_STRING'
+                return self.tk.call(('clipboard', 'get') + self._options(kw))
+            except TclError:
+                del kw['type']
         return self.tk.call(('clipboard', 'get') + self._options(kw))
 
     def clipboard_clear(self, **kw):
@@ -613,8 +620,16 @@
         A keyword parameter selection specifies the name of
         the selection and defaults to PRIMARY.  A keyword
         parameter displayof specifies a widget on the display
-        to use."""
+        to use. A keyword parameter type specifies the form of data to be
+        fetched, defaulting to STRING except on X11, where UTF8_STRING is tried
+        before STRING."""
         if 'displayof' not in kw: kw['displayof'] = self._w
+        if 'type' not in kw and self._windowingsystem == 'x11':
+            try:
+                kw['type'] = 'UTF8_STRING'
+                return self.tk.call(('selection', 'get') + self._options(kw))
+            except TclError:
+                del kw['type']
         return self.tk.call(('selection', 'get') + self._options(kw))
     def selection_handle(self, command, **kw):
         """Specify a function COMMAND to call if the X
@@ -1029,6 +1044,15 @@
         if displayof is None:
             return ('-displayof', self._w)
         return ()
+    @property
+    def _windowingsystem(self):
+        """Internal function."""
+        try:
+            return self._root()._windowingsystem_cached
+        except AttributeError:
+            ws = self._root()._windowingsystem_cached = \
+                        self.tk.call('tk', 'windowingsystem')
+            return ws
     def _options(self, cnf, kw = None):
         """Internal function."""
         if kw:
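# Usage of the clipboard/selection change above (needs a running display; on X11 the
# requested type now defaults to UTF8_STRING with a silent fallback to STRING):

import tkinter

root = tkinter.Tk()
root.withdraw()                      # no window needed, just a Tk interpreter
try:
    text = root.clipboard_get()      # UTF8_STRING first on X11, then STRING
except tkinter.TclError:
    text = ''                        # clipboard empty or in an unreadable form
print(repr(text))
root.destroy()
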
diff -r 3d0686d90f55 Lib/tkinter/simpledialog.py
--- a/Lib/tkinter/simpledialog.py
+++ b/Lib/tkinter/simpledialog.py
@@ -282,7 +282,7 @@
         self.entry = Entry(master, name="entry")
         self.entry.grid(row=1, padx=5, sticky=W+E)
 
-        if self.initialvalue:
+        if self.initialvalue is not None:
             self.entry.insert(0, self.initialvalue)
             self.entry.select_range(0, END)
 
diff -r 3d0686d90f55 Lib/tkinter/ttk.py
--- a/Lib/tkinter/ttk.py
+++ b/Lib/tkinter/ttk.py
@@ -1253,7 +1253,7 @@
 
 
     def exists(self, item):
-        """Returns True if the specified item is present in the three,
+        """Returns True if the specified item is present in the tree,
         False otherwise."""
         return bool(self.tk.call(self._w, "exists", item))
 
diff -r 3d0686d90f55 Lib/tokenize.py
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -292,9 +292,12 @@
 
     def find_cookie(line):
         try:
-            line_string = line.decode('ascii')
+            # Decode as UTF-8. Either the line is an encoding declaration,
+            # in which case it should be pure ASCII, or it must be UTF-8
+            # per default encoding.
+            line_string = line.decode('utf-8')
         except UnicodeDecodeError:
-            return None
+            raise SyntaxError("invalid or missing encoding declaration")
 
         matches = cookie_re.findall(line_string)
         if not matches:
@@ -307,7 +310,7 @@
             raise SyntaxError("unknown encoding: " + encoding)
 
         if bom_found:
-            if codec.name != 'utf-8':
+            if encoding != 'utf-8':
                 # This behaviour mimics the Python interpreter
                 raise SyntaxError('encoding problem: utf-8')
             encoding += '-sig'
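
As a rough illustration of the stricter tokenize behaviour (a sketch, not part of the patch): detect_encoding() now rejects source whose first lines are neither valid UTF-8 nor covered by a coding cookie, instead of find_cookie() silently returning None.

    import io
    import tokenize

    # Latin-1 bytes with no coding cookie are not valid UTF-8, so this now fails early.
    bad = io.BytesIO("# caf\xe9\nx = 1\n".encode("latin-1"))
    try:
        tokenize.detect_encoding(bad.readline)
    except SyntaxError as exc:
        print("rejected:", exc)

    # The same bytes preceded by a matching cookie are still accepted.
    good = io.BytesIO("# -*- coding: latin-1 -*-\n# caf\xe9\n".encode("latin-1"))
    print(tokenize.detect_encoding(good.readline)[0])    # 'iso-8859-1'
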
diff -r 3d0686d90f55 Lib/unittest/case.py
--- a/Lib/unittest/case.py
+++ b/Lib/unittest/case.py
@@ -62,7 +62,7 @@
     Unconditionally skip a test.
     """
     def decorator(test_item):
-        if not (isinstance(test_item, type) and issubclass(test_item, TestCase)):
+        if not isinstance(test_item, type):
             @functools.wraps(test_item)
             def skip_wrapper(*args, **kwargs):
                 raise SkipTest(reason)
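
A short usage sketch of what the relaxed isinstance check enables (hypothetical class names, not from the patch): the skip decorators can now be applied to any class, such as a mixin that does not itself inherit from TestCase, and the skip takes effect on every TestCase that later mixes it in.

    import unittest

    @unittest.skip("not ready yet")
    class SlowMixin:                       # plain class, not a TestCase subclass
        def test_slow(self):
            self.fail("should never run")

    class SlowTests(SlowMixin, unittest.TestCase):
        pass

    if __name__ == "__main__":
        unittest.main()                    # test_slow is reported as skipped
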
diff -r 3d0686d90f55 Lib/unittest/loader.py
--- a/Lib/unittest/loader.py
+++ b/Lib/unittest/loader.py
@@ -34,6 +34,11 @@
     TestClass = type(classname, (case.TestCase,), attrs)
     return suiteClass((TestClass(methodname),))
 
+def _jython_aware_splitext(path):
+    if path.lower().endswith('$py.class'):
+        return path[:-9]
+    return os.path.splitext(path)[0]
+
 
 class TestLoader(object):
     """
@@ -221,7 +226,7 @@
             return os.path.dirname(full_path)
 
     def _get_name_from_path(self, path):
-        path = os.path.splitext(os.path.normpath(path))[0]
+        path = _jython_aware_splitext(os.path.normpath(path))
 
         _relpath = os.path.relpath(path, self._top_level_dir)
         assert not os.path.isabs(_relpath), "Path must be within the project"
@@ -258,11 +263,11 @@
                     yield _make_failed_import_test(name, self.suiteClass)
                 else:
                     mod_file = os.path.abspath(getattr(module, '__file__', full_path))
-                    realpath = os.path.splitext(mod_file)[0]
-                    fullpath_noext = os.path.splitext(full_path)[0]
+                    realpath = _jython_aware_splitext(mod_file)
+                    fullpath_noext = _jython_aware_splitext(full_path)
                     if realpath.lower() != fullpath_noext.lower():
                         module_dir = os.path.dirname(realpath)
-                        mod_name = os.path.splitext(os.path.basename(full_path))[0]
+                        mod_name = _jython_aware_splitext(os.path.basename(full_path))
                         expected_dir = os.path.dirname(full_path)
                         msg = ("%r module incorrectly imported from %r. Expected %r. "
                                "Is this module globally installed?")
diff -r 3d0686d90f55 Lib/unittest/test/test_skipping.py
--- a/Lib/unittest/test/test_skipping.py
+++ b/Lib/unittest/test/test_skipping.py
@@ -66,6 +66,21 @@
         self.assertEqual(result.skipped, [(test, "testing")])
         self.assertEqual(record, [])
 
+    def test_skip_non_unittest_class(self):
+        @unittest.skip("testing")
+        class Mixin:
+            def test_1(self):
+                record.append(1)
+        class Foo(Mixin, unittest.TestCase):
+            pass
+        record = []
+        result = unittest.TestResult()
+        test = Foo("test_1")
+        suite = unittest.TestSuite([test])
+        suite.run(result)
+        self.assertEqual(result.skipped, [(test, "testing")])
+        self.assertEqual(record, [])
+
     def test_expected_failure(self):
         class Foo(unittest.TestCase):
             @unittest.expectedFailure
diff -r 3d0686d90f55 Lib/urllib/parse.py
--- a/Lib/urllib/parse.py
+++ b/Lib/urllib/parse.py
@@ -44,11 +44,14 @@
                'imap', 'wais', 'file', 'mms', 'https', 'shttp',
                'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '',
                'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh']
-non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
-                    'telnet', 'wais', 'imap', 'snews', 'sip', 'sips']
 uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap',
                'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips',
                'mms', '', 'sftp']
+
+# These are not actually used anymore, but should stay for backwards
+# compatibility.  (They are undocumented, but have a public-looking name.)
+non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
+                    'telnet', 'wais', 'imap', 'snews', 'sip', 'sips']
 uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms',
               'gopher', 'rtsp', 'rtspu', 'sip', 'sips', '']
 uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news',
@@ -150,6 +153,9 @@
         port = self._hostinfo[1]
         if port is not None:
             port = int(port, 10)
+            # Return None on an illegal port
+            if not ( 0 <= port <= 65535):
+                return None
         return port
 
 
@@ -283,8 +289,8 @@
     Note that we don't break the components up in smaller bits
     (e.g. netloc is a single string) and we don't expand % escapes."""
     url, scheme, _coerce_result = _coerce_args(url, scheme)
-    tuple = urlsplit(url, scheme, allow_fragments)
-    scheme, netloc, url, query, fragment = tuple
+    splitresult = urlsplit(url, scheme, allow_fragments)
+    scheme, netloc, url, query, fragment = splitresult
     if scheme in uses_params and ';' in url:
         url, params = _splitparams(url)
     else:
@@ -345,21 +351,21 @@
             if c not in scheme_chars:
                 break
         else:
-            try:
-                # make sure "url" is not actually a port number (in which case
-                # "scheme" is really part of the path
-                _testportnum = int(url[i+1:])
-            except ValueError:
-                scheme, url = url[:i].lower(), url[i+1:]
+            # make sure "url" is not actually a port number (in which case
+            # "scheme" is really part of the path)
+            rest = url[i+1:]
+            if not rest or any(c not in '0123456789' for c in rest):
+                # not a port number
+                scheme, url = url[:i].lower(), rest
 
     if url[:2] == '//':
         netloc, url = _splitnetloc(url, 2)
         if (('[' in netloc and ']' not in netloc) or
                 (']' in netloc and '[' not in netloc)):
             raise ValueError("Invalid IPv6 URL")
-    if allow_fragments and scheme in uses_fragment and '#' in url:
+    if allow_fragments and '#' in url:
         url, fragment = url.split('#', 1)
-    if scheme in uses_query and '?' in url:
+    if '?' in url:
         url, query = url.split('?', 1)
     v = SplitResult(scheme, netloc, url, query, fragment)
     _parse_cache[key] = v
@@ -551,15 +557,15 @@
         encoding and errors: specify how to decode percent-encoded sequences
             into Unicode characters, as accepted by the bytes.decode() method.
     """
-    dict = {}
+    parsed_result = {}
     pairs = parse_qsl(qs, keep_blank_values, strict_parsing,
                       encoding=encoding, errors=errors)
     for name, value in pairs:
-        if name in dict:
-            dict[name].append(value)
+        if name in parsed_result:
+            parsed_result[name].append(value)
         else:
-            dict[name] = [value]
-    return dict
+            parsed_result[name] = [value]
+    return parsed_result
 
 def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
               encoding='utf-8', errors='replace'):
@@ -711,7 +717,7 @@
 def quote_from_bytes(bs, safe='/'):
     """Like quote(), but accepts a bytes object rather than a str, and does
     not perform string-to-bytes encoding.  It always returns an ASCII string.
-    quote_from_bytes(b'abc def\xab') -> 'abc%20def%AB'
+    quote_from_bytes(b'abc def\x3f') -> 'abc%20def%3F'
     """
     if not isinstance(bs, (bytes, bytearray)):
         raise TypeError("quote_from_bytes() expected bytes")
diff -r 3d0686d90f55 Lib/urllib/request.py
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -95,6 +95,7 @@
 import sys
 import time
 import collections
+import warnings
 
 from urllib.error import URLError, HTTPError, ContentTooShortError
 from urllib.parse import (
@@ -794,7 +795,7 @@
     # allow for double- and single-quoted realm values
     # (single quotes are a violation of the RFC, but appear in the wild)
     rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
-                    'realm=(["\'])(.*?)\\2', re.I)
+                    'realm=(["\']?)([^"\']*)\\2', re.I)
 
     # XXX could pre-emptively send auth info already accepted (RFC 2617,
     # end of section 2, and section 1.2 immediately after "credentials"
@@ -827,6 +828,9 @@
             mo = AbstractBasicAuthHandler.rx.search(authreq)
             if mo:
                 scheme, quote, realm = mo.groups()
+                if quote not in ["'", '"']:
+                    warnings.warn("Basic Auth Realm was unquoted",
+                                  UserWarning, 2)
                 if scheme.lower() == 'basic':
                     response = self.retry_http_basic_auth(host, req, realm)
                     if response and response.code != 401:
@@ -1062,8 +1066,9 @@
         if request.data is not None:  # POST
             data = request.data
             if isinstance(data, str):
-                raise TypeError("POST data should be bytes"
-                        " or an iterable of bytes. It cannot be str.")
+                msg = "POST data should be bytes or an iterable of bytes. "\
+                      "It cannot be of type str."
+                raise TypeError(msg)
             if not request.has_header('Content-type'):
                 request.add_unredirected_header(
                     'Content-type',
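
A sketch of what the relaxed realm pattern accepts, using the class attribute changed above (an illustration, not an official API example): unquoted realm values now match as well, and http_error_auth_reqed() emits a UserWarning noting the RFC violation, while quoted realms behave exactly as before.

    from urllib.request import AbstractBasicAuthHandler

    rx = AbstractBasicAuthHandler.rx
    print(rx.search('Basic realm="WallyWorld"').groups())   # ('Basic', '"', 'WallyWorld')
    print(rx.search("Basic realm=WallyWorld").groups())     # ('Basic', '', 'WallyWorld')
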
diff -r 3d0686d90f55 Lib/urllib/response.py
--- a/Lib/urllib/response.py
+++ b/Lib/urllib/response.py
@@ -61,11 +61,11 @@
         self.hookargs = hookargs
 
     def close(self):
-        addbase.close(self)
         if self.closehook:
             self.closehook(*self.hookargs)
             self.closehook = None
             self.hookargs = None
+        addbase.close(self)
 
 class addinfo(addbase):
     """class to add an info() method to an open file."""
diff -r 3d0686d90f55 Lib/weakref.py
--- a/Lib/weakref.py
+++ b/Lib/weakref.py
@@ -78,7 +78,7 @@
         del self.data[key]
 
     def __len__(self):
-        return sum(wr() is not None for wr in self.data.values())
+        return len(self.data) - len(self._pending_removals)
 
     def __contains__(self, key):
         try:
@@ -290,7 +290,7 @@
         return self.data[ref(key)]
 
     def __len__(self):
-        return len(self.data)
+        return len(self.data) - len(self._pending_removals)
 
     def __repr__(self):
         return "<WeakKeyDictionary at %s>" % id(self)
diff -r 3d0686d90f55 Lib/xmlrpc/server.py
--- a/Lib/xmlrpc/server.py
+++ b/Lib/xmlrpc/server.py
@@ -1,4 +1,4 @@
-"""XML-RPC Servers.
+r"""XML-RPC Servers.
 
 This module can be used to create simple XML-RPC servers
 by creating a server and either installing functions, a
diff -r 3d0686d90f55 Lib/zipfile.py
--- a/Lib/zipfile.py
+++ b/Lib/zipfile.py
@@ -698,7 +698,7 @@
         self.compression = compression  # Method of compression
         self.mode = key = mode.replace('b', '')[0]
         self.pwd = None
-        self.comment = b''
+        self._comment = b''
 
         # Check if we were passed a file-like object
         if isinstance(file, str):
@@ -774,7 +774,7 @@
             print(endrec)
         size_cd = endrec[_ECD_SIZE]             # bytes in central directory
         offset_cd = endrec[_ECD_OFFSET]         # offset of central directory
-        self.comment = endrec[_ECD_COMMENT]     # archive comment
+        self._comment = endrec[_ECD_COMMENT]    # archive comment
 
         # "concat" is zero, unless zip was concatenated to another file
         concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
@@ -886,6 +886,24 @@
         else:
             self.pwd = None
 
+    @property
+    def comment(self):
+        """The comment text associated with the ZIP file."""
+        return self._comment
+
+    @comment.setter
+    def comment(self, comment):
+        if not isinstance(comment, bytes):
+            raise TypeError("comment: expected bytes, got %s" % type(comment))
+        # check for valid comment length
+        if len(comment) >= ZIP_MAX_COMMENT:
+            if self.debug:
+                print('Archive comment is too long; truncating to %d bytes'
+                        % ZIP_MAX_COMMENT)
+            comment = comment[:ZIP_MAX_COMMENT]
+        self._comment = comment
+        self._didModify = True
+
     def read(self, name, pwd=None):
         """Return file bytes (as a string) for name."""
         with self.open(name, "r", pwd) as fp:
@@ -1287,18 +1305,11 @@
                 centDirSize = min(centDirSize, 0xFFFFFFFF)
                 centDirOffset = min(centDirOffset, 0xFFFFFFFF)
 
-            # check for valid comment length
-            if len(self.comment) >= ZIP_MAX_COMMENT:
-                if self.debug > 0:
-                    msg = 'Archive comment is too long; truncating to %d bytes' \
-                          % ZIP_MAX_COMMENT
-                self.comment = self.comment[:ZIP_MAX_COMMENT]
-
             endrec = struct.pack(structEndArchive, stringEndArchive,
                                  0, 0, centDirCount, centDirCount,
-                                 centDirSize, centDirOffset, len(self.comment))
+                                 centDirSize, centDirOffset, len(self._comment))
             self.fp.write(endrec)
-            self.fp.write(self.comment)
+            self.fp.write(self._comment)
             self.fp.flush()
 
         if not self._filePassed:
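
A short usage sketch of the new comment property (hypothetical archive name, not from the patch): assigning a str now fails immediately with TypeError instead of surfacing an error when the archive is written, and any assignment marks the archive as modified so the updated end-of-central-directory record is written on close().

    import zipfile

    with zipfile.ZipFile("example.zip", "w") as zf:
        zf.writestr("readme.txt", b"hello")
        zf.comment = b"built by the release script"   # must be bytes
        try:
            zf.comment = "not bytes"                  # rejected at assignment time
        except TypeError as exc:
            print(exc)

    with zipfile.ZipFile("example.zip") as zf:
        print(zf.comment)                             # b'built by the release script'
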
diff -r 3d0686d90f55 Mac/README
--- a/Mac/README
+++ b/Mac/README
@@ -66,7 +66,7 @@
   $ make
   $ make install
 
-This flag can be used a framework build of python, but also with a classic
+This flag can be used with a framework build of python, but also with a classic
 unix build. Either way you will have to build python on Mac OS X 10.4 (or later)
 with Xcode 2.1 (or later). You also have to install the 10.4u SDK when 
 installing Xcode.
@@ -214,8 +214,8 @@
 
 Go to the directory "Mac/OSX/BuildScript". There you'll find a script 
 "build-installer.py" that does all the work. This will download and build
-a number of 3th-party libaries, configures and builds a framework Python,
-installs it, creates the installer pacakge files and then packs this in a 
+a number of 3rd-party libraries, configures and builds a framework Python,
+installs it, creates the installer package files and then packs this in a
 DMG image.
 
 The script will build a universal binary, you'll therefore have to run this
@@ -251,8 +251,8 @@
 Uninstalling a framework install, including the binary installer
 ================================================================
 
-Uninstalling a framework can be done by manually removing all bits that got installed,
-that's true for both installations from source and installations using the binary installer.
+Uninstalling a framework can be done by manually removing all bits that got installed.
+That's true for both installations from source and installations using the binary installer.
 Sadly enough OSX does not have a central uninstaller.
 
 The main bit of a framework install is the framework itself, installed in
diff -r 3d0686d90f55 Makefile.pre.in
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -161,7 +161,7 @@
 SUBDIRSTOO=	Include Lib Misc
 
 # Files and directories to be distributed
-CONFIGFILES=	configure configure.in acconfig.h pyconfig.h.in Makefile.pre.in
+CONFIGFILES=	configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in
 DISTFILES=	README ChangeLog $(CONFIGFILES)
 DISTDIRS=	$(SUBDIRS) $(SUBDIRSTOO) Ext-dummy
 DIST=		$(DISTFILES) $(DISTDIRS)
@@ -917,7 +917,8 @@
 LIBSUBDIRS=	tkinter tkinter/test tkinter/test/test_tkinter \
 		tkinter/test/test_ttk site-packages test \
 		test/capath test/data \
-		test/cjkencodings test/decimaltestdata test/xmltestdata test/subprocessdata \
+		test/cjkencodings test/decimaltestdata test/xmltestdata \
+		test/subprocessdata test/sndhdrdata \
 		test/tracedmodules test/encoded_modules \
 		concurrent concurrent/futures encodings \
 		email email/mime email/test email/test/data \
@@ -936,6 +937,8 @@
 		unittest unittest/test \
 		curses pydoc_data $(MACHDEPS)
 libinstall:	build_all $(srcdir)/Lib/$(PLATDIR) $(srcdir)/Modules/xxmodule.c
+	-PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
+		./$(BUILDPYTHON) -Wi -c "import lib2to3.pygram, lib2to3.patcomp;lib2to3.patcomp.PatternCompiler()"
 	@for i in $(SCRIPTDIR) $(LIBDEST); \
 	do \
 		if test ! -d $(DESTDIR)$$i; then \
@@ -1013,8 +1016,6 @@
 		./$(BUILDPYTHON) -Wi -O $(DESTDIR)$(LIBDEST)/compileall.py \
 		-d $(LIBDEST)/site-packages -f \
 		-x badsyntax $(DESTDIR)$(LIBDEST)/site-packages
-	-PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
-		./$(BUILDPYTHON) -Wi -c "import lib2to3.pygram, lib2to3.patcomp;lib2to3.patcomp.PatternCompiler()"
 
 # Create the PLATDIR source directory, if one wasn't distributed..
 $(srcdir)/Lib/$(PLATDIR):
@@ -1157,8 +1158,11 @@
 # Install a number of symlinks to keep software that expects a normal unix
 # install (which includes python-config) happy.
 frameworkinstallmaclib:
+	ln -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/python$(VERSION)/config-$(LDVERSION)/libpython$(LDVERSION).a"
+	ln -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/python$(VERSION)/config-$(LDVERSION)/libpython$(LDVERSION).dylib"
 	ln -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/python$(VERSION)/config-$(LDVERSION)/libpython$(VERSION).a"
 	ln -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/python$(VERSION)/config-$(LDVERSION)/libpython$(VERSION).dylib"
+	ln -fs "../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/libpython$(LDVERSION).dylib"
 	ln -fs "../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/libpython$(VERSION).dylib"
 
 # This installs the IDE, the Launcher and other apps into /Applications
@@ -1210,7 +1214,7 @@
 	$(SHELL) config.status --recheck
 	$(SHELL) config.status
 
-# Rebuild the configure script from configure.in; also rebuild pyconfig.h.in
+# Rebuild the configure script from configure.ac; also rebuild pyconfig.h.in
 autoconf:
 	(cd $(srcdir); autoconf -Wall)
 	(cd $(srcdir); autoheader -Wall)
@@ -1252,7 +1256,7 @@
 	find . -name '*.so.[0-9]*.[0-9]*' -exec rm -f {} ';'
 	find build -name 'fficonfig.h' -exec rm -f {} ';' || true
 	find build -name 'fficonfig.py' -exec rm -f {} ';' || true
-	-rm -f Lib/lib2to3/*Grammar*.pickle
+	-rm -f $(srcdir)/Lib/lib2to3/*Grammar*.pickle
 	-rm -f Modules/_testembed
 
 profile-removal:
diff -r 3d0686d90f55 Misc/ACKS
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -52,6 +52,7 @@
 Michael J. Barber
 Nicolas Bareil
 Chris Barker
+Anton Barkovsky
 Nick Barnes
 Quentin Barnes
 Richard Barran
@@ -145,10 +146,12 @@
 Brett Cannon
 Mike Carlton
 Terry Carroll
+Damien Cassou
 Lorenzo M. Catucci
 Donn Cave
 Charles Cazabon
 Per Cederqvist
+Matej Cepl
 Octavian Cerna
 Pascal Chambon
 John Chandler
@@ -173,6 +176,7 @@
 Andrew Clegg
 Brad Clements
 Steve Clift
+Hervé Coatanhay
 Nick Coghlan
 Josh Cogliati
 Dave Cole
@@ -214,6 +218,7 @@
 Eric Daniel
 Scott David Daniels
 Ben Darnell
+Kushal Das
 Jonathan Dasteel
 John DeGood
 Ned Deily
@@ -306,11 +311,13 @@
 John Fouhy
 Martin Franklin
 Robin Friedrich
+Bradley Froehle
 Ivan Frohne
 Jim Fulton
 Tadayoshi Funaba
 Gyro Funch
 Peter Funk
+Ethan Furman
 Geoff Furnish
 Ulisses Furquim
 Hagen Fürstenau
@@ -329,6 +336,7 @@
 Dan Gass
 Andrew Gaul
 Stephen M. Gava
+Xavier de Gaye
 Harry Henry Gebel
 Marius Gedminas
 Thomas Gellekum
@@ -341,6 +349,7 @@
 Michael Gilfix
 Christoph Gohlke
 Tim Golden
+Guilherme Gonçalves
 Chris Gonnerman
 David Goodger
 Hans de Graaff
@@ -404,6 +413,7 @@
 Albert Hofkamp
 Tomas Hoger
 Jonathan Hogg
+Akintayo Holder
 Gerrit Holl
 Shane Holloway
 Rune Holm
@@ -426,9 +436,11 @@
 Greg Humphreys
 Eric Huss
 Jeremy Hylton
+Ludwig Hähne
 Gerhard Häring
 Fredrik Håård
 Mihai Ibanescu
+Ali Ikinci
 Lars Immisch
 Bobby Impollonia
 Meador Inge
@@ -447,9 +459,11 @@
 Jack Jansen
 Bill Janssen
 Thomas Jarosch
+Zbyszek Jędrzejewski-Szmek
 Drew Jenkins
 Flemming Kjær Jensen
 MunSic Jeong
+Chris Jerdonek
 Orjan Johansen
 Fredrik Johansson
 Gregory K. Johnson
@@ -467,6 +481,7 @@
 Sijin Joseph
 Andreas Jung
 Tattoo Mabonzo K.
+Bohuslav Kabrda
 Bob Kahn
 Kurt B. Kaiser
 Tamito Kajiyama
@@ -474,7 +489,6 @@
 Rafe Kaplan
 Jacob Kaplan-Moss
 Jan Kaliszewski
-Arkady Koplyarov
 Lou Kates
 Hiroaki Kawai
 Sebastien Keim
@@ -502,11 +516,14 @@
 Kim Knapp
 Lenny Kneler
 Pat Knight
+Jeff Knupp
 Greg Kochanski
 Damon Kohler
 Marko Kohtala
+Arkady Koplyarov
 Vlad Korolev
 Joseph Koshy
+Jerzy Kozera
 Maksim Kozyarchuk
 Stefan Krah
 Bob Kras
@@ -531,6 +548,7 @@
 Piers Lauder
 Ben Laurie
 Simon Law
+Julia Lawall
 Chris Lawrence
 Brian Leair
 James Lee
@@ -546,11 +564,13 @@
 Robert Lehmann
 Petri Lehtinen
 Luke Kenneth Casson Leighton
+Tshepang Lekhonkhobe
 Marc-Andre Lemburg
 John Lenton
 Christopher Tur Lesniewski-Laas
 Mark Levinson
 William Lewis
+Akira Li
 Xuanji Li
 Robert van Liere
 Ross Light
@@ -588,6 +608,7 @@
 Vladimir Marangozov
 David Marek
 Doug Marien
+Sven Marnach
 Alex Martelli
 Anthony Martin
 Owen Martin
@@ -617,7 +638,9 @@
 Mike Meyer
 Steven Miale
 Trent Mick
+Tom Middleton
 Stan Mihai
+Stefan Mihaila
 Aristotelis Mikropoulos
 Damien Miller
 Chad Miller
@@ -627,6 +650,7 @@
 Andrii V. Mishkovskyi
 Dustin J. Mitchell
 Dom Mitchell
+Florian Mladitsch
 Doug Moen
 The Dragon De Monsyne
 Skip Montanaro
@@ -665,6 +689,7 @@
 Tim Northover
 Joe Norton
 Neal Norwitz
+Mikhail Novikov
 Michal Nowikowski
 Steffen Daode Nurpmeso
 Nigel O'Brian
@@ -695,6 +720,7 @@
 Alexandre Parenteau
 Dan Parisien
 Harri Pasanen
+Joe Peterson
 Randy Pausch
 Samuele Pedroni
 Marcel van der Peijl
@@ -759,6 +785,7 @@
 Bernhard Reiter
 Steven Reiz
 Roeland Rengelink
+Flávio Ribeiro
 Tim Rice
 Francesco Ricciardi
 Jan Pieter Riegel
@@ -776,6 +803,7 @@
 Mark Roddy
 Kevin Rodgers
 Giampaolo Rodola
+Adi Roiban
 Mike Romberg
 Armin Ronacher
 Case Roole
@@ -822,6 +850,7 @@
 Michael Schneider
 Peter Schneider-Kamp
 Arvin Schnell
+Robin Schreiber
 Chad J. Schroeder
 Sam Schulenburg
 Stefan Schwarzer
@@ -830,7 +859,7 @@
 Steven Scott
 Barry Scott
 Nick Seidenman
-Žiga Seilnach
+Žiga Seilnacht
 Yury Selivanov
 Fred Sells
 Jiwon Seo
@@ -839,6 +868,7 @@
 Denis Severson
 Ian Seyer
 Ha Shao
+Mark Shannon
 Richard Shapiro
 Bruce Sherwood
 Alexander Shigin
@@ -849,6 +879,7 @@
 Itamar Shtull-Trauring
 Eric Siegerman
 Paul Sijben
+Tim Silk
 Kirill Simonov
 Nathan Paul Simons
 Janne Sinkkonen
@@ -879,10 +910,12 @@
 Peter Stoehr
 Casper Stoel
 Michael Stone
+Serhiy Storchaka
 Ken Stox
 Dan Stromberg
 Daniel Stutzbach
 Andreas Stührk
+Colin Su
 Pal Subbiah
 Nathan Sullivan
 Mark Summerfield
@@ -967,6 +1000,7 @@
 Kevin Walzer
 Rodrigo Steinmuller Wanderley
 Greg Ward
+Zachary Ware
 Barry Warsaw
 Steve Waterbury
 Bob Watson
@@ -988,12 +1022,14 @@
 Gerry Wiener
 Frank Wierzbicki
 Bryce "Zooko" Wilcox-O'Hearn
+Jakub Wilk
 Jason Williams
 John Williams
 Sue Williams
 Gerald S. Williams
 Frank Willison
 Greg V. Wilson
+J Derek Wilson
 Jody Winston
 Collin Winter
 Dik Winter
@@ -1017,6 +1053,7 @@
 Florent Xicluna
 Hirokazu Yamamoto
 Ka-Ping Yee
+Jason Yeo
 Bob Yodlowski
 Danny Yoo
 George Yoshida
diff -r 3d0686d90f55 Misc/HISTORY
--- a/Misc/HISTORY
+++ b/Misc/HISTORY
@@ -2676,7 +2676,7 @@
   subclasses of str always behaved.  int/long/float, conversion of an instance
   to the base class has been moved to the proper nb_* magic slot and out of
   PyNumber_*().
-  Thanks Walter D�rwald.
+  Thanks Walter Dörwald.
 
 - Descriptors defined in C with a PyGetSetDef structure, where the setter is
   NULL, now raise an AttributeError when attempting to set or delete the
@@ -13998,7 +13998,7 @@
 required for asynchronous connects simpler and more efficient.
 
 - New "locale" module with (still experimental) interface to the
-standard C library locale interface, courtesy Martin von Loewis.  This
+standard C library locale interface, courtesy Martin von Löwis.  This
 does not repeat my mistake in 1.5a4 of always calling
 setlocale(LC_ALL, "").  In fact, we've pretty much decided that
 Python's standard numerical formatting operations should always use
diff -r 3d0686d90f55 Misc/NEWS
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -2,6 +2,558 @@
 Python News
 +++++++++++
 
+What's New in Python 3.2.4
+==========================
+
+*Release date: XX-XX-XXXX*
+
+Core and Builtins
+-----------------
+
+- Issue #15761: Fix crash when PYTHONEXECUTABLE is set on Mac OS X.
+
+- Issue #15726: Fix incorrect bounds checking in PyState_FindModule.
+  Patch by Robin Schreiber.
+
+- Issue #15604: Update uses of PyObject_IsTrue() to check for and handle
+  errors correctly.  Patch by Serhiy Storchaka.
+
+- Issue #13119: sys.stdout and sys.stderr now use "\r\n" newlines on
+  Windows, as in Python 2.
+
+- Issue #14579: Fix CVE-2012-2135: vulnerability in the utf-16 decoder after
+  error handling.  Patch by Serhiy Storchaka.
+
+- Issue #15404: Refleak in PyMethodObject repr.
+
+- Issue #15394: An issue in PyModule_Create that caused references to
+  be leaked on some error paths has been fixed.  Patch by Julia Lawall.
+
+- Issue #15368: An issue that caused bytecode generation to be
+  non-deterministic when using randomized hashing (-R) has been fixed.
+
+- Issue #15020: The program name used to search for Python's path is now
+  "python3" under Unix, not "python".
+
+- Issue #15033: Fix the exit status bug: when modules are invoked using the -m
+  switch, return the proper failure return value (1). Patch by Jeff Knupp.
+
+- Issue #12268: File readline, readlines and read() or readall() methods
+  no longer lose data when an underlying read system call is interrupted.
+  IOError is no longer raised due to a read system call returning EINTR
+  from within these methods.
+
+- Issue #15142: Fix reference leak when deallocating instances of types
+  created using PyType_FromSpec().
+
+- Issue #10053: Don't close FDs when FileIO.__init__ fails. Loosely based on
+  the work by Hirokazu Yamamoto.
+
+- Issue #14775: Fix a potential quadratic dict build-up due to the garbage
+  collector repeatedly trying to untrack dicts.
+
+- Issue #14494: Fix __future__.py and its documentation to note that
+  absolute imports are the default behavior in 3.0 instead of 2.7.
+  Patch by Sven Marnach.
+
+- Issue #14761: Fix potential leak on an error case in the import machinery.
+
+- Issue #14699: Fix calling the classmethod descriptor directly.
+
+- Issue #14433: Prevent msvcrt crash in interactive prompt when stdin
+  is closed.
+
+- Issue #11603 (again): Setting __repr__ to __str__ now raises a RuntimeError
+  when repr() or str() is called on such an object.
+
+- Issue #14658: Fix binding a special method to a builtin implementation of a
+  special method with a different name.
+
+- Issue #14630: Fix a memory access bug for instances of a subclass of int
+  with value 0.
+
+- Issue #14612: Fix jumping around with blocks by setting f_lineno.
+
+- Issue #14607: Fix keyword-only arguments which started with ``__``.
+
+- Issue #13889: Check and (if necessary) set FPU control word before calling
+  any of the dtoa.c string <-> float conversion functions, on MSVC builds of
+  Python.  This fixes issues when embedding Python in a Delphi app.
+
+- Issue #14474: Save and restore exception state in thread.start_new_thread()
+  while writing the error message if the thread leaves an unhandled exception.
+
+- Issue #13019: Fix potential reference leaks in bytearray.extend().  Patch
+  by Suman Saha.
+
+- Issue #14378: Fix compiling ast.ImportFrom nodes with a "__future__" string as
+  the module name that was not interned.
+
+- Issue #14331: Use significantly less stack space when importing modules by
+  allocating path buffers on the heap instead of the stack.
+
+- Issue #14334: Prevent a segfault in type.__getattribute__ when it was not
+  passed strings.
+
+- Issue #1469629: Allow cycles through an object's __dict__ slot to be
+  collected. (For example if ``x.__dict__ is x``).
+
+- Issue #14172: Fix reference leak when marshalling a buffer-like object
+  (other than a bytes object).
+
+- Issue #13521: dict.setdefault() now does only one lookup for the given key,
+  making it "atomic" for many purposes.  Patch by Filip Gruszczyński.
+
+- Issue #14471: Fix a possible buffer overrun in the winreg module.
+
+Library
+-------
+
+- Issue #13370: Ensure that ctypes works on Mac OS X when Python is
+  compiled using the clang compiler
+
+- Issue #15544: Fix Decimal.__float__ to work with payload-carrying NaNs.
+
+- Issue #15249: BytesGenerator now correctly mangles From lines (when
+  requested) even if the body contains undecodable bytes.
+
+- Issue #15777: Fix a refleak in _posixsubprocess.
+
+- Issue #15199: Fix JavaScript's default MIME type to application/javascript.
+  Patch by Bohuslav Kabrda.
+
+- Issue #13579: string.Formatter now understands the 'a' conversion specifier.
+
+- Issue #15595: Fix subprocess.Popen(universal_newlines=True)
+  for certain locales (utf-16 and utf-32 family). Patch by Chris Jerdonek.
+
+- Issue #15477: In cmath and math modules, add workaround for platforms whose
+  system-supplied log1p function doesn't respect signs of zeros.
+
+- Issue #11062: Fix adding a message from file to Babyl mailbox.
+
+- Issue #15646: Prevent equivalent of a fork bomb when using
+  multiprocessing on Windows without the "if __name__ == '__main__'"
+  idiom.
+
+- Issue #15424: Add a __sizeof__ implementation for array objects.
+  Patch by Ludwig Hähne.
+
+- Issue #13052: Fix IDLE crashing when replace string in Search/Replace dialog
+  ended with '\'. Patch by Roger Serwy.
+
+- Issue #15538: Fix compilation of the getnameinfo() / getaddrinfo()
+  emulation code.  Patch by Philipp Hagemeister.
+
+- Issue #9803: Don't close IDLE on saving if breakpoint is open.
+  Patch by Roger Serwy.
+
+- Issue #12288: Consider '0' and '0.0' as valid initialvalue
+  for tkinter SimpleDialog.
+
+- Issue #15512: Add a __sizeof__ implementation for parser.
+  Patch by Serhiy Storchaka.
+
+- Issue #15469: Add a __sizeof__ implementation for deque objects.
+  Patch by Serhiy Storchaka.
+
+- Issue #15489: Add a __sizeof__ implementation for BytesIO objects.
+  Patch by Serhiy Storchaka.
+
+- Issue #15487: Add a __sizeof__ implementation for buffered I/O objects.
+  Patch by Serhiy Storchaka.
+
+- Issue #6056: Make multiprocessing use setblocking(True) on the
+  sockets it uses.  Original patch by J Derek Wilson.
+
+- Issue #15041: update "see also" list in tkinter documentation.
+
+- Issue #15402: An issue in the struct module that caused sys.getsizeof to
+  return incorrect results for struct.Struct instances has been fixed.
+  Initial patch by Serhiy Storchaka.
+
+- Issue #15232: when mangle_from is True, email.Generator now correctly mangles
+  lines that start with 'From ' that occur in a MIME preamble or epilogue.
+
+- Issue #13922: argparse no longer incorrectly strips '--'s that appear
+  after the first one.
+
+- Issue #12353: argparse now correctly handles null argument values.
+
+- Issues #10017 and #14998: Fix TypeError using pprint on dictionaries with
+  user-defined types as keys or other unorderable keys.
+
+- Issue #14635: telnetlib will use poll() rather than select() when possible
+  to avoid failing due to the select() file descriptor limit.
+
+- Issue #15180: Clarify posixpath.join() error message when mixing str & bytes
+
+- Issue #15230: runpy.run_path now correctly sets __package__ as described
+  in the documentation
+
+- Issue #14990: Correctly fail with SyntaxError on invalid encoding
+  declaration.
+
+- Issue #15247: FileIO now raises an error when given a file descriptor
+  pointing to a directory.
+
+- Issue #5346: Preserve permissions of mbox, MMDF and Babyl mailbox
+  files on flush().
+
+- Issue #10571: Fix the "--sign" option of distutils' upload command.
+  Patch by Jakub Wilk.
+
+- Issue #9559: If messages were only added, a new file is no longer
+  created and renamed over the old file when flush() is called on an
+  mbox, MMDF or Babyl mailbox.
+
+- Issue #14653: email.utils.mktime_tz() no longer relies on system
+  mktime() when a timezone offset is supplied.
+
+- Fix GzipFile's handling of filenames given as bytes objects.
+
+- Issue #15101: Make pool finalizer avoid joining current thread.
+
+- Issue #15036: Mailbox no longer throws an error if a flush is done
+  between operations when removing or changing multiple items in mbox,
+  MMDF, or Babyl mailboxes.
+
+- Issue #10133: Make multiprocessing deallocate buffer if socket read
+  fails.  Patch by Hallvard B Furuseth.
+
+- Issue #13854: Make multiprocessing properly handle non-integer
+  non-string argument to SystemExit.
+
+- Issue #12157: Make pool.map() empty iterables correctly.  Initial
+  patch by mouad.
+
+- Issue #14992: os.makedirs(path, exist_ok=True) would raise an OSError
+  when the path existed and had the S_ISGID mode bit set even though that
+  bit was not explicitly requested.  This no longer raises an exception, as
+  mkdir cannot control whether the OS sets that bit for it or not.
+
+- Issue #14962: Update text coloring in IDLE shell window after changing
+  options.  Patch by Roger Serwy.
+
+- Issue #10997: Prevent a duplicate entry in IDLE's "Recent Files" menu.
+
+- Issue #14443: Tell rpmbuild to use the correct version of Python in
+  bdist_rpm. Initial patch by Ross Lagerwall.
+
+- Issue #14929: Stop Idle 3.x from closing on Unicode decode errors when
+  grepping. Patch by Roger Serwy.
+
+- Issue #12510: Attempting to get an invalid tooltip no longer closes Idle.
+  Other tooltips have been corrected or improved and the number of tests
+  has been tripled. Original patch by Roger Serwy.
+
+- Issue #10365: File open dialog now works instead of crashing even when
+  the parent window is closed before the dialog. Patch by Roger Serwy.
+
+- Issue #14876: Use user-selected font for highlight configuration.
+
+- Issue #14920: Fix the help(urllib.parse) failure in the C locale on terminals
+  by using only ASCII characters in the help text.
+
+- Issue #14863: Update the documentation of os.fdopen() to reflect the
+  fact that it is now only a thin wrapper around open().
+
+- Issue #14036: Add an additional check in urlparse to validate that the port
+  is not in an illegal range; None is returned for an out-of-range port.
+
+- Issue #14875: Use float('inf') instead of float('1e66666') in the json module.
+
+- Issue #14426: Correct the Date format in Expires attribute of Set-Cookie
+  Header in Cookie.py.
+
+- Issue #14721: Send the correct 'Content-length: 0' header when the body is an
+  empty string ''. Initial Patch contributed by Arve Knudsen.
+
+- Issue #14072: Fix parsing of 'tel' URIs in urlparse by making the check for
+  ports stricter.
+
+- Issue #9374: Generic parsing of query and fragment portions of url for any
+  scheme. Supported both by RFC3986 and RFC2396.
+
+- Issue #14798: Fix the functions in pyclbr to raise an ImportError
+  when the first part of a dotted name is not a package. Patch by
+  Xavier de Gaye.
+
+- Issue #14829: Fix bisect and range() indexing with large indices
+  (>= 2 ** 32) under 64-bit Windows.
+
+- Issue #14777: tkinter may return undecoded UTF-8 bytes as a string when
+  accessing the Tk clipboard.  Modify clipboard_get() to first request type
+  UTF8_STRING when no specific type is requested in an X11 windowing
+  environment, falling back to the current default type STRING if that fails.
+  Original patch by Thomas Kluyver.
+
+- Issue #12541: Be lenient with quotes around the Realm field of HTTP Basic
+  Authentication in urllib.request.
+
+- Issue #14662: Prevent shutil failures on OS X when destination does not
+  support chflag operations.  Patch by Hynek Schlawack.
+
+- Issue #14157: Fix time.strptime failing without a year on February 29th.
+  Patch by Hynek Schlawack.
+
+- Issue #14768: os.path.expanduser('~/a') didn't work correctly when HOME is '/'.
+
+- Issue #14741: Fix missing support for Ellipsis ('...') in parser module.
+
+- Issue #14697: Fix missing support for set displays and set comprehensions in
+  parser module.
+
+- Issue #14701: Fix missing support for 'raise ... from' in parser module.
+
+- Issue #13183: Fix pdb skipping frames after hitting a breakpoint and running
+  step.  Patch by Xavier de Gaye.
+
+- Issue #14696: Fix parser module to understand 'nonlocal' declarations.
+
+- Issue #10941: Fix imaplib.Internaldate2tuple to produce correct result near
+  the DST transition.  Patch by Joe Peterson.
+
+- Issue #9154: Fix parser module to understand function annotations.
+
+- Issue #14664: It is now possible to use @unittest.skip{If,Unless} on a
+  test class that doesn't inherit from TestCase (i.e. a mixin).
+
+- Issue #14160: TarFile.extractfile() failed to resolve symbolic links when
+  the links were not located in an archive subdirectory.
+
+- Issue #14638: pydoc now treats non-string __name__ values as if they
+  were missing, instead of raising an error.
+
+- Issue #13684: Fix httplib tunnel issue of infinite loops for certain sites
+  which send EOF without trailing \r\n.
+
+- Issue #14629: Raise SyntaxError in tokenizer.detect_encoding if the
+  first two lines have non-UTF-8 characters without an encoding declaration.
+
+- Issue #14308: Fix an exception when a "dummy" thread is in the threading
+  module's active list after a fork().
+
+- Issue #14538: HTMLParser can now correctly parse start tags that contain
+  a bare '/'.
+
+- Issue #14452: SysLogHandler no longer inserts a UTF-8 BOM into the message.
+
+- Issue #13496: Fix potential overflow in bisect.bisect algorithm when applied
+  to a collection of size > sys.maxsize / 2.
+
+- Issue #14399: zipfile now recognizes that the archive has been modified even
+  if only the comment is changed.  In addition, the TypeError that results from
+  trying to set a non-binary value as a comment is now raised at the time
+  the comment is set rather than at the time the zipfile is written.
+
+- Issue #7978: socketserver now restarts the select() call when EINTR is
+  returned.  This avoids crashing the server loop when a signal is received.
+  Patch by Jerzy Kozera.
+
+- Issue #14496: Fix wrong name in idlelib/tabbedpages.py.
+  Patch by Popa Claudiu.
+
+- Issue #14482: Raise a ValueError, not a NameError, when trying to create
+  a multiprocessing Client or Listener with an AF_UNIX type address under
+  Windows.  Patch by Popa Claudiu.
+
+- Issue #14151: Raise a ValueError, not a NameError, when trying to create
+  a multiprocessing Client or Listener with an AF_PIPE type address under
+  non-Windows platforms.  Patch by Popa Claudiu.
+
+- Issue #13872: socket.detach() now marks the socket closed (as mirrored
+  in the socket repr()).  Patch by Matt Joiner.
+
+- Issue #14406: Fix a race condition when using ``concurrent.futures.wait(
+  return_when=ALL_COMPLETED)``.  Patch by Matt Joiner.
+
+- Issue #14409: IDLE now properly executes commands in the Shell window
+  when it cannot read the normal config files on startup and
+  has to use the built-in default key bindings.
+  There was previously a bug in one of the defaults.
+
+- Issue #10340: asyncore - properly handle EINVAL in dispatcher constructor on
+  OSX; avoid calling handle_connect in case of a disconnected socket which
+  was not meant to connect.
+
+- Issue #12757: Fix the skipping of doctests when python is run with -OO so
+  that it works in unittest's verbose mode as well as non-verbose mode.
+
+- Issue #3573: IDLE hangs when passing invalid command line args
+  (directory(ies) instead of file(s)) (Patch by Guilherme Polo)
+
+- Issue #13694: Fix asyncore.dispatcher so that the addr attribute is set
+  after an asynchronous connect.
+
+- Issue #11686: Added missing entries to email package __all__ lists
+  (mostly the new Bytes classes).
+
+- Issue #10484: Fix the CGIHTTPServer's PATH_INFO handling problem.
+
+- Issue #11199: Fix urllib hanging on particular ftp URLs.
+
+- Issue #14062: Header objects now correctly respect the 'linesep' setting
+  when processed by BytesParser (which smtplib.SMTP.send_message uses).
+
+- Issue #14291: Email now defaults to utf-8 for non-ASCII unicode headers
+  instead of raising an error.  This fixes a regression relative to 2.7.
+
+- Issue #5219: Prevent event handler cascade in IDLE.
+
+- Issue #14184: Increase the default stack size for secondary threads on
+  Mac OS X to avoid interpreter crashes when using threads on 10.7.
+
+- Issue #10543: Fix unittest test discovery with Jython bytecode files.
+
+- Issue #14252: Fix subprocess.Popen.terminate() to not raise an error under
+  Windows when the child process has already exited.
+
+- Issue #14195: An issue that caused weakref.WeakSet instances to incorrectly
+  return True for a WeakSet instance 'a' in both 'a < a' and 'a > a' has been
+  fixed.
+
+- Issue #14177: marshal.loads() now raises TypeError when given a unicode
+  string.  Patch by Guilherme Gonçalves.
+
+- Issue #14159: Fix the len() of weak containers (WeakSet, WeakKeyDictionary,
+  WeakValueDictionary) to return a better approximation when some objects
+  are dead or dying.  Moreover, the implementation is now O(1) rather than
+  O(n).
+
+- Issue #13125: Silence spurious test_lib2to3 output when in non-verbose mode.
+  Patch by Mikhail Novikov.
+
+- Issue #13447: Add a test file to host regression tests for bugs in the
+  scripts found in the Tools directory.
+
+- Issue #8033: sqlite3: Fix 64-bit integer handling in user functions
+  on 32-bit architectures. Initial patch by Philippe Devalkeneer.
+
+Extension Modules
+-----------------
+
+- Issue #6493: An issue in ctypes on Windows that caused structure bitfields
+  of type ctypes.c_uint32 and width 32 to incorrectly be set has been fixed.
+
+- Issue #15000: Support the "unique" x32 architecture in _posixsubprocess.c.
+
+- Issue #9041: An issue in ctypes.c_longdouble, ctypes.c_double, and
+  ctypes.c_float that caused an incorrect exception to be returned in the
+  case of overflow has been fixed.
+
+- Issue #14212: The re module didn't retain a reference to buffers it was
+  scanning, resulting in segfaults.
+
+Tests
+-----
+
+- Issue #15747: ZFS always returns EOPNOTSUPP when attempting to set the
+  UF_IMMUTABLE flag (via either chflags or lchflags); refactor affected
+  tests in test_posix.py to account for this.
+
+- Issue #15285: Refactor the approach for testing connect timeouts using
+  two external hosts that have been configured specifically for this type
+  of test.
+
+- Issue #15615: Add some tests for the json module's handling of invalid
+  input data.  Patch by Kushal Das.
+
+- Issue #15496: Add directory removal helpers for tests on Windows.
+  Patch by Jeremy Kloth.
+
+- Issue #15467: Move helpers for __sizeof__ tests into test_support.
+  Patch by Serhiy Storchaka.
+
+- Issue #15320: Make iterating the list of tests thread-safe when running
+  tests in multiprocess mode. Patch by Chris Jerdonek.
+
+- Issue #15230: Adopted a more systematic approach in the runpy tests
+
+- Issue #15300: Ensure the temporary test working directories are in the same
+  parent folder when running tests in multiprocess mode from a Python build.
+  Patch by Chris Jerdonek.
+
+- test_nntplib now tolerates being run from behind NNTP gateways that add
+  "X-Antivirus" headers to articles
+
+- Issue #15043: test_gdb is now skipped entirely if gdb security settings
+  block loading of the gdb hooks
+
+- Issue #14026: In test_cmd_line_script, check that sys.argv is populated
+  correctly for the various invocation approaches (Patch by Jason Yeo)
+
+- Issue #14032: Fix incorrect variable name in test_cmd_line_script debugging
+  message (Patch by Jason Yeo)
+
+- Issue #14589: Update certificate chain for sha256.tbs-internet.com, fixing
+  a test failure in test_ssl.
+
+Build
+-----
+
+- Issue #15645: Ensure 2to3 grammar pickles are properly installed.
+
+- Issue #15560: Fix building _sqlite3 extension on OS X with an SDK.
+
+- Issue #8847: Disable COMDAT folding in Windows PGO builds.
+
+- Issue #14197: For OS X framework builds, ensure links to the shared
+  library are created with the proper ABI suffix.
+
+- Issue #14472: Update .gitignore. Patch by Matej Cepl.
+
+- The Windows build now uses OpenSSL 1.0.0j and bzip2 1.0.6.
+
+- Issue #14557: Fix extensions build on HP-UX. Patch by Adi Roiban.
+
+- Issue #14437: Fix building the _io module under Cygwin.
+
+- Issue #14387: Do not include accu.h from Python.h.
+
+- Issue #14359: Only use O_CLOEXEC in _posixmodule.c if it is defined.
+  Based on patch from Hervé Coatanhay.
+
+- Issue #14018: Fix OS X Tcl/Tk framework checking when using OS X SDKs.
+
+Documentation
+-------------
+
+- Issue #14674: Add a discussion of the json module's standard compliance.
+  Patch by Chris Rebert.
+
+- Issue #15630: Add an example for "continue" stmt in the tutorial. Patch by
+  Daniel Ellis.
+
+- Issue #15444: Use proper spelling for non-ASCII contributor names.  Patch
+  by Serhiy Storchaka.
+
+- Issue #15482: Properly document the default 'level' value for __import__()
+  while warning about using negative values.
+
+- Issue #15230: Clearly document some of the limitations of the runpy
+  module and nudge readers towards importlib when appropriate.
+
+- Issue #13557: Clarify effect of giving two different namespaces to exec or
+  execfile().
+
+- Issue #8799: Fix and improve the threading.Condition documentation.
+
+- Issue #14943: Correct a default argument value for winreg.OpenKey
+  and correctly list the argument names in the function's explanation.
+
+- Issue #14034: added the argparse tutorial.
+
+- Issue #15250: Document that filecmp.dircmp compares files shallowly. Patch
+  contributed by Chris Jerdonek.
+
+Tools/Demos
+-----------
+
+- Issue #14695: Fix missing support for starred assignments in
+  Tools/parser/unparse.py.
+
+
 What's New in Python 3.2.3?
 ===========================
 
@@ -156,9 +708,6 @@
 Library
 -------
 
-- Issue #8033: sqlite3: Fix 64-bit integer handling in user functions
-  on 32-bit architectures. Initial patch by Philippe Devalkeneer.
-
 - HTMLParser is now able to handle slashes in the start tag.
 
 - Issue #14001: CVE-2012-0845: xmlrpc: Fix an endless loop in
@@ -773,6 +1322,9 @@
 - Issue #12451: xml.dom.pulldom: parse() now opens files in binary mode instead
   of the text mode (using the locale encoding) to avoid encoding issues.
 
+- Issue #14443: Ensure that .py files are byte-compiled with the correct Python
+  executable within bdist_rpm even on older versions of RPM
+
 Extension Modules
 -----------------
 
diff -r 3d0686d90f55 Misc/python-config.in
--- a/Misc/python-config.in
+++ b/Misc/python-config.in
@@ -52,7 +52,8 @@
         if opt == '--ldflags':
             if not getvar('Py_ENABLE_SHARED'):
                 libs.insert(0, '-L' + getvar('LIBPL'))
-            libs.extend(getvar('LINKFORSHARED').split())
+            if not getvar('PYTHONFRAMEWORK'):
+                libs.extend(getvar('LINKFORSHARED').split())
         print(' '.join(libs))
 
     elif opt == '--extension-suffix':
diff -r 3d0686d90f55 Modules/_bisectmodule.c
--- a/Modules/_bisectmodule.c
+++ b/Modules/_bisectmodule.c
@@ -3,6 +3,7 @@
 Converted to C by Dmitry Vasiliev (dima at hlabs.spb.ru).
 */
 
+#define PY_SSIZE_T_CLEAN
 #include "Python.h"
 
 static Py_ssize_t
@@ -21,7 +22,10 @@
             return -1;
     }
     while (lo < hi) {
-        mid = (lo + hi) / 2;
+        /* The (size_t)cast ensures that the addition and subsequent division
+           are performed as unsigned operations, avoiding difficulties from
+           signed overflow.  (See issue 13496.) */
+        mid = ((size_t)lo + hi) / 2;
         litem = PySequence_GetItem(list, mid);
         if (litem == NULL)
             return -1;
@@ -121,7 +125,10 @@
             return -1;
     }
     while (lo < hi) {
-        mid = (lo + hi) / 2;
+        /* The (size_t)cast ensures that the addition and subsequent division
+           are performed as unsigned operations, avoiding difficulties from
+           signed overflow.  (See issue 13496.) */
+        mid = ((size_t)lo + hi) / 2;
         litem = PySequence_GetItem(list, mid);
         if (litem == NULL)
             return -1;
@@ -186,7 +193,7 @@
         if (PyList_Insert(list, index, item) < 0)
             return NULL;
     } else {
-        result = PyObject_CallMethod(list, "insert", "iO", index, item);
+        result = PyObject_CallMethod(list, "insert", "nO", index, item);
         if (result == NULL)
             return NULL;
         Py_DECREF(result);
diff -r 3d0686d90f55 Modules/_collectionsmodule.c
--- a/Modules/_collectionsmodule.c
+++ b/Modules/_collectionsmodule.c
@@ -933,6 +933,23 @@
 }
 
 static PyObject *
+deque_sizeof(dequeobject *deque, void *unused)
+{
+    Py_ssize_t res;
+    Py_ssize_t blocks;
+
+    res = sizeof(dequeobject);
+    blocks = (deque->leftindex + deque->len + BLOCKLEN - 1) / BLOCKLEN;
+    assert(deque->leftindex + deque->len - 1 ==
+           (blocks - 1) * BLOCKLEN + deque->rightindex);
+    res += blocks * sizeof(block);
+    return PyLong_FromSsize_t(res);
+}
+
+PyDoc_STRVAR(sizeof_doc,
+"D.__sizeof__() -- size of D in memory, in bytes");
+
+static PyObject *
 deque_get_maxlen(dequeobject *deque)
 {
     if (deque->maxlen == -1)
@@ -995,7 +1012,9 @@
     {"reverse",                 (PyCFunction)deque_reverse,
         METH_NOARGS,             reverse_doc},
     {"rotate",                  (PyCFunction)deque_rotate,
-        METH_VARARGS,           rotate_doc},
+        METH_VARARGS,            rotate_doc},
+    {"__sizeof__",              (PyCFunction)deque_sizeof,
+        METH_NOARGS,             sizeof_doc},
     {NULL,              NULL}   /* sentinel */
 };
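
A small sketch of what the new __sizeof__ slot reports (exact byte counts vary by platform and build, and this is an illustration rather than part of the patch): sys.getsizeof() on a deque now accounts for every allocated block, not just the object header, so the reported size grows as elements are appended.

    import sys
    from collections import deque

    d = deque()
    print(sys.getsizeof(d))        # object header plus the initially allocated block

    d.extend(range(10000))
    print(sys.getsizeof(d))        # noticeably larger: all blocks are now counted
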
 
diff -r 3d0686d90f55 Modules/_csv.c
--- a/Modules/_csv.c
+++ b/Modules/_csv.c
@@ -166,8 +166,12 @@
 {
     if (src == NULL)
         *target = dflt;
-    else
-        *target = PyObject_IsTrue(src);
+    else {
+        int b = PyObject_IsTrue(src);
+        if (b < 0)
+            return -1;
+        *target = b;
+    }
     return 0;
 }
 
diff -r 3d0686d90f55 Modules/_ctypes/cfield.c
--- a/Modules/_ctypes/cfield.c
+++ b/Modules/_ctypes/cfield.c
@@ -430,12 +430,8 @@
 #define LOW_BIT(x)  ((x) & 0xFFFF)
 #define NUM_BITS(x) ((x) >> 16)
 
-/* This seems nore a compiler issue than a Windows/non-Windows one */
-#ifdef MS_WIN32
-#  define BIT_MASK(size) ((1 << NUM_BITS(size))-1)
-#else
-#  define BIT_MASK(size) ((1LL << NUM_BITS(size))-1)
-#endif
+/* Doesn't work if NUM_BITS(size) == 0, but it never happens in SET() call. */
+#define BIT_MASK(type, size) (((((type)1 << (NUM_BITS(size) - 1)) - 1) << 1) + 1)
 
 /* This macro CHANGES the first parameter IN PLACE. For proper sign handling,
    we must first shift left, then right.
@@ -447,10 +443,10 @@
     }
 
 /* This macro RETURNS the first parameter with the bit field CHANGED. */
-#define SET(x, v, size)                                                 \
+#define SET(type, x, v, size)                                                 \
     (NUM_BITS(size) ?                                                   \
-     ( ( x & ~(BIT_MASK(size) << LOW_BIT(size)) ) | ( (v & BIT_MASK(size)) << LOW_BIT(size) ) ) \
-     : v)
+     ( ( (type)x & ~(BIT_MASK(type, size) << LOW_BIT(size)) ) | ( ((type)v & BIT_MASK(type, size)) << LOW_BIT(size) ) ) \
+     : (type)v)
 
 /* byte swapping macros */
 #define SWAP_2(v)                               \
@@ -522,7 +518,7 @@
     long val;
     if (get_long(value, &val) < 0)
         return NULL;
-    *(signed char *)ptr = (signed char)SET(*(signed char *)ptr, (signed char)val, size);
+    *(signed char *)ptr = SET(signed char, *(signed char *)ptr, val, size);
     _RET(value);
 }
 
@@ -541,8 +537,7 @@
     unsigned long val;
     if (get_ulong(value, &val) < 0)
         return NULL;
-    *(unsigned char *)ptr = (unsigned char)SET(*(unsigned char*)ptr,
-                                               (unsigned short)val, size);
+    *(unsigned char *)ptr = SET(unsigned char, *(unsigned char*)ptr, val, size);
     _RET(value);
 }
 
@@ -563,7 +558,7 @@
     if (get_long(value, &val) < 0)
         return NULL;
     memcpy(&x, ptr, sizeof(x));
-    x = SET(x, (short)val, size);
+    x = SET(short, x, val, size);
     memcpy(ptr, &x, sizeof(x));
     _RET(value);
 }
@@ -578,7 +573,7 @@
         return NULL;
     memcpy(&field, ptr, sizeof(field));
     field = SWAP_2(field);
-    field = SET(field, (short)val, size);
+    field = SET(short, field, val, size);
     field = SWAP_2(field);
     memcpy(ptr, &field, sizeof(field));
     _RET(value);
@@ -611,7 +606,7 @@
     if (get_ulong(value, &val) < 0)
         return NULL;
     memcpy(&x, ptr, sizeof(x));
-    x = SET(x, (unsigned short)val, size);
+    x = SET(unsigned short, x, val, size);
     memcpy(ptr, &x, sizeof(x));
     _RET(value);
 }
@@ -625,7 +620,7 @@
         return NULL;
     memcpy(&field, ptr, sizeof(field));
     field = SWAP_2(field);
-    field = SET(field, (unsigned short)val, size);
+    field = SET(unsigned short, field, val, size);
     field = SWAP_2(field);
     memcpy(ptr, &field, sizeof(field));
     _RET(value);
@@ -659,7 +654,7 @@
     if (get_long(value, &val) < 0)
         return NULL;
     memcpy(&x, ptr, sizeof(x));
-    x = SET(x, (int)val, size);
+    x = SET(int, x, val, size);
     memcpy(ptr, &x, sizeof(x));
     _RET(value);
 }
@@ -673,7 +668,7 @@
         return NULL;
     memcpy(&field, ptr, sizeof(field));
     field = SWAP_INT(field);
-    field = SET(field, (int)val, size);
+    field = SET(int, field, val, size);
     field = SWAP_INT(field);
     memcpy(ptr, &field, sizeof(field));
     _RET(value);
@@ -760,7 +755,7 @@
     if (get_ulong(value, &val) < 0)
         return  NULL;
     memcpy(&x, ptr, sizeof(x));
-    x = SET(x, (unsigned int)val, size);
+    x = SET(unsigned int, x, val, size);
     memcpy(ptr, &x, sizeof(x));
     _RET(value);
 }
@@ -773,7 +768,7 @@
     if (get_ulong(value, &val) < 0)
         return  NULL;
     memcpy(&field, ptr, sizeof(field));
-    field = (unsigned int)SET(field, (unsigned int)val, size);
+    field = SET(unsigned int, field, (unsigned int)val, size);
     field = SWAP_INT(field);
     memcpy(ptr, &field, sizeof(field));
     _RET(value);
@@ -807,7 +802,7 @@
     if (get_long(value, &val) < 0)
         return NULL;
     memcpy(&x, ptr, sizeof(x));
-    x = SET(x, val, size);
+    x = SET(long, x, val, size);
     memcpy(ptr, &x, sizeof(x));
     _RET(value);
 }
@@ -821,7 +816,7 @@
         return NULL;
     memcpy(&field, ptr, sizeof(field));
     field = SWAP_LONG(field);
-    field = (long)SET(field, val, size);
+    field = SET(long, field, val, size);
     field = SWAP_LONG(field);
     memcpy(ptr, &field, sizeof(field));
     _RET(value);
@@ -855,7 +850,7 @@
     if (get_ulong(value, &val) < 0)
         return  NULL;
     memcpy(&x, ptr, sizeof(x));
-    x = SET(x, val, size);
+    x = SET(unsigned long, x, val, size);
     memcpy(ptr, &x, sizeof(x));
     _RET(value);
 }
@@ -869,7 +864,7 @@
         return  NULL;
     memcpy(&field, ptr, sizeof(field));
     field = SWAP_LONG(field);
-    field = (unsigned long)SET(field, val, size);
+    field = SET(unsigned long, field, val, size);
     field = SWAP_LONG(field);
     memcpy(ptr, &field, sizeof(field));
     _RET(value);
@@ -904,7 +899,7 @@
     if (get_longlong(value, &val) < 0)
         return NULL;
     memcpy(&x, ptr, sizeof(x));
-    x = SET(x, val, size);
+    x = SET(PY_LONG_LONG, x, val, size);
     memcpy(ptr, &x, sizeof(x));
     _RET(value);
 }
@@ -918,7 +913,7 @@
         return NULL;
     memcpy(&field, ptr, sizeof(field));
     field = SWAP_8(field);
-    field = (PY_LONG_LONG)SET(field, val, size);
+    field = SET(PY_LONG_LONG, field, val, size);
     field = SWAP_8(field);
     memcpy(ptr, &field, sizeof(field));
     _RET(value);
@@ -951,7 +946,7 @@
     if (get_ulonglong(value, &val) < 0)
         return NULL;
     memcpy(&x, ptr, sizeof(x));
-    x = SET(x, val, size);
+    x = SET(PY_LONG_LONG, x, val, size);
     memcpy(ptr, &x, sizeof(x));
     _RET(value);
 }
@@ -965,7 +960,7 @@
         return NULL;
     memcpy(&field, ptr, sizeof(field));
     field = SWAP_8(field);
-    field = (unsigned PY_LONG_LONG)SET(field, val, size);
+    field = SET(unsigned PY_LONG_LONG, field, val, size);
     field = SWAP_8(field);
     memcpy(ptr, &field, sizeof(field));
     _RET(value);
@@ -1002,12 +997,8 @@
     long double x;
 
     x = PyFloat_AsDouble(value);
-    if (x == -1 && PyErr_Occurred()) {
-        PyErr_Format(PyExc_TypeError,
-                     " float expected instead of %s instance",
-                     value->ob_type->tp_name);
+    if (x == -1 && PyErr_Occurred())
         return NULL;
-    }
     memcpy(ptr, &x, sizeof(long double));
     _RET(value);
 }
@@ -1026,12 +1017,8 @@
     double x;
 
     x = PyFloat_AsDouble(value);
-    if (x == -1 && PyErr_Occurred()) {
-        PyErr_Format(PyExc_TypeError,
-                     " float expected instead of %s instance",
-                     value->ob_type->tp_name);
+    if (x == -1 && PyErr_Occurred())
         return NULL;
-    }
     memcpy(ptr, &x, sizeof(double));
     _RET(value);
 }
@@ -1050,12 +1037,8 @@
     double x;
 
     x = PyFloat_AsDouble(value);
-    if (x == -1 && PyErr_Occurred()) {
-        PyErr_Format(PyExc_TypeError,
-                     " float expected instead of %s instance",
-                     value->ob_type->tp_name);
+    if (x == -1 && PyErr_Occurred())
         return NULL;
-    }
 #ifdef WORDS_BIGENDIAN
     if (_PyFloat_Pack8(x, (unsigned char *)ptr, 1))
         return NULL;
@@ -1082,12 +1065,8 @@
     float x;
 
     x = (float)PyFloat_AsDouble(value);
-    if (x == -1 && PyErr_Occurred()) {
-        PyErr_Format(PyExc_TypeError,
-                     " float expected instead of %s instance",
-                     value->ob_type->tp_name);
+    if (x == -1 && PyErr_Occurred())
         return NULL;
-    }
     memcpy(ptr, &x, sizeof(x));
     _RET(value);
 }
@@ -1106,12 +1085,8 @@
     float x;
 
     x = (float)PyFloat_AsDouble(value);
-    if (x == -1 && PyErr_Occurred()) {
-        PyErr_Format(PyExc_TypeError,
-                     " float expected instead of %s instance",
-                     value->ob_type->tp_name);
+    if (x == -1 && PyErr_Occurred())
         return NULL;
-    }
 #ifdef WORDS_BIGENDIAN
     if (_PyFloat_Pack4(x, (unsigned char *)ptr, 1))
         return NULL;
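The _ctypes field-setter hunks above thread the field's C type through the SET macro instead of casting the value at each call site. The sketch below is illustrative only — the real LOW_BIT/NUM_BITS/BIT_MASK/SET definitions live elsewhere in the same file and differ in detail — but it shows why the extra parameter helps: the mask, shift and splice stay in the field's own type instead of being promoted through int.

/* Illustrative sketch, not the actual _ctypes macros.  `size` is assumed to
   pack the bit offset in its low 16 bits and the bit count in the high 16
   bits, as the call sites above imply. */
#define LOW_BIT(s)        ((s) & 0xFFFF)
#define NUM_BITS(s)       ((s) >> 16)
#define BIT_MASK(type, s) (NUM_BITS(s) < (int)(sizeof(type) * 8)            \
                           ? (((type)1 << NUM_BITS(s)) - 1)                 \
                           : (type)~(type)0)

/* Doing the whole splice in `type` keeps e.g. an unsigned short bit field
   from being widened (and sign-mangled) through int first. */
#define SET(type, x, v, size)                                                \
    (NUM_BITS(size)                                                          \
     ? (type)(((x) & ~(BIT_MASK(type, size) << LOW_BIT(size)))               \
              | (((type)(v) & BIT_MASK(type, size)) << LOW_BIT(size)))       \
     : (type)(v))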
diff -r 3d0686d90f55 Modules/_ctypes/libffi_osx/x86/darwin64.S
--- a/Modules/_ctypes/libffi_osx/x86/darwin64.S
+++ b/Modules/_ctypes/libffi_osx/x86/darwin64.S
@@ -45,6 +45,7 @@
 _ffi_call_unix64:
 LUW0:
  movq  (%rsp), %r10    /* Load return address.  */
+ movq  %rdi, %r12    /* Save a copy of the register area. */
  leaq  (%rdi, %rsi), %rax  /* Find local stack base.  */
  movq  %rdx, (%rax)    /* Save flags.  */
  movq  %rcx, 8(%rax)   /* Save raddr.  */
@@ -52,7 +53,8 @@
  movq  %r10, 24(%rax)    /* Relocate return address.  */
  movq  %rax, %rbp    /* Finalize local stack frame.  */
 LUW1:
- movq  %rdi, %r10    /* Save a copy of the register area. */
+ /* movq  %rdi, %r10    // Save a copy of the register area. */
+ movq  %r12, %r10
  movq  %r8, %r11   /* Save a copy of the target fn.  */
  movl  %r9d, %eax    /* Set number of SSE registers.  */
 
@@ -255,7 +257,7 @@
  ret
  .align  3
 Lld_int8:
- movzbl  -24(%rsp), %eax
+ movzbl  -24(%rsp), %eax 
  ret
  .align  3
 Lld_int16:
diff -r 3d0686d90f55 Modules/_ctypes/libffi_osx/x86/x86-darwin.S
--- a/Modules/_ctypes/libffi_osx/x86/x86-darwin.S
+++ b/Modules/_ctypes/libffi_osx/x86/x86-darwin.S
@@ -198,8 +198,12 @@
 	je	Lcls_retldouble
 	cmpl	$FFI_TYPE_SINT64, %eax
 	je	Lcls_retllong
+	cmpl	$FFI_TYPE_UINT8, %eax
+	je	Lcls_retstruct1
 	cmpl	$FFI_TYPE_SINT8, %eax
 	je	Lcls_retstruct1
+	cmpl	$FFI_TYPE_UINT16, %eax
+	je	Lcls_retstruct2
 	cmpl	$FFI_TYPE_SINT16, %eax
 	je	Lcls_retstruct2
 	cmpl	$FFI_TYPE_STRUCT, %eax
diff -r 3d0686d90f55 Modules/_ctypes/libffi_osx/x86/x86-ffi64.c
--- a/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c
+++ b/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c
@@ -152,12 +152,42 @@
 		case FFI_TYPE_UINT64:
 		case FFI_TYPE_SINT64:
 		case FFI_TYPE_POINTER:
+#if 0
 			if (byte_offset + type->size <= 4)
 				classes[0] = X86_64_INTEGERSI_CLASS;
 			else
 				classes[0] = X86_64_INTEGER_CLASS;
 
 			return 1;
+#else
+		{
+			int size = byte_offset + type->size;
+
+			if (size <= 4)
+			{
+				classes[0] = X86_64_INTEGERSI_CLASS;
+				return 1;
+			}
+			else if (size <= 8)
+			{
+				classes[0] = X86_64_INTEGER_CLASS;
+				return 1;
+			}
+			else if (size <= 12)
+			{
+				classes[0] = X86_64_INTEGER_CLASS;
+				classes[1] = X86_64_INTEGERSI_CLASS;
+				return 2;
+			}
+			else if (size <= 16)
+			{
+				classes[0] = classes[1] = X86_64_INTEGERSI_CLASS;
+				return 2;
+			}
+			else
+				FFI_ASSERT (0);
+		}
+#endif
 
 		case FFI_TYPE_FLOAT:
 			if (byte_offset == 0)
@@ -213,6 +243,21 @@
 				byte_offset += (*ptr)->size;
 			}
 
+			if (words > 2)
+			{
+				/* When size > 16 bytes, if the first one isn't
+			           X86_64_SSE_CLASS or any other ones aren't
+			           X86_64_SSEUP_CLASS, everything should be passed in
+			           memory.  */
+				if (classes[0] != X86_64_SSE_CLASS)
+					return 0;
+
+				for (i = 1; i < words; i++)
+					if (classes[i] != X86_64_SSEUP_CLASS)
+						return 0;
+			}
+
+
 			/* Final merger cleanup.  */
 			for (i = 0; i < words; i++)
 			{
@@ -224,13 +269,20 @@
 				/*	The X86_64_SSEUP_CLASS should be always preceded by
 					X86_64_SSE_CLASS.  */
 				if (classes[i] == X86_64_SSEUP_CLASS
-					&& (i == 0 || classes[i - 1] != X86_64_SSE_CLASS))
+					&& classes[i - 1] != X86_64_SSE_CLASS
+					&& classes[i - 1] != X86_64_SSEUP_CLASS)
+				{
+					FFI_ASSERT(i != 0);
 					classes[i] = X86_64_SSE_CLASS;
+				}
 
 				/*  X86_64_X87UP_CLASS should be preceded by X86_64_X87_CLASS.  */
 				if (classes[i] == X86_64_X87UP_CLASS
-					&& (i == 0 || classes[i - 1] != X86_64_X87_CLASS))
+					&& classes[i - 1] != X86_64_X87_CLASS)
+				{
+					FFI_ASSERT(i != 0);
 					classes[i] = X86_64_SSE_CLASS;
+				}
 			}
 
 			return words;
@@ -369,6 +421,7 @@
 
 	cif->flags = flags;
 	cif->bytes = bytes;
+	cif->bytes = ALIGN(bytes,8);
 
 	return FFI_OK;
 }
@@ -449,7 +502,61 @@
 					case X86_64_INTEGER_CLASS:
 					case X86_64_INTEGERSI_CLASS:
 						reg_args->gpr[gprcount] = 0;
-						memcpy (&reg_args->gpr[gprcount], a, size < 8 ? size : 8);
+						switch (arg_types[i]->type) {
+						case FFI_TYPE_SINT8:
+						   {
+							int8_t shortval = *(int8_t*)a;
+							int64_t  actval = (int64_t)shortval;
+							reg_args->gpr[gprcount] = actval;
+							/*memcpy (&reg_args->gpr[gprcount], &actval, 8);*/
+							break;
+						   }
+
+						case FFI_TYPE_SINT16:
+						   {
+							int16_t shortval = *(int16_t*)a;
+							int64_t  actval = (int64_t)shortval;
+							memcpy (&reg_args->gpr[gprcount], &actval, 8);
+							break;
+						   }
+
+						case FFI_TYPE_SINT32:
+						   {
+							int32_t shortval = *(int32_t*)a;
+							int64_t  actval = (int64_t)shortval;
+							memcpy (&reg_args->gpr[gprcount], &actval, 8);
+							break;
+						   }
+
+						case FFI_TYPE_UINT8:
+						   {
+							u_int8_t shortval = *(u_int8_t*)a;
+							u_int64_t  actval = (u_int64_t)shortval;
+							/*memcpy (&reg_args->gpr[gprcount], &actval, 8);*/
+							reg_args->gpr[gprcount] = actval;
+							break;
+						   }
+
+						case FFI_TYPE_UINT16:
+						   {
+							u_int16_t shortval = *(u_int16_t*)a;
+							u_int64_t  actval = (u_int64_t)shortval;
+							memcpy (&reg_args->gpr[gprcount], &actval, 8);
+							break;
+						   }
+
+						case FFI_TYPE_UINT32:
+						   {
+							u_int32_t shortval = *(u_int32_t*)a;
+							u_int64_t  actval = (u_int64_t)shortval;
+							memcpy (&reg_args->gpr[gprcount], &actval, 8);
+							break;
+						   }
+
+						default:
+							//memcpy (&reg_args->gpr[gprcount], a, size < 8 ? size : 8);
+							reg_args->gpr[gprcount] = *(int64_t*)a;
+						}
 						gprcount++;
 						break;
 
@@ -505,12 +612,15 @@
 	return FFI_OK;
 }
 
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wmissing-prototypes"
 int
 ffi_closure_unix64_inner(
 	ffi_closure*	closure,
 	void*			rvalue,
 	RegisterArgs*	reg_args,
 	char*			argp)
+#pragma clang diagnostic pop
 {
 	ffi_cif*	cif = closure->cif;
 	void**		avalue = alloca(cif->nargs * sizeof(void *));
@@ -621,4 +731,4 @@
 	return ret;
 }
 
-#endif /* __x86_64__ */
\ No newline at end of file
+#endif /* __x86_64__ */
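The ffi_call hunk above stops memcpy'ing only the low bytes of small integer arguments into the 64-bit register slots and widens them first. A minimal sketch of the underlying issue, with a made-up helper name for illustration:

#include <stdint.h>

/* Copying just the low byte of an int8_t -1 into a zeroed 64-bit slot
   yields 0x00000000000000FF; converting through int64_t first gives the
   sign-extended value the x86-64 calling convention expects. */
static uint64_t gpr_from_sint8(const void *arg)
{
    int8_t  narrow = *(const int8_t *)arg;   /* e.g. -1 */
    int64_t wide   = (int64_t)narrow;        /* sign-extended to 64 bits */
    return (uint64_t)wide;                   /* 0xFFFFFFFFFFFFFFFF */
}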
diff -r 3d0686d90f55 Modules/_ctypes/libffi_osx/x86/x86-ffi_darwin.c
--- a/Modules/_ctypes/libffi_osx/x86/x86-ffi_darwin.c
+++ b/Modules/_ctypes/libffi_osx/x86/x86-ffi_darwin.c
@@ -35,6 +35,8 @@
 /* ffi_prep_args is called by the assembly routine once stack space
  has been allocated for the function's arguments */
 
+void ffi_prep_args(char *stack, extended_cif *ecif);
+
 void ffi_prep_args(char *stack, extended_cif *ecif)
 {
     register unsigned int i;
@@ -433,4 +435,4 @@
 }
 
 #endif
-#endif	// __i386__
\ No newline at end of file
+#endif	// __i386__
diff -r 3d0686d90f55 Modules/_elementtree.c
--- a/Modules/_elementtree.c
+++ b/Modules/_elementtree.c
@@ -3114,8 +3114,13 @@
             expat_capi->size < sizeof(struct PyExpat_CAPI) ||
             expat_capi->MAJOR_VERSION != XML_MAJOR_VERSION ||
             expat_capi->MINOR_VERSION != XML_MINOR_VERSION ||
-            expat_capi->MICRO_VERSION != XML_MICRO_VERSION)
-            expat_capi = NULL;
+            expat_capi->MICRO_VERSION != XML_MICRO_VERSION) {
+            PyErr_SetString(PyExc_ImportError,
+                            "pyexpat version is incompatible");
+            return NULL;
+        }
+    } else {
+        return NULL;
     }
 #endif
 
diff -r 3d0686d90f55 Modules/_io/_iomodule.c
--- a/Modules/_io/_iomodule.c
+++ b/Modules/_io/_iomodule.c
@@ -58,7 +58,7 @@
 "\n"
 "At the top of the I/O hierarchy is the abstract base class IOBase. It\n"
 "defines the basic interface to a stream. Note, however, that there is no\n"
-"seperation between reading and writing to streams; implementations are\n"
+"separation between reading and writing to streams; implementations are\n"
 "allowed to throw an IOError if they do not support a given operation.\n"
 "\n"
 "Extending IOBase is RawIOBase which deals simply with the reading and\n"
@@ -264,9 +264,9 @@
 "\n"
 "* On output, if newline is None, any '\\n' characters written are\n"
 "  translated to the system default line separator, os.linesep. If\n"
-"  newline is '', no translation takes place. If newline is any of the\n"
-"  other legal values, any '\\n' characters written are translated to\n"
-"  the given string.\n"
+"  newline is '' or '\n', no translation takes place. If newline is any\n"
+"  of the other legal values, any '\\n' characters written are translated\n"
+"  to the given string.\n"
 "\n"
 "If closefd is False, the underlying file descriptor will be kept open\n"
 "when the file is closed. This does not work when a file name is given\n"
diff -r 3d0686d90f55 Modules/_io/_iomodule.h
--- a/Modules/_io/_iomodule.h
+++ b/Modules/_io/_iomodule.h
@@ -57,6 +57,11 @@
     int translated, int universal, PyObject *readnl,
     Py_UNICODE *start, Py_UNICODE *end, Py_ssize_t *consumed);
 
+/* Return 1 if an EnvironmentError with errno == EINTR is set (and then
+   clears the error indicator), 0 otherwise.
+   Should only be called when PyErr_Occurred() is true.
+*/
+extern int _PyIO_trap_eintr(void);
 
 #define DEFAULT_BUFFER_SIZE (8 * 1024)  /* bytes */
 
@@ -67,7 +72,7 @@
     PyObject *filename; /* Not used, but part of the IOError object */
     Py_ssize_t written;
 } PyBlockingIOErrorObject;
-PyAPI_DATA(PyObject *) PyExc_BlockingIOError;
+extern PyObject *PyExc_BlockingIOError;
 
 /*
  * Offset type for positioning.
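The declaration added above exposes bufferedio.c's EINTR trap to the other _io C files; the hunks that follow all use it in the same retry shape. Schematically, this is the call-site idiom (taken from the bufferedio.c hunk below), not a new API:

/* Retry the raw call as long as the only failure was an interrupted
   system call that _PyIO_trap_eintr() has detected and cleared. */
do {
    res = PyObject_CallMethodObjArgs(self->raw, _PyIO_str_readinto, memobj, NULL);
} while (res == NULL && _PyIO_trap_eintr());
if (res == NULL)
    return -1;   /* a real error is still set */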
diff -r 3d0686d90f55 Modules/_io/bufferedio.c
--- a/Modules/_io/bufferedio.c
+++ b/Modules/_io/bufferedio.c
@@ -383,6 +383,17 @@
     Py_TYPE(self)->tp_free((PyObject *)self);
 }
 
+static PyObject *
+buffered_sizeof(buffered *self, void *unused)
+{
+    Py_ssize_t res;
+
+    res = sizeof(buffered);
+    if (self->buffer)
+        res += self->buffer_size;
+    return PyLong_FromSsize_t(res);
+}
+
 static int
 buffered_traverse(buffered *self, visitproc visit, void *arg)
 {
@@ -730,8 +741,8 @@
    clears the error indicator), 0 otherwise.
    Should only be called when PyErr_Occurred() is true.
 */
-static int
-_trap_eintr(void)
+int
+_PyIO_trap_eintr(void)
 {
     static PyObject *eintr_int = NULL;
     PyObject *typ, *val, *tb;
@@ -1314,7 +1325,7 @@
     */
     do {
         res = PyObject_CallMethodObjArgs(self->raw, _PyIO_str_readinto, memobj, NULL);
-    } while (res == NULL && _trap_eintr());
+    } while (res == NULL && _PyIO_trap_eintr());
     Py_DECREF(memobj);
     if (res == NULL)
         return -1;
@@ -1591,6 +1602,7 @@
     {"seek", (PyCFunction)buffered_seek, METH_VARARGS},
     {"tell", (PyCFunction)buffered_tell, METH_NOARGS},
     {"truncate", (PyCFunction)buffered_truncate, METH_VARARGS},
+    {"__sizeof__", (PyCFunction)buffered_sizeof, METH_NOARGS},
     {NULL, NULL}
 };
 
@@ -1742,7 +1754,7 @@
         errno = 0;
         res = PyObject_CallMethodObjArgs(self->raw, _PyIO_str_write, memobj, NULL);
         errnum = errno;
-    } while (res == NULL && _trap_eintr());
+    } while (res == NULL && _PyIO_trap_eintr());
     Py_DECREF(memobj);
     if (res == NULL)
         return -1;
@@ -1985,6 +1997,7 @@
     {"flush", (PyCFunction)buffered_flush, METH_NOARGS},
     {"seek", (PyCFunction)buffered_seek, METH_VARARGS},
     {"tell", (PyCFunction)buffered_tell, METH_NOARGS},
+    {"__sizeof__", (PyCFunction)buffered_sizeof, METH_NOARGS},
     {NULL, NULL}
 };
 
@@ -2384,6 +2397,7 @@
     {"readline", (PyCFunction)buffered_readline, METH_VARARGS},
     {"peek", (PyCFunction)buffered_peek, METH_VARARGS},
     {"write", (PyCFunction)bufferedwriter_write, METH_VARARGS},
+    {"__sizeof__", (PyCFunction)buffered_sizeof, METH_NOARGS},
     {NULL, NULL}
 };
 
diff -r 3d0686d90f55 Modules/_io/bytesio.c
--- a/Modules/_io/bytesio.c
+++ b/Modules/_io/bytesio.c
@@ -834,6 +834,17 @@
     return 0;
 }
 
+static PyObject *
+bytesio_sizeof(bytesio *self, void *unused)
+{
+    Py_ssize_t res;
+
+    res = sizeof(bytesio);
+    if (self->buf)
+        res += self->buf_size;
+    return PyLong_FromSsize_t(res);
+}
+
 static int
 bytesio_traverse(bytesio *self, visitproc visit, void *arg)
 {
@@ -876,6 +887,7 @@
     {"truncate",   (PyCFunction)bytesio_truncate,   METH_VARARGS, truncate_doc},
     {"__getstate__",  (PyCFunction)bytesio_getstate,  METH_NOARGS, NULL},
     {"__setstate__",  (PyCFunction)bytesio_setstate,  METH_O, NULL},
+    {"__sizeof__", (PyCFunction)bytesio_sizeof,     METH_NOARGS, NULL},
     {NULL, NULL}        /* sentinel */
 };
 
diff -r 3d0686d90f55 Modules/_io/fileio.c
--- a/Modules/_io/fileio.c
+++ b/Modules/_io/fileio.c
@@ -166,22 +166,15 @@
    directories, so we need a check.  */
 
 static int
-dircheck(fileio* self, const char *name)
+dircheck(fileio* self, PyObject *nameobj)
 {
 #if defined(HAVE_FSTAT) && defined(S_IFDIR) && defined(EISDIR)
     struct stat buf;
     if (self->fd < 0)
         return 0;
     if (fstat(self->fd, &buf) == 0 && S_ISDIR(buf.st_mode)) {
-        char *msg = strerror(EISDIR);
-        PyObject *exc;
-        if (internal_close(self))
-            return -1;
-
-        exc = PyObject_CallFunction(PyExc_IOError, "(iss)",
-                                    EISDIR, msg, name);
-        PyErr_SetObject(PyExc_IOError, exc);
-        Py_XDECREF(exc);
+        errno = EISDIR;
+        PyErr_SetFromErrnoWithFilenameObject(PyExc_IOError, nameobj);
         return -1;
     }
 #endif
@@ -224,12 +217,17 @@
     int flags = 0;
     int fd = -1;
     int closefd = 1;
+    int fd_is_own = 0;
 
     assert(PyFileIO_Check(oself));
     if (self->fd >= 0) {
-        /* Have to close the existing file first. */
-        if (internal_close(self) < 0)
-            return -1;
+        if (self->closefd) {
+            /* Have to close the existing file first. */
+            if (internal_close(self) < 0)
+                return -1;
+        }
+        else
+            self->fd = -1;
     }
 
     if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|si:fileio",
@@ -358,6 +356,7 @@
 #endif
             self->fd = open(name, flags, 0666);
         Py_END_ALLOW_THREADS
+        fd_is_own = 1;
         if (self->fd < 0) {
 #ifdef MS_WINDOWS
             if (widename != NULL)
@@ -367,9 +366,9 @@
                 PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
             goto error;
         }
-        if (dircheck(self, name) < 0)
-            goto error;
     }
+    if (dircheck(self, nameobj) < 0)
+        goto error;
 
 #if defined(MS_WINDOWS) || defined(__CYGWIN__)
     /* don't translate newlines (\r\n <=> \n) */
@@ -384,13 +383,8 @@
            end of file (otherwise, it might be done only on the
            first write()). */
         PyObject *pos = portable_lseek(self->fd, NULL, 2);
-        if (pos == NULL) {
-            if (closefd) {
-                close(self->fd);
-                self->fd = -1;
-            }
+        if (pos == NULL)
             goto error;
-        }
         Py_DECREF(pos);
     }
 
@@ -398,6 +392,8 @@
 
  error:
     ret = -1;
+    if (!fd_is_own)
+        self->fd = -1;
     if (self->fd >= 0)
         internal_close(self);
 
@@ -602,6 +598,13 @@
         if (n == 0)
             break;
         if (n < 0) {
+            if (errno == EINTR) {
+                if (PyErr_CheckSignals()) {
+                    Py_DECREF(result);
+                    return NULL;
+                }
+                continue;
+            }
             if (total > 0)
                 break;
             if (errno == EAGAIN) {
diff -r 3d0686d90f55 Modules/_io/iobase.c
--- a/Modules/_io/iobase.c
+++ b/Modules/_io/iobase.c
@@ -482,8 +482,14 @@
 
         if (has_peek) {
             PyObject *readahead = PyObject_CallMethod(self, "peek", "i", 1);
-            if (readahead == NULL)
+            if (readahead == NULL) {
+                /* NOTE: PyErr_SetFromErrno() calls PyErr_CheckSignals()
+                   when EINTR occurs so we needn't do it ourselves. */
+                if (_PyIO_trap_eintr()) {
+                    continue;
+                }
                 goto fail;
+            }
             if (!PyBytes_Check(readahead)) {
                 PyErr_Format(PyExc_IOError,
                              "peek() should have returned a bytes object, "
@@ -516,8 +522,14 @@
         }
 
         b = PyObject_CallMethod(self, "read", "n", nreadahead);
-        if (b == NULL)
+        if (b == NULL) {
+            /* NOTE: PyErr_SetFromErrno() calls PyErr_CheckSignals()
+               when EINTR occurs so we needn't do it ourselves. */
+            if (_PyIO_trap_eintr()) {
+                continue;
+            }
             goto fail;
+        }
         if (!PyBytes_Check(b)) {
             PyErr_Format(PyExc_IOError,
                          "read() should have returned a bytes object, "
@@ -826,6 +838,11 @@
         PyObject *data = PyObject_CallMethod(self, "read",
                                              "i", DEFAULT_BUFFER_SIZE);
         if (!data) {
+            /* NOTE: PyErr_SetFromErrno() calls PyErr_CheckSignals()
+               when EINTR occurs so we needn't do it ourselves. */
+            if (_PyIO_trap_eintr()) {
+                continue;
+            }
             Py_DECREF(chunks);
             return NULL;
         }
diff -r 3d0686d90f55 Modules/_io/textio.c
--- a/Modules/_io/textio.c
+++ b/Modules/_io/textio.c
@@ -622,15 +622,22 @@
     "errors determines the strictness of encoding and decoding (see the\n"
     "codecs.register) and defaults to \"strict\".\n"
     "\n"
-    "newline can be None, '', '\\n', '\\r', or '\\r\\n'.  It controls the\n"
-    "handling of line endings. If it is None, universal newlines is\n"
-    "enabled.  With this enabled, on input, the lines endings '\\n', '\\r',\n"
-    "or '\\r\\n' are translated to '\\n' before being returned to the\n"
-    "caller. Conversely, on output, '\\n' is translated to the system\n"
-    "default line seperator, os.linesep. If newline is any other of its\n"
-    "legal values, that newline becomes the newline when the file is read\n"
-    "and it is returned untranslated. On output, '\\n' is converted to the\n"
-    "newline.\n"
+    "newline controls how line endings are handled. It can be None, '',\n"
+    "'\\n', '\\r', and '\\r\\n'.  It works as follows:\n"
+    "\n"
+    "* On input, if newline is None, universal newlines mode is\n"
+    "  enabled. Lines in the input can end in '\\n', '\\r', or '\\r\\n', and\n"
+    "  these are translated into '\\n' before being returned to the\n"
+    "  caller. If it is '', universal newline mode is enabled, but line\n"
+    "  endings are returned to the caller untranslated. If it has any of\n"
+    "  the other legal values, input lines are only terminated by the given\n"
+    "  string, and the line ending is returned to the caller untranslated.\n"
+    "\n"
+    "* On output, if newline is None, any '\\n' characters written are\n"
+    "  translated to the system default line separator, os.linesep. If\n"
+    "  newline is '' or '\n', no translation takes place. If newline is any\n"
+    "  of the other legal values, any '\\n' characters written are translated\n"
+    "  to the given string.\n"
     "\n"
     "If line_buffering is True, a call to flush is implied when a call to\n"
     "write contains a newline character."
@@ -1039,8 +1046,11 @@
     res = PyObject_CallMethod(buffer, "seekable", NULL);
     if (res == NULL)
         goto error;
-    self->seekable = self->telling = PyObject_IsTrue(res);
+    r = PyObject_IsTrue(res);
     Py_DECREF(res);
+    if (r < 0)
+        goto error;
+    self->seekable = self->telling = r;
 
     self->has_read1 = PyObject_HasAttrString(buffer, "read1");
 
@@ -1541,8 +1551,14 @@
         /* Keep reading chunks until we have n characters to return */
         while (remaining > 0) {
             res = textiowrapper_read_chunk(self);
-            if (res < 0)
+            if (res < 0) {
+                /* NOTE: PyErr_SetFromErrno() calls PyErr_CheckSignals()
+                   when EINTR occurs so we needn't do it ourselves. */
+                if (_PyIO_trap_eintr()) {
+                    continue;
+                }
                 goto fail;
+            }
             if (res == 0)  /* EOF */
                 break;
             if (chunks == NULL) {
@@ -1701,8 +1717,14 @@
         while (!self->decoded_chars ||
                !PyUnicode_GET_SIZE(self->decoded_chars)) {
             res = textiowrapper_read_chunk(self);
-            if (res < 0)
+            if (res < 0) {
+                /* NOTE: PyErr_SetFromErrno() calls PyErr_CheckSignals()
+                   when EINTR occurs so we needn't do it ourselves. */
+                if (_PyIO_trap_eintr()) {
+                    continue;
+                }
                 goto error;
+            }
             if (res == 0)
                 break;
         }
diff -r 3d0686d90f55 Modules/_json.c
--- a/Modules/_json.c
+++ b/Modules/_json.c
@@ -634,7 +634,7 @@
 
             /* read key */
             if (str[idx] != '"') {
-                raise_errmsg("Expecting property name", pystr, idx);
+                raise_errmsg("Expecting property name enclosed in double quotes", pystr, idx);
                 goto bail;
             }
             key = scanstring_unicode(pystr, idx + 1, strict, &next_idx);
@@ -655,7 +655,7 @@
             /* skip whitespace between key and : delimiter, read :, skip whitespace */
             while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
             if (idx > end_idx || str[idx] != ':') {
-                raise_errmsg("Expecting : delimiter", pystr, idx);
+                raise_errmsg("Expecting ':' delimiter", pystr, idx);
                 goto bail;
             }
             idx++;
@@ -695,7 +695,7 @@
                 break;
             }
             else if (str[idx] != ',') {
-                raise_errmsg("Expecting , delimiter", pystr, idx);
+                raise_errmsg("Expecting ',' delimiter", pystr, idx);
                 goto bail;
             }
             idx++;
@@ -777,7 +777,7 @@
                 break;
             }
             else if (str[idx] != ',') {
-                raise_errmsg("Expecting , delimiter", pystr, idx);
+                raise_errmsg("Expecting ',' delimiter", pystr, idx);
                 goto bail;
             }
             idx++;
diff -r 3d0686d90f55 Modules/_math.c
--- a/Modules/_math.c
+++ b/Modules/_math.c
@@ -189,6 +189,27 @@
    significant loss of precision that arises from direct evaluation when x is
    small. */
 
+#ifdef HAVE_LOG1P
+
+double
+_Py_log1p(double x)
+{
+    /* Some platforms supply a log1p function but don't respect the sign of
+       zero:  log1p(-0.0) gives 0.0 instead of the correct result of -0.0.
+
+       To save fiddling with configure tests and platform checks, we handle the
+       special case of zero input directly on all platforms.
+    */
+    if (x == 0.0) {
+        return x;
+    }
+    else {
+        return log1p(x);
+    }
+}
+
+#else
+
 double
 _Py_log1p(double x)
 {
@@ -230,3 +251,5 @@
         return log(1.+x);
     }
 }
+
+#endif /* ifdef HAVE_LOG1P */
diff -r 3d0686d90f55 Modules/_math.h
--- a/Modules/_math.h
+++ b/Modules/_math.h
@@ -36,10 +36,6 @@
 #define m_expm1 _Py_expm1
 #endif
 
-#ifdef HAVE_LOG1P
-#define m_log1p log1p
-#else
-/* if the system doesn't have log1p, use the substitute
-   function defined in Modules/_math.c. */
+/* Use the substitute from _math.c on all platforms:
+   it includes workarounds for buggy handling of zeros. */
 #define m_log1p _Py_log1p
-#endif
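The net effect of the _math.c/_math.h change is that m_log1p always goes through the _Py_log1p wrapper, which special-cases zero so the sign of -0.0 survives even on a libm whose log1p does not respect it. A quick standalone check, assuming C99 <math.h> and a visible _Py_log1p prototype:

#include <math.h>
#include <stdio.h>

double _Py_log1p(double x);   /* provided by Modules/_math.c */

int main(void)
{
    double raw     = log1p(-0.0);      /* may come back as +0.0 on buggy platforms */
    double wrapped = _Py_log1p(-0.0);  /* zero input is returned unchanged: -0.0 */
    printf("libm signbit=%d, wrapper signbit=%d\n",
           signbit(raw) != 0, signbit(wrapped) != 0);
    return 0;
}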
diff -r 3d0686d90f55 Modules/_multiprocessing/socket_connection.c
--- a/Modules/_multiprocessing/socket_connection.c
+++ b/Modules/_multiprocessing/socket_connection.c
@@ -117,7 +117,7 @@
 conn_recv_string(ConnectionObject *conn, char *buffer,
                  size_t buflength, char **newbuffer, size_t maxlength)
 {
-    int res;
+    Py_ssize_t res;
     UINT32 ulength;
 
     *newbuffer = NULL;
@@ -132,20 +132,23 @@
     if (ulength > maxlength)
         return MP_BAD_MESSAGE_LENGTH;
 
-    if (ulength <= buflength) {
-        Py_BEGIN_ALLOW_THREADS
-        res = _conn_recvall(conn->handle, buffer, (size_t)ulength);
-        Py_END_ALLOW_THREADS
-        return res < 0 ? res : ulength;
-    } else {
-        *newbuffer = PyMem_Malloc((size_t)ulength);
-        if (*newbuffer == NULL)
+    if (ulength > buflength) {
+        *newbuffer = buffer = PyMem_Malloc((size_t)ulength);
+        if (buffer == NULL)
             return MP_MEMORY_ERROR;
-        Py_BEGIN_ALLOW_THREADS
-        res = _conn_recvall(conn->handle, *newbuffer, (size_t)ulength);
-        Py_END_ALLOW_THREADS
-        return res < 0 ? (Py_ssize_t)res : (Py_ssize_t)ulength;
     }
+
+    Py_BEGIN_ALLOW_THREADS
+    res = _conn_recvall(conn->handle, buffer, (size_t)ulength);
+    Py_END_ALLOW_THREADS
+
+    if (res >= 0) {
+        res = (Py_ssize_t)ulength;
+    } else if (*newbuffer != NULL) {
+        PyMem_Free(*newbuffer);
+        *newbuffer = NULL;
+    }
+    return res;
 }
 
 /*
diff -r 3d0686d90f55 Modules/_multiprocessing/win32_functions.c
--- a/Modules/_multiprocessing/win32_functions.c
+++ b/Modules/_multiprocessing/win32_functions.c
@@ -244,6 +244,7 @@
     Py_INCREF(&Win32Type);
 
     WIN32_CONSTANT(F_DWORD, ERROR_ALREADY_EXISTS);
+    WIN32_CONSTANT(F_DWORD, ERROR_NO_DATA);
     WIN32_CONSTANT(F_DWORD, ERROR_PIPE_BUSY);
     WIN32_CONSTANT(F_DWORD, ERROR_PIPE_CONNECTED);
     WIN32_CONSTANT(F_DWORD, ERROR_SEM_TIMEOUT);
diff -r 3d0686d90f55 Modules/_posixsubprocess.c
--- a/Modules/_posixsubprocess.c
+++ b/Modules/_posixsubprocess.c
@@ -175,8 +175,15 @@
  * chooses to break compatibility with all existing binaries.  Highly Unlikely.
  */
 struct linux_dirent {
+#if defined(__x86_64__) && defined(__ILP32__)
+   /* Support the wacky x32 ABI (fake 32-bit userspace speaking to x86_64
+    * kernel interfaces) - https://sites.google.com/site/x32abi/ */
+   unsigned long long d_ino;
+   unsigned long long d_off;
+#else
    unsigned long  d_ino;        /* Inode number */
    unsigned long  d_off;        /* Offset to next linux_dirent */
+#endif
    unsigned short d_reclen;     /* Length of this linux_dirent */
    char           d_name[256];  /* Filename (null-terminated) */
 };
@@ -202,7 +209,18 @@
     int fd_dir_fd;
     if (start_fd >= end_fd)
         return;
-        fd_dir_fd = open(FD_DIR, O_RDONLY | O_CLOEXEC, 0);
+#ifdef O_CLOEXEC
+    fd_dir_fd = open(FD_DIR, O_RDONLY | O_CLOEXEC, 0);
+#else
+    fd_dir_fd = open(FD_DIR, O_RDONLY, 0);
+#ifdef FD_CLOEXEC
+    {
+        int old = fcntl(fd_dir_fd, F_GETFD);
+        if (old != -1)
+            fcntl(fd_dir_fd, F_SETFD, old | FD_CLOEXEC);
+    }
+#endif
+#endif
     if (fd_dir_fd == -1) {
         /* No way to get a list of open fds. */
         _close_fds_by_brute_force(start_fd, end_fd, py_fds_to_keep);
@@ -507,6 +525,8 @@
         return NULL;
 
     close_fds = PyObject_IsTrue(py_close_fds);
+    if (close_fds < 0)
+        return NULL;
     if (close_fds && errpipe_write < 3) {  /* precondition */
         PyErr_SetString(PyExc_ValueError, "errpipe_write must be >= 3");
         return NULL;
@@ -546,8 +566,10 @@
     }
 
     exec_array = _PySequence_BytesToCharpArray(executable_list);
-    if (!exec_array)
+    if (!exec_array) {
+        Py_XDECREF(gc_module);
         return NULL;
+    }
 
     /* Convert args and env into appropriate arguments for exec() */
     /* These conversions are done in the parent process to avoid allocating
@@ -557,6 +579,8 @@
         /* Equivalent to:  */
         /*  tuple(PyUnicode_FSConverter(arg) for arg in process_args)  */
         fast_args = PySequence_Fast(process_args, "argv must be a tuple");
+        if (fast_args == NULL)
+            goto cleanup;
         num_args = PySequence_Fast_GET_SIZE(fast_args);
         converted_args = PyTuple_New(num_args);
         if (converted_args == NULL)
diff -r 3d0686d90f55 Modules/_sre.c
--- a/Modules/_sre.c
+++ b/Modules/_sre.c
@@ -1664,7 +1664,7 @@
 }
 
 static void*
-getstring(PyObject* string, Py_ssize_t* p_length, int* p_charsize)
+getstring(PyObject* string, Py_ssize_t* p_length, int* p_charsize, Py_buffer *view)
 {
     /* given a python object, return a data pointer, a length (in
        characters), and a character size.  return NULL if the object
@@ -1674,7 +1674,6 @@
     Py_ssize_t size, bytes;
     int charsize;
     void* ptr;
-    Py_buffer view;
 
     /* Unicode objects do not support the buffer API. So, get the data
        directly instead. */
@@ -1686,26 +1685,21 @@
     }
 
     /* get pointer to string buffer */
-    view.len = -1;
+    view->len = -1;
     buffer = Py_TYPE(string)->tp_as_buffer;
     if (!buffer || !buffer->bf_getbuffer ||
-        (*buffer->bf_getbuffer)(string, &view, PyBUF_SIMPLE) < 0) {
+        (*buffer->bf_getbuffer)(string, view, PyBUF_SIMPLE) < 0) {
             PyErr_SetString(PyExc_TypeError, "expected string or buffer");
             return NULL;
     }
 
     /* determine buffer size */
-    bytes = view.len;
-    ptr = view.buf;
-
-    /* Release the buffer immediately --- possibly dangerous
-       but doing something else would require some re-factoring
-    */
-    PyBuffer_Release(&view);
+    bytes = view->len;
+    ptr = view->buf;
 
     if (bytes < 0) {
         PyErr_SetString(PyExc_TypeError, "buffer has negative size");
-        return NULL;
+        goto err;
     }
 
     /* determine character size */
@@ -1719,7 +1713,7 @@
 #endif
     else {
         PyErr_SetString(PyExc_TypeError, "buffer size mismatch");
-        return NULL;
+        goto err;
     }
 
     *p_length = size;
@@ -1728,8 +1722,13 @@
     if (ptr == NULL) {
             PyErr_SetString(PyExc_ValueError,
                             "Buffer is NULL");
+            goto err;
     }
     return ptr;
+  err:
+    PyBuffer_Release(view);
+    view->buf = NULL;
+    return NULL;
 }
 
 LOCAL(PyObject*)
@@ -1747,20 +1746,21 @@
     state->lastmark = -1;
     state->lastindex = -1;
 
-    ptr = getstring(string, &length, &charsize);
+    state->buffer.buf = NULL;
+    ptr = getstring(string, &length, &charsize, &state->buffer);
     if (!ptr)
-        return NULL;
-
-	if (charsize == 1 && pattern->charsize > 1) {
-		PyErr_SetString(PyExc_TypeError,
+        goto err;
+
+    if (charsize == 1 && pattern->charsize > 1) {
+        PyErr_SetString(PyExc_TypeError,
 			"can't use a string pattern on a bytes-like object");
-		return NULL;
-	}
-	if (charsize > 1 && pattern->charsize == 1) {
-		PyErr_SetString(PyExc_TypeError,
+        goto err;
+    }
+    if (charsize > 1 && pattern->charsize == 1) {
+        PyErr_SetString(PyExc_TypeError,
 			"can't use a bytes pattern on a string-like object");
-		return NULL;
-	}
+        goto err;
+    }
 
     /* adjust boundaries */
     if (start < 0)
@@ -1797,11 +1797,17 @@
         state->lower = sre_lower;
 
     return string;
+  err:
+    if (state->buffer.buf)
+        PyBuffer_Release(&state->buffer);
+    return NULL;
 }
 
 LOCAL(void)
 state_fini(SRE_STATE* state)
 {
+    if (state->buffer.buf)
+        PyBuffer_Release(&state->buffer);
     Py_XDECREF(state->string);
     data_stack_dealloc(state);
 }
@@ -1863,6 +1869,8 @@
 {
     if (self->weakreflist != NULL)
         PyObject_ClearWeakRefs((PyObject *) self);
+    if (self->view.buf)
+        PyBuffer_Release(&self->view);
     Py_XDECREF(self->pattern);
     Py_XDECREF(self->groupindex);
     Py_XDECREF(self->indexgroup);
@@ -2297,6 +2305,7 @@
     Py_ssize_t i, b, e;
     int bint;
     int filter_is_callable;
+    Py_buffer view;
 
     if (PyCallable_Check(ptemplate)) {
         /* sub/subn takes either a function or a template */
@@ -2306,7 +2315,8 @@
     } else {
         /* if not callable, check if it's a literal string */
         int literal;
-        ptr = getstring(ptemplate, &n, &bint);
+        view.buf = NULL;
+        ptr = getstring(ptemplate, &n, &bint, &view);
         b = bint;
         if (ptr) {
             if (b == 1) {
@@ -2320,6 +2330,8 @@
             PyErr_Clear();
             literal = 0;
         }
+        if (view.buf)
+            PyBuffer_Release(&view);
         if (literal) {
             filter = ptemplate;
             Py_INCREF(filter);
@@ -2661,6 +2673,7 @@
     Py_ssize_t groups = 0;
     PyObject* groupindex = NULL;
     PyObject* indexgroup = NULL;
+
     if (!PyArg_ParseTuple(args, "OiO!|nOO", &pattern, &flags,
                           &PyList_Type, &code, &groups,
                           &groupindex, &indexgroup))
@@ -2675,6 +2688,7 @@
     self->pattern = NULL;
     self->groupindex = NULL;
     self->indexgroup = NULL;
+    self->view.buf = NULL;
 
     self->codesize = n;
 
@@ -2694,15 +2708,15 @@
         return NULL;
     }
 
-	if (pattern == Py_None)
-		self->charsize = -1;
-	else {
-		Py_ssize_t p_length;
-		if (!getstring(pattern, &p_length, &self->charsize)) {
-			Py_DECREF(self);
-			return NULL;
-		}
-	}
+    if (pattern == Py_None)
+        self->charsize = -1;
+    else {
+        Py_ssize_t p_length;
+        if (!getstring(pattern, &p_length, &self->charsize, &self->view)) {
+            Py_DECREF(self);
+            return NULL;
+        }
+    }
 
     Py_INCREF(pattern);
     self->pattern = pattern;
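A thread running through these _sre.c hunks: getstring() now fills a caller-owned Py_buffer and keeps it alive, instead of releasing the buffer immediately and handing back a pointer that could dangle. The lifetime rule, sketched as a small helper with simplified names (the real callers are state_init() and the pattern constructor above):

/* Schematic only: the view must stay held for as long as the returned
   pointer is used; the matching PyBuffer_Release() now happens in
   state_fini() / pattern dealloc. */
static void *
borrow_chars(PyObject *string, Py_ssize_t *length, int *charsize,
             Py_buffer *view)
{
    void *ptr;
    view->buf = NULL;                   /* "nothing acquired yet" marker */
    ptr = getstring(string, length, charsize, view);
    if (ptr == NULL)
        return NULL;                    /* getstring released any partial view */
    return ptr;                         /* valid while *view is held */
}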
diff -r 3d0686d90f55 Modules/_ssl.c
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -883,6 +883,7 @@
     int len;
     int verification;
     PyObject *binary_mode = Py_None;
+    int b;
 
     if (!PyArg_ParseTuple(args, "|O:peer_certificate", &binary_mode))
         return NULL;
@@ -890,7 +891,10 @@
     if (!self->peer_cert)
         Py_RETURN_NONE;
 
-    if (PyObject_IsTrue(binary_mode)) {
+    b = PyObject_IsTrue(binary_mode);
+    if (b < 0)
+        return NULL;
+    if (b) {
         /* return cert in DER-encoded format */
 
         unsigned char *bytes_buf = NULL;
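The peer_certificate hunk above is one instance of a pattern this patch applies in several modules (_posixsubprocess, itertools, parser): PyObject_IsTrue() can fail, so its result must be checked for -1 before being used as a boolean. The schematic call site, with placeholder names:

int b = PyObject_IsTrue(flag_obj);      /* flag_obj: any Python object */
if (b < 0)
    return NULL;        /* __bool__/__len__ raised; propagate the exception */
if (b) {
    /* truthy branch */
}
else {
    /* falsy branch */
}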
diff -r 3d0686d90f55 Modules/_struct.c
--- a/Modules/_struct.c
+++ b/Modules/_struct.c
@@ -1665,6 +1665,18 @@
     return PyLong_FromSsize_t(self->s_size);
 }
 
+PyDoc_STRVAR(s_sizeof__doc__,
+"S.__sizeof__() -> size of S in memory, in bytes");
+
+static PyObject *
+s_sizeof(PyStructObject *self, void *unused)
+{
+    Py_ssize_t size;
+
+    size = sizeof(PyStructObject) + sizeof(formatcode) * (self->s_len + 1);
+    return PyLong_FromSsize_t(size);
+}
+
 /* List of functions */
 
 static struct PyMethodDef s_methods[] = {
@@ -1673,6 +1685,7 @@
     {"unpack",          s_unpack,       METH_O, s_unpack__doc__},
     {"unpack_from",     (PyCFunction)s_unpack_from, METH_VARARGS|METH_KEYWORDS,
                     s_unpack_from__doc__},
+    {"__sizeof__",      (PyCFunction)s_sizeof, METH_NOARGS, s_sizeof__doc__},
     {NULL,       NULL}          /* sentinel */
 };
 
diff -r 3d0686d90f55 Modules/_threadmodule.c
--- a/Modules/_threadmodule.c
+++ b/Modules/_threadmodule.c
@@ -994,14 +994,17 @@
             PyErr_Clear();
         else {
             PyObject *file;
+            PyObject *exc, *value, *tb;
             PySys_WriteStderr(
                 "Unhandled exception in thread started by ");
+            PyErr_Fetch(&exc, &value, &tb);
             file = PySys_GetObject("stderr");
             if (file != NULL && file != Py_None)
                 PyFile_WriteObject(boot->func, file, 0);
             else
                 PyObject_Print(boot->func, stderr, 0);
             PySys_WriteStderr("\n");
+            PyErr_Restore(exc, value, tb);
             PyErr_PrintEx(0);
         }
     }
diff -r 3d0686d90f55 Modules/_tkinter.c
--- a/Modules/_tkinter.c
+++ b/Modules/_tkinter.c
@@ -3135,8 +3135,10 @@
 
     PyDict_SetItemString(d, "TkappType", (PyObject *)&Tkapp_Type);
 
-    if (PyType_Ready(&Tktt_Type) < 0)
+    if (PyType_Ready(&Tktt_Type) < 0) {
+        Py_DECREF(m);
         return NULL;
+    }
     PyDict_SetItemString(d, "TkttType", (PyObject *)&Tktt_Type);
 
     Py_TYPE(&PyTclObject_Type) = &PyType_Type;
diff -r 3d0686d90f55 Modules/arraymodule.c
--- a/Modules/arraymodule.c
+++ b/Modules/arraymodule.c
@@ -1510,6 +1510,19 @@
 an array of some other type.");
 
 
+static PyObject *
+array_sizeof(arrayobject *self, PyObject *unused)
+{
+    Py_ssize_t res;
+    res = sizeof(arrayobject) + self->allocated * self->ob_descr->itemsize;
+    return PyLong_FromSsize_t(res);
+}
+
+PyDoc_STRVAR(sizeof_doc,
+"__sizeof__() -> int\n\
+\n\
+Size of the array in memory, in bytes.");
+
 
 /*********************** Pickling support ************************/
 
@@ -2077,6 +2090,8 @@
      tobytes_doc},
     {"tounicode",   (PyCFunction)array_tounicode,       METH_NOARGS,
      tounicode_doc},
+    {"__sizeof__",      (PyCFunction)array_sizeof,      METH_NOARGS,
+     sizeof_doc},
     {NULL,              NULL}           /* sentinel */
 };
 
diff -r 3d0686d90f55 Modules/gcmodule.c
--- a/Modules/gcmodule.c
+++ b/Modules/gcmodule.c
@@ -116,6 +116,46 @@
     http://mail.python.org/pipermail/python-dev/2008-June/080579.html
 */
 
+/*
+   NOTE: about untracking of mutable objects.
+   
+   Certain types of container cannot participate in a reference cycle, and
+   so do not need to be tracked by the garbage collector. Untracking these
+   objects reduces the cost of garbage collections. However, determining
+   which objects may be untracked is not free, and the costs must be
+   weighed against the benefits for garbage collection.
+
+   There are two possible strategies for when to untrack a container:
+
+   i) When the container is created.
+   ii) When the container is examined by the garbage collector.
+
+   Tuples containing only immutable objects (integers, strings etc, and
+   recursively, tuples of immutable objects) do not need to be tracked.
+   The interpreter creates a large number of tuples, many of which will
+   not survive until garbage collection. It is therefore not worthwhile
+   to untrack eligible tuples at creation time.
+
+   Instead, all tuples except the empty tuple are tracked when created. 
+   During garbage collection it is determined whether any surviving tuples 
+   can be untracked. A tuple can be untracked if all of its contents are 
+   already not tracked. Tuples are examined for untracking in all garbage 
+   collection cycles. It may take more than one cycle to untrack a tuple.
+
+   Dictionaries containing only immutable objects also do not need to be
+   tracked. Dictionaries are untracked when created. If a tracked item is
+   inserted into a dictionary (either as a key or value), the dictionary
+   becomes tracked. During a full garbage collection (all generations),
+   the collector will untrack any dictionaries whose contents are not
+   tracked.
+
+   The module provides the python function is_tracked(obj), which returns
+   the CURRENT tracking status of the object. Subsequent garbage
+   collections may change the tracking status of the object.
+   
+   Untracking of certain containers was introduced in issue #4688, and 
+   the algorithm was refined in response to issue #14775.
+*/
 
 /* set for debugging information */
 #define DEBUG_STATS             (1<<0) /* print collection statistics */
@@ -437,9 +477,6 @@
             if (PyTuple_CheckExact(op)) {
                 _PyTuple_MaybeUntrack(op);
             }
-            else if (PyDict_CheckExact(op)) {
-                _PyDict_MaybeUntrack(op);
-            }
         }
         else {
             /* This *may* be unreachable.  To make progress,
@@ -457,6 +494,20 @@
     }
 }
 
+/* Try to untrack all currently tracked dictionaries */
+static void
+untrack_dicts(PyGC_Head *head)
+{
+    PyGC_Head *next, *gc = head->gc.gc_next;
+    while (gc != head) {
+        PyObject *op = FROM_GC(gc);
+        next = gc->gc.gc_next;
+        if (PyDict_CheckExact(op))
+            _PyDict_MaybeUntrack(op);
+        gc = next;
+    }
+}
+
 /* Return true if object has a finalization method. */
 static int
 has_finalizer(PyObject *op)
@@ -857,6 +908,9 @@
         gc_list_merge(young, old);
     }
     else {
+        /* We only untrack dicts in full collections, to avoid quadratic
+           dict build-up. See issue #14775. */
+        untrack_dicts(young);
         long_lived_pending = 0;
         long_lived_total = gc_list_size(young);
     }
diff -r 3d0686d90f55 Modules/getaddrinfo.c
--- a/Modules/getaddrinfo.c
+++ b/Modules/getaddrinfo.c
@@ -430,7 +430,7 @@
                 break;
 #ifdef ENABLE_IPV6
             case AF_INET6:
-                pfx = ((struct in6_addr *)pton)->s6_addr8[0];
+                pfx = ((struct in6_addr *)pton)->s6_addr[0];
                 if (pfx == 0 || pfx == 0xfe || pfx == 0xff)
                     pai->ai_flags &= ~AI_CANONNAME;
                 break;
diff -r 3d0686d90f55 Modules/getnameinfo.c
--- a/Modules/getnameinfo.c
+++ b/Modules/getnameinfo.c
@@ -161,7 +161,7 @@
         break;
 #ifdef ENABLE_IPV6
     case AF_INET6:
-        pfx = ((struct sockaddr_in6 *)sa)->sin6_addr.s6_addr8[0];
+        pfx = ((struct sockaddr_in6 *)sa)->sin6_addr.s6_addr[0];
         if (pfx == 0 || pfx == 0xfe || pfx == 0xff)
             flags |= NI_NUMERICHOST;
         break;
diff -r 3d0686d90f55 Modules/itertoolsmodule.c
--- a/Modules/itertoolsmodule.c
+++ b/Modules/itertoolsmodule.c
@@ -903,11 +903,13 @@
         }
         ok = PyObject_IsTrue(good);
         Py_DECREF(good);
-        if (!ok) {
+        if (ok == 0) {
             lz->start = 1;
             return item;
         }
         Py_DECREF(item);
+        if (ok < 0)
+            return NULL;
     }
 }
 
@@ -1043,10 +1045,11 @@
     }
     ok = PyObject_IsTrue(good);
     Py_DECREF(good);
-    if (ok)
+    if (ok > 0)
         return item;
     Py_DECREF(item);
-    lz->stop = 1;
+    if (ok == 0)
+        lz->stop = 1;
     return NULL;
 }
 
@@ -2959,9 +2962,11 @@
             ok = PyObject_IsTrue(good);
             Py_DECREF(good);
         }
-        if (!ok)
+        if (ok == 0)
             return item;
         Py_DECREF(item);
+        if (ok < 0)
+            return NULL;
     }
 }
 
diff -r 3d0686d90f55 Modules/main.c
--- a/Modules/main.c
+++ b/Modules/main.c
@@ -616,7 +616,7 @@
        script. */
     if ((p = Py_GETENV("PYTHONEXECUTABLE")) && *p != '\0') {
         wchar_t* buffer;
-        size_t len = strlen(p);
+        size_t len = strlen(p) + 1;
         size_t r;
 
         buffer = malloc(len * sizeof(wchar_t));
@@ -673,7 +673,7 @@
         sts = run_command(command, &cf);
         free(command);
     } else if (module) {
-        sts = RunModule(module, 1);
+        sts = (RunModule(module, 1) != 0);
     }
     else {
 
diff -r 3d0686d90f55 Modules/mathmodule.c
--- a/Modules/mathmodule.c
+++ b/Modules/mathmodule.c
@@ -694,13 +694,13 @@
         return NULL;
     }
     if (Py_IS_INFINITY(r) && Py_IS_FINITE(x)) {
-                    if (can_overflow)
-                            PyErr_SetString(PyExc_OverflowError,
-                                    "math range error"); /* overflow */
-            else
-                PyErr_SetString(PyExc_ValueError,
-                    "math domain error"); /* singularity */
-            return NULL;
+        if (can_overflow)
+            PyErr_SetString(PyExc_OverflowError,
+                            "math range error"); /* overflow */
+        else
+            PyErr_SetString(PyExc_ValueError,
+                            "math domain error"); /* singularity */
+        return NULL;
     }
     if (Py_IS_FINITE(r) && errno && is_error(r))
         /* this branch unnecessary on most platforms */
diff -r 3d0686d90f55 Modules/parsermodule.c
--- a/Modules/parsermodule.c
+++ b/Modules/parsermodule.c
@@ -167,6 +167,7 @@
 
 
 static void parser_free(PyST_Object *st);
+static PyObject* parser_sizeof(PyST_Object *, void *);
 static PyObject* parser_richcompare(PyObject *left, PyObject *right, int op);
 static PyObject* parser_compilest(PyST_Object *, PyObject *, PyObject *);
 static PyObject* parser_isexpr(PyST_Object *, PyObject *, PyObject *);
@@ -187,7 +188,8 @@
         PyDoc_STR("Creates a list-tree representation of this ST.")},
     {"totuple",         (PyCFunction)parser_st2tuple,   PUBLIC_METHOD_TYPE,
         PyDoc_STR("Creates a tuple-tree representation of this ST.")},
-
+    {"__sizeof__",      (PyCFunction)parser_sizeof,     METH_NOARGS,
+        PyDoc_STR("Returns size in memory, in bytes.")},
     {NULL, NULL, 0, NULL}
 };
 
@@ -361,6 +363,15 @@
     PyObject_Del(st);
 }
 
+static PyObject *
+parser_sizeof(PyST_Object *st, void *unused)
+{
+    Py_ssize_t res;
+
+    res = sizeof(PyST_Object) + _PyNode_SizeOf(st->st_node);
+    return PyLong_FromSsize_t(res);
+}
+
 
 /*  parser_st2tuple(PyObject* self, PyObject* args, PyObject* kw)
  *
@@ -390,10 +401,14 @@
         int lineno = 0;
         int col_offset = 0;
         if (line_option != NULL) {
-            lineno = (PyObject_IsTrue(line_option) != 0) ? 1 : 0;
+            lineno = PyObject_IsTrue(line_option);
+            if (lineno < 0)
+                return NULL;
         }
         if (col_option != NULL) {
-            col_offset = (PyObject_IsTrue(col_option) != 0) ? 1 : 0;
+            col_offset = PyObject_IsTrue(col_option);
+            if (col_offset < 0)
+                return NULL;
         }
         /*
          *  Convert ST into a tuple representation.  Use Guido's function,
@@ -433,10 +448,14 @@
         int lineno = 0;
         int col_offset = 0;
         if (line_option != 0) {
-            lineno = PyObject_IsTrue(line_option) ? 1 : 0;
+            lineno = PyObject_IsTrue(line_option);
+            if (lineno < 0)
+                return NULL;
         }
-        if (col_option != NULL) {
-            col_offset = (PyObject_IsTrue(col_option) != 0) ? 1 : 0;
+        if (col_option != 0) {
+            col_offset = PyObject_IsTrue(col_option);
+            if (col_offset < 0)
+                return NULL;
         }
         /*
          *  Convert ST into a tuple representation.  Use Guido's function,
@@ -938,6 +957,7 @@
 #define validate_doublestar(ch) validate_terminal(ch, DOUBLESTAR, "**")
 #define validate_dot(ch)        validate_terminal(ch,        DOT, ".")
 #define validate_at(ch)         validate_terminal(ch,         AT, "@")
+#define validate_rarrow(ch)     validate_terminal(ch,     RARROW, "->")
 #define validate_name(ch, str)  validate_terminal(ch,       NAME, str)
 
 #define VALIDATER(n)    static int validate_##n(node *tree)
@@ -953,7 +973,8 @@
 VALIDATER(return_stmt);         VALIDATER(raise_stmt);
 VALIDATER(import_stmt);         VALIDATER(import_stmt);
 VALIDATER(import_name);         VALIDATER(yield_stmt);
-VALIDATER(global_stmt);         VALIDATER(assert_stmt);
+VALIDATER(global_stmt);         VALIDATER(nonlocal_stmt);
+VALIDATER(assert_stmt);
 VALIDATER(compound_stmt);       VALIDATER(test_or_star_expr);
 VALIDATER(while);               VALIDATER(for);
 VALIDATER(try);                 VALIDATER(except_clause);
@@ -1226,68 +1247,68 @@
     return 0;
 }
 
-/* '*' vfpdef (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef
+/* '*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef
  * ..or tfpdef in place of vfpdef. vfpdef: NAME; tfpdef: NAME [':' test]
  */
 static int
 validate_varargslist_trailer(node *tree, int start)
 {
     int nch = NCH(tree);
-    int res = 0, i;
-    int sym;
+    int res = 0;
 
     if (nch <= start) {
         err_string("expected variable argument trailer for varargslist");
         return 0;
     }
-    sym = TYPE(CHILD(tree, start));
-    if (sym == STAR) {
+    if (TYPE(CHILD(tree, start)) == STAR) {
         /*
-         * '*' vfpdef (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef
+         * '*' [vfpdef]
          */
-        if (nch-start == 2)
-            res = validate_vfpdef(CHILD(tree, start+1));
-        else if (nch-start == 5 && TYPE(CHILD(tree, start+2)) == COMMA)
-            res = (validate_vfpdef(CHILD(tree, start+1))
-                   && validate_comma(CHILD(tree, start+2))
-                   && validate_doublestar(CHILD(tree, start+3))
-                   && validate_vfpdef(CHILD(tree, start+4)));
+        res = validate_star(CHILD(tree, start++));
+        if (res && start < nch && (TYPE(CHILD(tree, start)) == vfpdef ||
+                                   TYPE(CHILD(tree, start)) == tfpdef))
+            res = validate_vfpdef(CHILD(tree, start++));
+        /*
+         * (',' vfpdef ['=' test])*
+         */
+        while (res && start + 1 < nch && (
+                   TYPE(CHILD(tree, start + 1)) == vfpdef ||
+                   TYPE(CHILD(tree, start + 1)) == tfpdef)) {
+            res = (validate_comma(CHILD(tree, start++))
+                   && validate_vfpdef(CHILD(tree, start++)));
+            if (res && start + 1 < nch && TYPE(CHILD(tree, start)) == EQUAL)
+                res = (validate_equal(CHILD(tree, start++))
+                       && validate_test(CHILD(tree, start++)));
+        }
+        /*
+         * [',' '**' vfpdef]
+         */
+        if (res && start + 2 < nch && TYPE(CHILD(tree, start+1)) == DOUBLESTAR)
+            res = (validate_comma(CHILD(tree, start++))
+                   && validate_doublestar(CHILD(tree, start++))
+                   && validate_vfpdef(CHILD(tree, start++)));
+    }
+    else if (TYPE(CHILD(tree, start)) == DOUBLESTAR) {
+        /*
+         * '**' vfpdef
+         */
+        if (start + 1 < nch)
+            res = (validate_doublestar(CHILD(tree, start++))
+                   && validate_vfpdef(CHILD(tree, start++)));
         else {
-            /* skip over vfpdef (',' vfpdef ['=' test])*  */
-            i = start + 1;
-            if (TYPE(CHILD(tree, i)) == vfpdef ||
-                TYPE(CHILD(tree, i)) == tfpdef) { /* skip over vfpdef or tfpdef */
-                i += 1;
-            }
-            while (res && i+1 < nch) { /* validate  (',' vfpdef ['=' test])* */
-                res = validate_comma(CHILD(tree, i));
-                if (TYPE(CHILD(tree, i+1)) == DOUBLESTAR)
-                    break;
-                res = res && validate_vfpdef(CHILD(tree, i+1));
-                if (res && i+2 < nch && TYPE(CHILD(tree, i+2)) == EQUAL) {
-                    res = res && (i+3 < nch)
-                          && validate_test(CHILD(tree, i+3));
-                    i += 4;
-                }
-                else {
-                    i += 2;
-                }
-            }
-            /* [',' '**' vfpdef] */
-            if (res && i+1 < nch && TYPE(CHILD(tree, i+1)) == DOUBLESTAR) {
-                res = validate_vfpdef(CHILD(tree, i+2));
-            }
+            res = 0;
+            err_string("expected vfpdef after ** in varargslist trailer");
         }
     }
-    else if (sym == DOUBLESTAR) {
-        /*
-         *  '**' NAME
-         */
-        if (nch-start == 2)
-            res = validate_vfpdef(CHILD(tree, start+1));
+    else {
+        res = 0;
+        err_string("expected * or ** in varargslist trailer");
     }
-    if (!res)
-        err_string("illegal variable argument trailer for varargslist");
+
+    if (res && start != nch) {
+        res = 0;
+        err_string("unexpected extra children in varargslist trailer");
+    }
     return res;
 }
 
@@ -1476,6 +1497,7 @@
               || (ntype == flow_stmt)
               || (ntype == import_stmt)
               || (ntype == global_stmt)
+              || (ntype == nonlocal_stmt)
               || (ntype == assert_stmt))
             res = validate_node(CHILD(tree, 0));
         else {
@@ -1605,31 +1627,30 @@
 }
 
 
+/*
+ *  raise_stmt:
+ *
+ *  'raise' [test ['from' test]]
+ */
 static int
 validate_raise_stmt(node *tree)
 {
     int nch = NCH(tree);
     int res = (validate_ntype(tree, raise_stmt)
-               && ((nch == 1) || (nch == 2) || (nch == 4) || (nch == 6)));
+               && ((nch == 1) || (nch == 2) || (nch == 4)));
+
+    if (!res && !PyErr_Occurred())
+        (void) validate_numnodes(tree, 2, "raise");
 
     if (res) {
         res = validate_name(CHILD(tree, 0), "raise");
         if (res && (nch >= 2))
             res = validate_test(CHILD(tree, 1));
-        if (res && nch > 2) {
-            res = (validate_comma(CHILD(tree, 2))
+        if (res && (nch == 4)) {
+            res = (validate_name(CHILD(tree, 2), "from")
                    && validate_test(CHILD(tree, 3)));
-            if (res && (nch > 4))
-                res = (validate_comma(CHILD(tree, 4))
-                       && validate_test(CHILD(tree, 5)));
         }
     }
-    else
-        (void) validate_numnodes(tree, 2, "raise");
-    if (res && (nch == 4))
-        res = (validate_comma(CHILD(tree, 2))
-               && validate_test(CHILD(tree, 3)));
-
     return (res);
 }
 
@@ -1833,8 +1854,10 @@
 }
 
 
-
-
+/*  global_stmt:
+ *
+ *  'global' NAME (',' NAME)*
+ */
 static int
 validate_global_stmt(node *tree)
 {
@@ -1856,6 +1879,30 @@
     return (res);
 }
 
+/*  nonlocal_stmt:
+ *
+ *  'nonlocal' NAME (',' NAME)*
+ */
+static int
+validate_nonlocal_stmt(node *tree)
+{
+    int j;
+    int nch = NCH(tree);
+    int res = (validate_ntype(tree, nonlocal_stmt)
+               && is_even(nch) && (nch >= 2));
+
+    if (!res && !PyErr_Occurred())
+        err_string("illegal nonlocal statement");
+
+    if (res)
+        res = (validate_name(CHILD(tree, 0), "nonlocal")
+               && validate_ntype(CHILD(tree, 1), NAME));
+    for (j = 2; res && (j < nch); j += 2)
+        res = (validate_comma(CHILD(tree, j))
+               && validate_ntype(CHILD(tree, j + 1), NAME));
+
+    return res;
+}
 
 /*  assert_stmt:
  *
@@ -2361,17 +2408,13 @@
             break;
           case NAME:
           case NUMBER:
+          case ELLIPSIS:
             res = (nch == 1);
             break;
           case STRING:
             for (pos = 1; res && (pos < nch); ++pos)
                 res = validate_ntype(CHILD(tree, pos), STRING);
             break;
-          case DOT:
-            res = (nch == 3 &&
-                   validate_ntype(CHILD(tree, 1), DOT) &&
-                   validate_ntype(CHILD(tree, 2), DOT));
-            break;
           default:
             res = 0;
             break;
@@ -2495,23 +2538,36 @@
     return ok;
 }
 
-/*  funcdef:
- *
- *     -5   -4         -3  -2    -1
- *  'def' NAME parameters ':' suite
- */
+/* funcdef: 'def' NAME parameters ['->' test] ':' suite */
+
 static int
 validate_funcdef(node *tree)
 {
     int nch = NCH(tree);
-    int ok = (validate_ntype(tree, funcdef)
-               && (nch == 5)
-               && validate_name(RCHILD(tree, -5), "def")
-               && validate_ntype(RCHILD(tree, -4), NAME)
-               && validate_colon(RCHILD(tree, -2))
-               && validate_parameters(RCHILD(tree, -3))
-               && validate_suite(RCHILD(tree, -1)));
-    return ok;
+    int res = validate_ntype(tree, funcdef);
+    if (res) {
+        if (nch == 5) {
+            res = (validate_name(CHILD(tree, 0), "def")
+                   && validate_ntype(CHILD(tree, 1), NAME)
+                   && validate_parameters(CHILD(tree, 2))
+                   && validate_colon(CHILD(tree, 3))
+                   && validate_suite(CHILD(tree, 4)));
+        }
+        else if (nch == 7) {
+            res = (validate_name(CHILD(tree, 0), "def")
+                   && validate_ntype(CHILD(tree, 1), NAME)
+                   && validate_parameters(CHILD(tree, 2))
+                   && validate_rarrow(CHILD(tree, 3))
+                   && validate_test(CHILD(tree, 4))
+                   && validate_colon(CHILD(tree, 5))
+                   && validate_suite(CHILD(tree, 6)));
+        }
+        else {
+            res = 0;
+            err_string("illegal number of children for funcdef");
+        }
+    }
+    return res;
 }
 
 
@@ -2824,34 +2880,92 @@
                                     validate_expr_or_star_expr, "exprlist"));
 }
 
-
+/*
+ *  dictorsetmaker:
+ *
+ *  (test ':' test (comp_for | (',' test ':' test)* [','])) |
+ *  (test (comp_for | (',' test)* [',']))
+ */
 static int
 validate_dictorsetmaker(node *tree)
 {
     int nch = NCH(tree);
-    int res = (validate_ntype(tree, dictorsetmaker)
-               && (nch >= 3)
-               && validate_test(CHILD(tree, 0))
-               && validate_colon(CHILD(tree, 1))
-               && validate_test(CHILD(tree, 2)));
-
-    if (res && ((nch % 4) == 0))
-        res = validate_comma(CHILD(tree, --nch));
-    else if (res)
-        res = ((nch % 4) == 3);
-
-    if (res && (nch > 3)) {
-        int pos = 3;
-        /*  ( ',' test ':' test )*  */
-        while (res && (pos < nch)) {
-            res = (validate_comma(CHILD(tree, pos))
-                   && validate_test(CHILD(tree, pos + 1))
-                   && validate_colon(CHILD(tree, pos + 2))
-                   && validate_test(CHILD(tree, pos + 3)));
-            pos += 4;
+    int res;
+    int i = 0;
+
+    res = validate_ntype(tree, dictorsetmaker);
+    if (!res)
+        return 0;
+
+    if (nch - i < 1) {
+        (void) validate_numnodes(tree, 1, "dictorsetmaker");
+        return 0;
+    }
+
+    res = validate_test(CHILD(tree, i++));
+    if (!res)
+        return 0;
+
+    if (nch - i >= 2 && TYPE(CHILD(tree, i)) == COLON) {
+        /* Dictionary display or dictionary comprehension. */
+        res = (validate_colon(CHILD(tree, i++))
+               && validate_test(CHILD(tree, i++)));
+        if (!res)
+            return 0;
+
+        if (nch - i >= 1 && TYPE(CHILD(tree, i)) == comp_for) {
+            /* Dictionary comprehension. */
+            res = validate_comp_for(CHILD(tree, i++));
+            if (!res)
+                return 0;
+        }
+        else {
+            /* Dictionary display. */
+            while (nch - i >= 4) {
+                res = (validate_comma(CHILD(tree, i++))
+                       && validate_test(CHILD(tree, i++))
+                       && validate_colon(CHILD(tree, i++))
+                       && validate_test(CHILD(tree, i++)));
+                if (!res)
+                    return 0;
+            }
+            if (nch - i == 1) {
+                res = validate_comma(CHILD(tree, i++));
+                if (!res)
+                    return 0;
+            }
         }
     }
-    return (res);
+    else {
+        /* Set display or set comprehension. */
+        if (nch - i >= 1 && TYPE(CHILD(tree, i)) == comp_for) {
+            /* Set comprehension. */
+            res = validate_comp_for(CHILD(tree, i++));
+            if (!res)
+                return 0;
+        }
+        else {
+            /* Set display. */
+            while (nch - i >= 2) {
+                res = (validate_comma(CHILD(tree, i++))
+                       && validate_test(CHILD(tree, i++)));
+                if (!res)
+                    return 0;
+            }
+            if (nch - i == 1) {
+                res = validate_comma(CHILD(tree, i++));
+                if (!res)
+                    return 0;
+            }
+        }
+    }
+
+    if (nch - i > 0) {
+        err_string("Illegal trailing nodes for dictorsetmaker.");
+        return 0;
+    }
+
+    return 1;
 }
 
 
@@ -2907,8 +3021,8 @@
             break;
           case small_stmt:
             /*
-             *  expr_stmt | del_stmt | pass_stmt | flow_stmt
-             *  | import_stmt | global_stmt | assert_stmt
+             *  expr_stmt | del_stmt | pass_stmt | flow_stmt |
+             *  import_stmt | global_stmt | nonlocal_stmt | assert_stmt
              */
             res = validate_small_stmt(tree);
             break;
@@ -2975,6 +3089,9 @@
           case global_stmt:
             res = validate_global_stmt(tree);
             break;
+          case nonlocal_stmt:
+            res = validate_nonlocal_stmt(tree);
+            break;
           case assert_stmt:
             res = validate_assert_stmt(tree);
             break;
diff -r 3d0686d90f55 Modules/pyexpat.c
--- a/Modules/pyexpat.c
+++ b/Modules/pyexpat.c
@@ -1033,13 +1033,16 @@
 xmlparse_UseForeignDTD(xmlparseobject *self, PyObject *args)
 {
     PyObject *flagobj = NULL;
-    XML_Bool flag = XML_TRUE;
+    int flag = 1;
     enum XML_Error rc;
-    if (!PyArg_UnpackTuple(args, "UseForeignDTD", 0, 1, &flagobj))
+    if (!PyArg_ParseTuple(args, "O:UseForeignDTD", &flagobj))
         return NULL;
-    if (flagobj != NULL)
-        flag = PyObject_IsTrue(flagobj) ? XML_TRUE : XML_FALSE;
-    rc = XML_UseForeignDTD(self->itself, flag);
+    if (flagobj != NULL) {
+        flag = PyObject_IsTrue(flagobj);
+        if (flag < 0)
+            return NULL;
+    }
+    rc = XML_UseForeignDTD(self->itself, flag ? XML_TRUE : XML_FALSE);
     if (rc != XML_ERROR_NONE) {
         return set_error(self, rc);
     }
@@ -1397,7 +1400,10 @@
     }
     assert(PyUnicode_Check(name));
     if (PyUnicode_CompareWithASCIIString(name, "buffer_text") == 0) {
-        if (PyObject_IsTrue(v)) {
+        int b = PyObject_IsTrue(v);
+        if (b < 0)
+            return -1;
+        if (b) {
             if (self->buffer == NULL) {
                 self->buffer = malloc(self->buffer_size);
                 if (self->buffer == NULL) {
@@ -1416,25 +1422,25 @@
         return 0;
     }
     if (PyUnicode_CompareWithASCIIString(name, "namespace_prefixes") == 0) {
-        if (PyObject_IsTrue(v))
-            self->ns_prefixes = 1;
-        else
-            self->ns_prefixes = 0;
+        int b = PyObject_IsTrue(v);
+        if (b < 0)
+            return -1;
+        self->ns_prefixes = b;
         XML_SetReturnNSTriplet(self->itself, self->ns_prefixes);
         return 0;
     }
     if (PyUnicode_CompareWithASCIIString(name, "ordered_attributes") == 0) {
-        if (PyObject_IsTrue(v))
-            self->ordered_attributes = 1;
-        else
-            self->ordered_attributes = 0;
+        int b = PyObject_IsTrue(v);
+        if (b < 0)
+            return -1;
+        self->ordered_attributes = b;
         return 0;
     }
     if (PyUnicode_CompareWithASCIIString(name, "specified_attributes") == 0) {
-        if (PyObject_IsTrue(v))
-            self->specified_attributes = 1;
-        else
-            self->specified_attributes = 0;
+        int b = PyObject_IsTrue(v);
+        if (b < 0)
+            return -1;
+        self->specified_attributes = b;
         return 0;
     }
 
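
# Note: the Modules/pyexpat.c hunks above all apply the same fix.
# PyObject_IsTrue() can fail, returning -1 with an exception set, so its
# result must be checked before it is stored in a flag.  A minimal sketch of
# that pattern follows; set_flag_from_object is a hypothetical helper used
# only for illustration and is not part of the patch.

/* Assumes #include "Python.h". */
static int
set_flag_from_object(PyObject *v, int *flag)
{
    int b = PyObject_IsTrue(v);     /* 1, 0, or -1 on error */
    if (b < 0)
        return -1;                  /* propagate; exception already set */
    *flag = b;
    return 0;
}
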
diff -r 3d0686d90f55 Modules/python.c
--- a/Modules/python.c
+++ b/Modules/python.c
@@ -22,9 +22,9 @@
 int
 main(int argc, char **argv)
 {
-    wchar_t **argv_copy = (wchar_t **)PyMem_Malloc(sizeof(wchar_t*)*argc);
+    wchar_t **argv_copy = (wchar_t **)PyMem_Malloc(sizeof(wchar_t*)*(argc+1));
     /* We need a second copy, as Python might modify the first one. */
-    wchar_t **argv_copy2 = (wchar_t **)PyMem_Malloc(sizeof(wchar_t*)*argc);
+    wchar_t **argv_copy2 = (wchar_t **)PyMem_Malloc(sizeof(wchar_t*)*(argc+1));
     int i, res;
     char *oldloc;
     /* 754 requires that FP exceptions run in "no stop" mode by default,
@@ -58,6 +58,8 @@
         }
         argv_copy2[i] = argv_copy[i];
     }
+    argv_copy2[argc] = argv_copy[argc] = NULL;
+
     setlocale(LC_ALL, oldloc);
     free(oldloc);
     res = Py_Main(argc, argv_copy);
diff -r 3d0686d90f55 Modules/selectmodule.c
--- a/Modules/selectmodule.c
+++ b/Modules/selectmodule.c
@@ -101,7 +101,7 @@
 
         /* any intervening fileno() calls could decr this refcnt */
         if (!(o = PySequence_Fast_GET_ITEM(fast_seq, i)))
-            return -1;
+            goto finally;
 
         Py_INCREF(o);
         v = PyObject_AsFileDescriptor( o );
@@ -421,6 +421,7 @@
     if (PyDict_GetItem(self->dict, key) == NULL) {
         errno = ENOENT;
         PyErr_SetFromErrno(PyExc_IOError);
+        Py_DECREF(key);
         return NULL;
     }
     value = PyLong_FromLong(events);
diff -r 3d0686d90f55 Modules/socketmodule.c
--- a/Modules/socketmodule.c
+++ b/Modules/socketmodule.c
@@ -804,7 +804,7 @@
 /* Lock to allow python interpreter to continue, but only allow one
    thread to be in gethostbyname or getaddrinfo */
 #if defined(USE_GETHOSTBYNAME_LOCK) || defined(USE_GETADDRINFO_LOCK)
-PyThread_type_lock netdb_lock;
+static PyThread_type_lock netdb_lock;
 #endif
 
 
@@ -1347,7 +1347,7 @@
                 "getsockaddrarg: port must be 0-65535.");
             return 0;
         }
-        if (flowinfo < 0 || flowinfo > 0xfffff) {
+        if (flowinfo > 0xfffff) {
             PyErr_SetString(
                 PyExc_OverflowError,
                 "getsockaddrarg: flowinfo must be 0-1048575.");
@@ -4129,7 +4129,7 @@
     if (!PyArg_ParseTuple(sa, "si|II",
                           &hostp, &port, &flowinfo, &scope_id))
         return NULL;
-    if (flowinfo < 0 || flowinfo > 0xfffff) {
+    if (flowinfo > 0xfffff) {
         PyErr_SetString(PyExc_OverflowError,
                         "getsockaddrarg: flowinfo must be 0-1048575.");
         return NULL;
diff -r 3d0686d90f55 Modules/sre.h
--- a/Modules/sre.h
+++ b/Modules/sre.h
@@ -31,6 +31,7 @@
     int flags; /* flags used when compiling pattern source */
     PyObject *weakreflist; /* List of weak references */
 	int charsize; /* pattern charsize (or -1) */
+    Py_buffer view;
     /* pattern code */
     Py_ssize_t codesize;
     SRE_CODE code[1];
@@ -80,6 +81,7 @@
     char* data_stack;
     size_t data_stack_size;
     size_t data_stack_base;
+    Py_buffer buffer;
     /* current repeat context */
     SRE_REPEAT *repeat;
     /* hooks */
diff -r 3d0686d90f55 Modules/timemodule.c
--- a/Modules/timemodule.c
+++ b/Modules/timemodule.c
@@ -504,7 +504,7 @@
     fmt = PyBytes_AS_STRING(format);
 #endif
 
-#if defined(MS_WINDOWS)
+#if defined(MS_WINDOWS) && !defined(HAVE_WCSFTIME)
     /* check that the format string contains only valid directives */
     for(outbuf = strchr(fmt, '%');
         outbuf != NULL;
@@ -516,7 +516,8 @@
             !strchr("aAbBcdHIjmMpSUwWxXyYzZ%", outbuf[1]))
         {
             PyErr_SetString(PyExc_ValueError, "Invalid format string");
-            return 0;
+            Py_DECREF(format);
+            return NULL;
         }
     }
 #endif
diff -r 3d0686d90f55 Objects/abstract.c
--- a/Objects/abstract.c
+++ b/Objects/abstract.c
@@ -2728,6 +2728,13 @@
     if (argc == -1)
         return NULL;
 
+    assert(argc >= 0);
+
+    if ((size_t)argc > (PY_SSIZE_T_MAX-sizeof(char *)) / sizeof(char *)) {
+        PyErr_NoMemory();
+        return NULL;
+    }
+
     array = malloc((argc + 1) * sizeof(char *));
     if (array == NULL) {
         PyErr_NoMemory();
@@ -2736,6 +2743,11 @@
     for (i = 0; i < argc; ++i) {
         char *data;
         item = PySequence_GetItem(self, i);
+        if (item == NULL) {
+            /* NULL terminate before freeing. */
+            array[i] = NULL;
+            goto fail;
+        }
         data = PyBytes_AsString(item);
         if (data == NULL) {
             /* NULL terminate before freeing. */
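
# Note: the Objects/abstract.c hunk above guards the (argc + 1) * sizeof(char *)
# multiplication against overflow before calling malloc().  A minimal sketch of
# that guard follows, under the same assumption that Python.h is included;
# alloc_string_array is a hypothetical helper, not part of the patch.

/* Overflow-safe allocation of an (n + 1)-element pointer array. */
static char **
alloc_string_array(Py_ssize_t n)
{
    char **array;
    assert(n >= 0);
    /* Reject sizes where (n + 1) * sizeof(char *) would wrap around. */
    if ((size_t)n > (PY_SSIZE_T_MAX - sizeof(char *)) / sizeof(char *)) {
        PyErr_NoMemory();
        return NULL;
    }
    array = malloc((n + 1) * sizeof(char *));
    if (array == NULL)
        PyErr_NoMemory();
    return array;
}
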
diff -r 3d0686d90f55 Objects/accu.c
--- a/Objects/accu.c
+++ b/Objects/accu.c
@@ -1,6 +1,7 @@
 /* Accumulator struct implementation */
 
 #include "Python.h"
+#include "accu.h"
 
 static PyObject *
 join_list_unicode(PyObject *lst)
diff -r 3d0686d90f55 Objects/bytearrayobject.c
--- a/Objects/bytearrayobject.c
+++ b/Objects/bytearrayobject.c
@@ -2234,8 +2234,10 @@
     }
 
     bytearray_obj = PyByteArray_FromStringAndSize(NULL, buf_size);
-    if (bytearray_obj == NULL)
+    if (bytearray_obj == NULL) {
+        Py_DECREF(it);
         return NULL;
+    }
     buf = PyByteArray_AS_STRING(bytearray_obj);
 
     while ((item = PyIter_Next(it)) != NULL) {
@@ -2268,8 +2270,10 @@
         return NULL;
     }
 
-    if (bytearray_setslice(self, Py_SIZE(self), Py_SIZE(self), bytearray_obj) == -1)
+    if (bytearray_setslice(self, Py_SIZE(self), Py_SIZE(self), bytearray_obj) == -1) {
+        Py_DECREF(bytearray_obj);
         return NULL;
+    }
     Py_DECREF(bytearray_obj);
 
     Py_RETURN_NONE;
diff -r 3d0686d90f55 Objects/bytesobject.c
--- a/Objects/bytesobject.c
+++ b/Objects/bytesobject.c
@@ -875,7 +875,9 @@
     register unsigned char *p;
     register Py_hash_t x;
 
+#ifdef Py_DEBUG
     assert(_Py_HashSecret_Initialized);
+#endif
     if (a->ob_shash != -1)
         return a->ob_shash;
     len = Py_SIZE(a);
diff -r 3d0686d90f55 Objects/classobject.c
--- a/Objects/classobject.c
+++ b/Objects/classobject.c
@@ -243,8 +243,10 @@
     else {
         klassname = PyObject_GetAttrString(klass, "__name__");
         if (klassname == NULL) {
-            if (!PyErr_ExceptionMatches(PyExc_AttributeError))
+            if (!PyErr_ExceptionMatches(PyExc_AttributeError)) {
+                Py_XDECREF(funcname);
                 return NULL;
+            }
             PyErr_Clear();
         }
         else if (!PyUnicode_Check(klassname)) {
diff -r 3d0686d90f55 Objects/descrobject.c
--- a/Objects/descrobject.c
+++ b/Objects/descrobject.c
@@ -256,14 +256,52 @@
 classmethoddescr_call(PyMethodDescrObject *descr, PyObject *args,
                       PyObject *kwds)
 {
-    PyObject *func, *result;
+    Py_ssize_t argc;
+    PyObject *self, *func, *result;
 
-    func = PyCFunction_New(descr->d_method, (PyObject *)PyDescr_TYPE(descr));
+    /* Make sure that the first argument is acceptable as 'self' */
+    assert(PyTuple_Check(args));
+    argc = PyTuple_GET_SIZE(args);
+    if (argc < 1) {
+        PyErr_Format(PyExc_TypeError,
+                     "descriptor '%V' of '%.100s' "
+                     "object needs an argument",
+                     descr_name((PyDescrObject *)descr), "?",
+                     PyDescr_TYPE(descr)->tp_name);
+        return NULL;
+    }
+    self = PyTuple_GET_ITEM(args, 0);
+    if (!PyType_Check(self)) {
+        PyErr_Format(PyExc_TypeError,
+                     "descriptor '%V' requires a type "
+                     "but received a '%.100s'",
+                     descr_name((PyDescrObject *)descr), "?",
+                     PyDescr_TYPE(descr)->tp_name,
+                     self->ob_type->tp_name);
+        return NULL;
+    }
+    if (!PyType_IsSubtype((PyTypeObject *)self, PyDescr_TYPE(descr))) {
+        PyErr_Format(PyExc_TypeError,
+                     "descriptor '%V' "
+                     "requires a subtype of '%.100s' "
+                     "but received '%.100s'",
+                     descr_name((PyDescrObject *)descr), "?",
+                     PyDescr_TYPE(descr)->tp_name,
+                     self->ob_type->tp_name);
+        return NULL;
+    }
+
+    func = PyCFunction_New(descr->d_method, self);
     if (func == NULL)
         return NULL;
-
+    args = PyTuple_GetSlice(args, 1, argc);
+    if (args == NULL) {
+        Py_DECREF(func);
+        return NULL;
+    }
     result = PyEval_CallObjectWithKeywords(func, args, kwds);
     Py_DECREF(func);
+    Py_DECREF(args);
     return result;
 }
 
diff -r 3d0686d90f55 Objects/dictobject.c
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -510,27 +510,16 @@
     _PyObject_GC_UNTRACK(op);
 }
 
-
 /*
-Internal routine to insert a new item into the table.
-Used both by the internal resize routine and by the public insert routine.
-Eats a reference to key and one to value.
-Returns -1 if an error occurred, or 0 on success.
+Internal routine to insert a new item into the table when you already have the entry object.
+Used by insertdict.
 */
 static int
-insertdict(register PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject *value)
+insertdict_by_entry(register PyDictObject *mp, PyObject *key, Py_hash_t hash,
+                    PyDictEntry *ep, PyObject *value)
 {
     PyObject *old_value;
-    register PyDictEntry *ep;
-    typedef PyDictEntry *(*lookupfunc)(PyDictObject *, PyObject *, Py_hash_t);
 
-    assert(mp->ma_lookup != NULL);
-    ep = mp->ma_lookup(mp, key, hash);
-    if (ep == NULL) {
-        Py_DECREF(key);
-        Py_DECREF(value);
-        return -1;
-    }
     MAINTAIN_TRACKING(mp, key, value);
     if (ep->me_value != NULL) {
         old_value = ep->me_value;
@@ -553,6 +542,28 @@
     return 0;
 }
 
+
+/*
+Internal routine to insert a new item into the table.
+Used both by the internal resize routine and by the public insert routine.
+Eats a reference to key and one to value.
+Returns -1 if an error occurred, or 0 on success.
+*/
+static int
+insertdict(register PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject *value)
+{
+    register PyDictEntry *ep;
+
+    assert(mp->ma_lookup != NULL);
+    ep = mp->ma_lookup(mp, key, hash);
+    if (ep == NULL) {
+        Py_DECREF(key);
+        Py_DECREF(value);
+        return -1;
+    }
+    return insertdict_by_entry(mp, key, hash, ep, value);
+}
+
 /*
 Internal routine used by dictresize() to insert an item which is
 known to be absent from the dict.  This routine also assumes that
@@ -776,39 +787,26 @@
     return ep->me_value;
 }
 
-/* CAUTION: PyDict_SetItem() must guarantee that it won't resize the
- * dictionary if it's merely replacing the value for an existing key.
- * This means that it's safe to loop over a dictionary with PyDict_Next()
- * and occasionally replace a value -- but you can't insert new keys or
- * remove them.
- */
-int
-PyDict_SetItem(register PyObject *op, PyObject *key, PyObject *value)
+static int
+dict_set_item_by_hash_or_entry(register PyObject *op, PyObject *key,
+                               Py_hash_t hash, PyDictEntry *ep, PyObject *value)
 {
     register PyDictObject *mp;
-    register Py_hash_t hash;
     register Py_ssize_t n_used;
 
-    if (!PyDict_Check(op)) {
-        PyErr_BadInternalCall();
-        return -1;
-    }
-    assert(key);
-    assert(value);
     mp = (PyDictObject *)op;
-    if (!PyUnicode_CheckExact(key) ||
-        (hash = ((PyUnicodeObject *) key)->hash) == -1)
-    {
-        hash = PyObject_Hash(key);
-        if (hash == -1)
-            return -1;
-    }
     assert(mp->ma_fill <= mp->ma_mask);  /* at least one empty slot */
     n_used = mp->ma_used;
     Py_INCREF(value);
     Py_INCREF(key);
-    if (insertdict(mp, key, hash, value) != 0)
-        return -1;
+    if (ep == NULL) {
+        if (insertdict(mp, key, hash, value) != 0)
+            return -1;
+    }
+    else {
+        if (insertdict_by_entry(mp, key, hash, ep, value) != 0)
+            return -1;
+    }
     /* If we added a key, we can safely resize.  Otherwise just return!
      * If fill >= 2/3 size, adjust size.  Normally, this doubles or
      * quaduples the size, but it's also possible for the dict to shrink
@@ -828,6 +826,36 @@
     return dictresize(mp, (mp->ma_used > 50000 ? 2 : 4) * mp->ma_used);
 }
 
+/* CAUTION: PyDict_SetItem() must guarantee that it won't resize the
+ * dictionary if it's merely replacing the value for an existing key.
+ * This means that it's safe to loop over a dictionary with PyDict_Next()
+ * and occasionally replace a value -- but you can't insert new keys or
+ * remove them.
+ */
+int
+PyDict_SetItem(register PyObject *op, PyObject *key, PyObject *value)
+{
+    register Py_hash_t hash;
+
+    if (!PyDict_Check(op)) {
+        PyErr_BadInternalCall();
+        return -1;
+    }
+    assert(key);
+    assert(value);
+    if (PyUnicode_CheckExact(key)) {
+        hash = ((PyUnicodeObject *) key)->hash;
+        if (hash == -1)
+            hash = PyObject_Hash(key);
+    }
+    else {
+        hash = PyObject_Hash(key);
+        if (hash == -1)
+            return -1;
+    }
+    return dict_set_item_by_hash_or_entry(op, key, hash, NULL, value);
+}
+
 int
 PyDict_DelItem(PyObject *op, PyObject *key)
 {
@@ -1797,9 +1825,9 @@
         return NULL;
     val = ep->me_value;
     if (val == NULL) {
-        val = failobj;
-        if (PyDict_SetItem((PyObject*)mp, key, failobj))
-            val = NULL;
+        if (dict_set_item_by_hash_or_entry((PyObject*)mp, key, hash, ep,
+                                           failobj) == 0)
+            val = failobj;
     }
     Py_XINCREF(val);
     return val;
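
# Note: the CAUTION comment in Objects/dictobject.c above documents that
# PyDict_SetItem() never resizes the table when it merely replaces the value
# for an existing key.  A minimal sketch of the usage that guarantee permits,
# looping with PyDict_Next() while replacing values of existing keys only;
# replace_all_values is a hypothetical helper, not part of the patch.

/* Assumes #include "Python.h". */
static int
replace_all_values(PyObject *dict, PyObject *new_value)
{
    PyObject *key, *value;
    Py_ssize_t pos = 0;

    while (PyDict_Next(dict, &pos, &key, &value)) {
        /* The key already exists, so this is a pure value replacement
           and cannot resize the dict or invalidate the iteration. */
        if (PyDict_SetItem(dict, key, new_value) < 0)
            return -1;
    }
    return 0;
}
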
diff -r 3d0686d90f55 Objects/frameobject.c
--- a/Objects/frameobject.c
+++ b/Objects/frameobject.c
@@ -199,6 +199,7 @@
         case SETUP_LOOP:
         case SETUP_EXCEPT:
         case SETUP_FINALLY:
+        case SETUP_WITH:
             blockstack[blockstack_top++] = addr;
             in_finally[blockstack_top-1] = 0;
             break;
@@ -206,7 +207,7 @@
         case POP_BLOCK:
             assert(blockstack_top > 0);
             setup_op = code[blockstack[blockstack_top-1]];
-            if (setup_op == SETUP_FINALLY) {
+            if (setup_op == SETUP_FINALLY || setup_op == SETUP_WITH) {
                 in_finally[blockstack_top-1] = 1;
             }
             else {
@@ -221,7 +222,7 @@
              * be seeing such an END_FINALLY.) */
             if (blockstack_top > 0) {
                 setup_op = code[blockstack[blockstack_top-1]];
-                if (setup_op == SETUP_FINALLY) {
+                if (setup_op == SETUP_FINALLY || setup_op == SETUP_WITH) {
                     blockstack_top--;
                 }
             }
@@ -283,6 +284,7 @@
         case SETUP_LOOP:
         case SETUP_EXCEPT:
         case SETUP_FINALLY:
+        case SETUP_WITH:
             delta_iblock++;
             break;
 
diff -r 3d0686d90f55 Objects/genobject.c
--- a/Objects/genobject.c
+++ b/Objects/genobject.c
@@ -129,7 +129,7 @@
 }
 
 PyDoc_STRVAR(close_doc,
-"close(arg) -> raise GeneratorExit inside generator.");
+"close() -> raise GeneratorExit inside generator.");
 
 static PyObject *
 gen_close(PyGenObject *gen, PyObject *args)
diff -r 3d0686d90f55 Objects/listobject.c
--- a/Objects/listobject.c
+++ b/Objects/listobject.c
@@ -1,6 +1,7 @@
 /* List object implementation */
 
 #include "Python.h"
+#include "accu.h"
 
 #ifdef STDC_HEADERS
 #include <stddef.h>
diff -r 3d0686d90f55 Objects/longobject.c
--- a/Objects/longobject.c
+++ b/Objects/longobject.c
@@ -156,9 +156,7 @@
     if (i < 0)
         i = -(i);
     if (i < 2) {
-        sdigit ival = src->ob_digit[0];
-        if (Py_SIZE(src) < 0)
-            ival = -ival;
+        sdigit ival = MEDIUM_VALUE(src);
         CHECK_SMALL_INT(ival);
     }
     result = _PyLong_New(i);
diff -r 3d0686d90f55 Objects/moduleobject.c
--- a/Objects/moduleobject.c
+++ b/Objects/moduleobject.c
@@ -117,8 +117,10 @@
     d = PyModule_GetDict((PyObject*)m);
     if (module->m_methods != NULL) {
         n = PyUnicode_FromString(name);
-        if (n == NULL)
+        if (n == NULL) {
+            Py_DECREF(m);
             return NULL;
+        }
         for (ml = module->m_methods; ml->ml_name != NULL; ml++) {
             if ((ml->ml_flags & METH_CLASS) ||
                 (ml->ml_flags & METH_STATIC)) {
@@ -126,16 +128,19 @@
                                 "module functions cannot set"
                                 " METH_CLASS or METH_STATIC");
                 Py_DECREF(n);
+                Py_DECREF(m);
                 return NULL;
             }
             v = PyCFunction_NewEx(ml, (PyObject*)m, n);
             if (v == NULL) {
                 Py_DECREF(n);
+                Py_DECREF(m);
                 return NULL;
             }
             if (PyDict_SetItemString(d, ml->ml_name, v) != 0) {
                 Py_DECREF(v);
                 Py_DECREF(n);
+                Py_DECREF(m);
                 return NULL;
             }
             Py_DECREF(v);
@@ -146,6 +151,7 @@
         v = PyUnicode_FromString(module->m_doc);
         if (v == NULL || PyDict_SetItemString(d, "__doc__", v) != 0) {
             Py_XDECREF(v);
+            Py_DECREF(m);
             return NULL;
         }
         Py_DECREF(v);
diff -r 3d0686d90f55 Objects/rangeobject.c
--- a/Objects/rangeobject.c
+++ b/Objects/rangeobject.c
@@ -307,7 +307,7 @@
 static PyObject *
 range_item(rangeobject *r, Py_ssize_t i)
 {
-    PyObject *res, *arg = PyLong_FromLong(i);
+    PyObject *res, *arg = PyLong_FromSsize_t(i);
     if (!arg) {
         return NULL;
     }
diff -r 3d0686d90f55 Objects/structseq.c
--- a/Objects/structseq.c
+++ b/Objects/structseq.c
@@ -383,6 +383,8 @@
 PyStructSequence_NewType(PyStructSequence_Desc *desc)
 {
     PyTypeObject *result = (PyTypeObject*)PyType_GenericAlloc(&PyType_Type, 0);
-    PyStructSequence_InitType(result, desc);
+    if (result != NULL) {
+        PyStructSequence_InitType(result, desc);
+    }
     return result;
 }
diff -r 3d0686d90f55 Objects/tupleobject.c
--- a/Objects/tupleobject.c
+++ b/Objects/tupleobject.c
@@ -2,6 +2,7 @@
 /* Tuple object implementation */
 
 #include "Python.h"
+#include "accu.h"
 
 /* Speed optimization to avoid frequent malloc/free of small tuples */
 #ifndef PyTuple_MAXSAVESIZE
diff -r 3d0686d90f55 Objects/typeobject.c
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -340,11 +340,15 @@
        abc.ABCMeta.__new__, so this function doesn't do anything
        special to update subclasses.
     */
-    int res;
+    int abstract, res;
     if (value != NULL) {
+        abstract = PyObject_IsTrue(value);
+        if (abstract < 0)
+            return -1;
         res = PyDict_SetItemString(type->tp_dict, "__abstractmethods__", value);
     }
     else {
+        abstract = 0;
         res = PyDict_DelItemString(type->tp_dict, "__abstractmethods__");
         if (res && PyErr_ExceptionMatches(PyExc_KeyError)) {
             PyErr_SetString(PyExc_AttributeError, "__abstractmethods__");
@@ -353,12 +357,10 @@
     }
     if (res == 0) {
         PyType_Modified(type);
-        if (value && PyObject_IsTrue(value)) {
+        if (abstract)
             type->tp_flags |= Py_TPFLAGS_IS_ABSTRACT;
-        }
-        else {
+        else
             type->tp_flags &= ~Py_TPFLAGS_IS_ABSTRACT;
-        }
     }
     return res;
 }
@@ -458,26 +460,23 @@
     for (i = 0; i < PyTuple_GET_SIZE(value); i++) {
         ob = PyTuple_GET_ITEM(value, i);
         if (!PyType_Check(ob)) {
-            PyErr_Format(
-                PyExc_TypeError,
-    "%s.__bases__ must be tuple of old- or new-style classes, not '%s'",
-                            type->tp_name, Py_TYPE(ob)->tp_name);
-                    return -1;
+            PyErr_Format(PyExc_TypeError,
+                         "%s.__bases__ must be tuple of old- or "
+                         "new-style classes, not '%s'",
+                         type->tp_name, Py_TYPE(ob)->tp_name);
+            return -1;
         }
-        if (PyType_Check(ob)) {
-            if (PyType_IsSubtype((PyTypeObject*)ob, type)) {
-                PyErr_SetString(PyExc_TypeError,
-            "a __bases__ item causes an inheritance cycle");
-                return -1;
-            }
+        if (PyType_IsSubtype((PyTypeObject*)ob, type)) {
+            PyErr_SetString(PyExc_TypeError,
+                            "a __bases__ item causes an inheritance cycle");
+            return -1;
         }
     }
 
     new_base = best_base(value);
 
-    if (!new_base) {
+    if (!new_base)
         return -1;
-    }
 
     if (!compatible_for_assignment(type->tp_base, new_base, "__bases__"))
         return -1;
@@ -830,8 +829,13 @@
         assert(base);
     }
 
-    /* There's no need to clear the instance dict (if any);
-       the collector will call its tp_clear handler. */
+    /* Clear the instance dict (if any), to break cycles involving only
+       __dict__ slots (as in the case 'self.__dict__ is self'). */
+    if (type->tp_dictoffset != base->tp_dictoffset) {
+        PyObject **dictptr = _PyObject_GetDictPtr(self);
+        if (dictptr && *dictptr)
+            Py_CLEAR(*dictptr);
+    }
 
     if (baseclear)
         return baseclear(self);
@@ -2358,33 +2362,39 @@
       return NULL;
     res->ht_name = PyUnicode_FromString(spec->name);
     if (!res->ht_name)
-	goto fail;
+        goto fail;
     res->ht_type.tp_name = _PyUnicode_AsString(res->ht_name);
     if (!res->ht_type.tp_name)
-	goto fail;
+        goto fail;
 
     res->ht_type.tp_basicsize = spec->basicsize;
     res->ht_type.tp_itemsize = spec->itemsize;
     res->ht_type.tp_flags = spec->flags | Py_TPFLAGS_HEAPTYPE;
 
     for (slot = spec->slots; slot->slot; slot++) {
-	if (slot->slot >= sizeof(slotoffsets)/sizeof(slotoffsets[0])) {
-	    PyErr_SetString(PyExc_RuntimeError, "invalid slot offset");
-	    goto fail;
-	}
-	*(void**)(res_start + slotoffsets[slot->slot]) = slot->pfunc;
+        if (slot->slot >= sizeof(slotoffsets)/sizeof(slotoffsets[0])) {
+            PyErr_SetString(PyExc_RuntimeError, "invalid slot offset");
+            goto fail;
+        }
+        *(void**)(res_start + slotoffsets[slot->slot]) = slot->pfunc;
 
         /* need to make a copy of the docstring slot, which usually
            points to a static string literal */
         if (slot->slot == Py_tp_doc) {
-            ssize_t len = strlen(slot->pfunc)+1;
+            size_t len = strlen(slot->pfunc)+1;
             char *tp_doc = PyObject_MALLOC(len);
             if (tp_doc == NULL)
-	    	goto fail;
+                goto fail;
             memcpy(tp_doc, slot->pfunc, len);
             res->ht_type.tp_doc = tp_doc;
         }
     }
+    if (res->ht_type.tp_dealloc == NULL) {
+        /* It's a heap type, so needs the heap types' dealloc.
+           subtype_dealloc will call the base type's tp_dealloc, if
+           necessary. */
+        res->ht_type.tp_dealloc = subtype_dealloc;
+    }
 
     if (PyType_Ready(&res->ht_type) < 0)
         goto fail;
@@ -2457,6 +2467,13 @@
     PyObject *meta_attribute, *attribute;
     descrgetfunc meta_get;
 
+    if (!PyUnicode_Check(name)) {
+        PyErr_Format(PyExc_TypeError,
+                     "attribute name must be string, not '%.200s'",
+                     name->ob_type->tp_name);
+        return NULL;
+    }
+
     /* Initialize this type (we'll assume the metatype is initialized) */
     if (type->tp_dict == NULL) {
         if (PyType_Ready(type) < 0)
@@ -2919,7 +2936,7 @@
     unaryfunc f;
 
     f = Py_TYPE(self)->tp_repr;
-    if (f == NULL || f == object_str)
+    if (f == NULL)
         f = object_repr;
     return f(self);
 }
@@ -3518,6 +3535,7 @@
 
     for (; meth->ml_name != NULL; meth++) {
         PyObject *descr;
+        int err;
         if (PyDict_GetItemString(dict, meth->ml_name) &&
             !(meth->ml_flags & METH_COEXIST))
                 continue;
@@ -3541,9 +3559,10 @@
         }
         if (descr == NULL)
             return -1;
-        if (PyDict_SetItemString(dict, meth->ml_name, descr) < 0)
+        err = PyDict_SetItemString(dict, meth->ml_name, descr);
+        Py_DECREF(descr);
+        if (err < 0)
             return -1;
-        Py_DECREF(descr);
     }
     return 0;
 }
@@ -5748,7 +5767,8 @@
             }
             continue;
         }
-        if (Py_TYPE(descr) == &PyWrapperDescr_Type) {
+        if (Py_TYPE(descr) == &PyWrapperDescr_Type &&
+            ((PyWrapperDescrObject *)descr)->d_base->name_strobj == p->name_strobj) {
             void **tptr = resolve_slotdups(type, p->name_strobj);
             if (tptr == NULL || tptr == ptr)
                 generic = p->function;
diff -r 3d0686d90f55 Objects/unicodeobject.c
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -3425,7 +3425,7 @@
     /* Unpack UTF-16 encoded data */
     p = unicode->str;
     q = (unsigned char *)s;
-    e = q + size - 1;
+    e = q + size;
 
     if (byteorder)
         bo = *byteorder;
@@ -3476,8 +3476,20 @@
 #endif
 
     aligned_end = (const unsigned char *) ((size_t) e & ~LONG_PTR_MASK);
-    while (q < e) {
+    while (1) {
         Py_UNICODE ch;
+        if (e - q < 2) {
+            /* remaining byte at the end? (size should be even) */
+            if (q == e || consumed)
+                break;
+            errmsg = "truncated data";
+            startinpos = ((const char *)q) - starts;
+            endinpos = ((const char *)e) - starts;
+            outpos = p - PyUnicode_AS_UNICODE(unicode);
+            goto utf16Error;
+            /* The remaining input chars are ignored if the callback
+               chooses to skip the input */
+        }
         /* First check for possible aligned read of a C 'long'. Unaligned
            reads are more expensive, better to defer to another iteration. */
         if (!((size_t) q & LONG_PTR_MASK)) {
@@ -3546,8 +3558,8 @@
             }
             p = _p;
             q = _q;
-            if (q >= e)
-                break;
+            if (e - q < 2)
+                continue;
         }
         ch = (q[ihi] << 8) | q[ilo];
 
@@ -3559,10 +3571,10 @@
         }
 
         /* UTF-16 code pair: */
-        if (q > e) {
+        if (e - q < 2) {
             errmsg = "unexpected end of data";
             startinpos = (((const char *)q) - 2) - starts;
-            endinpos = ((const char *)e) + 1 - starts;
+            endinpos = ((const char *)e) - starts;
             goto utf16Error;
         }
         if (0xD800 <= ch && ch <= 0xDBFF) {
@@ -3606,31 +3618,9 @@
                 &outpos,
                 &p))
             goto onError;
-    }
-    /* remaining byte at the end? (size should be even) */
-    if (e == q) {
-        if (!consumed) {
-            errmsg = "truncated data";
-            startinpos = ((const char *)q) - starts;
-            endinpos = ((const char *)e) + 1 - starts;
-            outpos = p - PyUnicode_AS_UNICODE(unicode);
-            if (unicode_decode_call_errorhandler(
-                    errors,
-                    &errorHandler,
-                    "utf16", errmsg,
-                    &starts,
-                    (const char **)&e,
-                    &startinpos,
-                    &endinpos,
-                    &exc,
-                    (const char **)&q,
-                    &unicode,
-                    &outpos,
-                    &p))
-                goto onError;
-            /* The remaining input chars are ignored if the callback
-               chooses to skip the input */
-        }
+        /* Update data because unicode_decode_call_errorhandler might have
+           changed the input object. */
+        aligned_end = (const unsigned char *) ((size_t) e & ~LONG_PTR_MASK);
     }
 
     if (byteorder)
@@ -7673,7 +7663,9 @@
     Py_UNICODE *p;
     Py_hash_t x;
 
+#ifdef Py_DEBUG
     assert(_Py_HashSecret_Initialized);
+#endif
     if (self->hash != -1)
         return self->hash;
     len = Py_SIZE(self);
@@ -9208,10 +9200,6 @@
 }
 
 static PyMethodDef unicode_methods[] = {
-
-    /* Order is according to common usage: often used methods should
-       appear first, since lookup is done sequentially. */
-
     {"encode", (PyCFunction) unicode_encode, METH_VARARGS | METH_KEYWORDS, encode__doc__},
     {"replace", (PyCFunction) unicode_replace, METH_VARARGS, replace__doc__},
     {"split", (PyCFunction) unicode_split, METH_VARARGS, split__doc__},
@@ -9975,11 +9963,15 @@
 }
 
 PyDoc_STRVAR(unicode_doc,
-             "str(string[, encoding[, errors]]) -> str\n\
+             "str(object[, encoding[, errors]]) -> str\n\
 \n\
-Create a new string object from the given encoded string.\n\
-encoding defaults to the current default string encoding.\n\
-errors can be 'strict', 'replace' or 'ignore' and defaults to 'strict'.");
+Create a new string object from the given object. If encoding or\n\
+errors is specified, then the object must expose a data buffer\n\
+that will be decoded using the given encoding and error handler.\n\
+Otherwise, returns the result of object.__str__() (if defined)\n\
+or repr(object).\n\
+encoding defaults to sys.getdefaultencoding().\n\
+errors defaults to 'strict'.");
 
 static PyObject *unicode_iter(PyObject *seq);
 
diff -r 3d0686d90f55 PC/VC6/bz2.dsp
--- a/PC/VC6/bz2.dsp
+++ b/PC/VC6/bz2.dsp
@@ -44,7 +44,7 @@
 # PROP Target_Dir ""
 F90=df.exe
 # ADD BASE CPP /nologo /MT /W3 /GX /O2 /D "Py_BUILD_CORE_MODULE" /D "WIN32" /D "NDEBUG" /D "_WINDOWS" /YX /FD /c
-# ADD CPP /nologo /MD /W3 /GX /Zi /O2 /I "..\..\Include" /I ".." /I "..\..\..\bzip2-1.0.5" /D "Py_BUILD_CORE_MODULE" /D "NDEBUG" /D "WIN32" /D "_WINDOWS" /YX /FD /c
+# ADD CPP /nologo /MD /W3 /GX /Zi /O2 /I "..\..\Include" /I ".." /I "..\..\..\bzip2-1.0.6" /D "Py_BUILD_CORE_MODULE" /D "NDEBUG" /D "WIN32" /D "_WINDOWS" /YX /FD /c
 # ADD BASE MTL /nologo /D "NDEBUG" /mktyplib203 /o "NUL" /win32
 # ADD MTL /nologo /D "NDEBUG" /mktyplib203 /o "NUL" /win32
 # ADD BASE RSC /l 0x409 /d "NDEBUG"
@@ -54,7 +54,7 @@
 # ADD BSC32 /nologo
 LINK32=link.exe
 # ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /subsystem:windows /dll /machine:I386
-# ADD LINK32 ..\..\..\bzip2-1.0.5\libbz2.lib /nologo /base:"0x1D170000" /subsystem:windows /dll /debug /machine:I386 /nodefaultlib:"libc" /out:"./bz2.pyd"
+# ADD LINK32 ..\..\..\bzip2-1.0.6\libbz2.lib /nologo /base:"0x1D170000" /subsystem:windows /dll /debug /machine:I386 /nodefaultlib:"libc" /out:"./bz2.pyd"
 # SUBTRACT LINK32 /pdb:none /nodefaultlib
 
 !ELSEIF  "$(CFG)" == "bz2 - Win32 Debug"
@@ -72,7 +72,7 @@
 # PROP Target_Dir ""
 F90=df.exe
 # ADD BASE CPP /nologo /MTd /W3 /Gm /GX /Zi /Od /D "Py_BUILD_CORE_MODULE" /D "WIN32" /D "_DEBUG" /D "_WINDOWS" /YX /FD /c
-# ADD CPP /nologo /MDd /W3 /Gm /GX /Zi /Od /I "..\..\Include" /I ".." /I "..\..\..\bzip2-1.0.5" /D "Py_BUILD_CORE_MODULE" /D "_DEBUG" /D "WIN32" /D "_WINDOWS" /YX /FD /c
+# ADD CPP /nologo /MDd /W3 /Gm /GX /Zi /Od /I "..\..\Include" /I ".." /I "..\..\..\bzip2-1.0.6" /D "Py_BUILD_CORE_MODULE" /D "_DEBUG" /D "WIN32" /D "_WINDOWS" /YX /FD /c
 # ADD BASE MTL /nologo /D "_DEBUG" /mktyplib203 /o "NUL" /win32
 # ADD MTL /nologo /D "_DEBUG" /mktyplib203 /o "NUL" /win32
 # ADD BASE RSC /l 0x409 /d "_DEBUG"
@@ -82,7 +82,7 @@
 # ADD BSC32 /nologo
 LINK32=link.exe
 # ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /subsystem:windows /dll /debug /machine:I386 /pdbtype:sept
-# ADD LINK32 ..\..\..\bzip2-1.0.5\libbz2.lib /nologo /base:"0x1D170000" /subsystem:windows /dll /debug /machine:I386 /nodefaultlib:"msvcrt" /nodefaultlib:"libc" /out:"./bz2_d.pyd" /pdbtype:sept
+# ADD LINK32 ..\..\..\bzip2-1.0.6\libbz2.lib /nologo /base:"0x1D170000" /subsystem:windows /dll /debug /machine:I386 /nodefaultlib:"msvcrt" /nodefaultlib:"libc" /out:"./bz2_d.pyd" /pdbtype:sept
 # SUBTRACT LINK32 /pdb:none
 
 !ENDIF 
diff -r 3d0686d90f55 PC/VC6/pythoncore.dsp
--- a/PC/VC6/pythoncore.dsp
+++ b/PC/VC6/pythoncore.dsp
@@ -655,6 +655,10 @@
 # End Source File
 # Begin Source File
 
+SOURCE=..\..\Python\random.c
+# End Source File
+# Begin Source File
+
 SOURCE=..\..\Objects\rangeobject.c
 # End Source File
 # Begin Source File
diff -r 3d0686d90f55 PC/VC6/readme.txt
--- a/PC/VC6/readme.txt
+++ b/PC/VC6/readme.txt
@@ -120,14 +120,14 @@
     Download the source from the python.org copy into the dist
     directory:
 
-    svn export http://svn.python.org/projects/external/bzip2-1.0.5
+    svn export http://svn.python.org/projects/external/bzip2-1.0.6
 
     And requires building bz2 first.
 
-    cd dist\bzip2-1.0.5
+    cd dist\bzip2-1.0.6
     nmake -f makefile.msc
 
-    All of this managed to build bzip2-1.0.5\libbz2.lib, which the Python
+    All of this managed to build bzip2-1.0.6\libbz2.lib, which the Python
     project links in.
 
 
@@ -153,10 +153,9 @@
 
     Unpack into the "dist" directory, retaining the folder name from
     the archive - for example, the latest stable OpenSSL will install as
-        dist/openssl-1.0.0a
+        dist/openssl-1.0.0j
 
-    You can (theoretically) use any version of OpenSSL you like - the
-    build process will automatically select the latest version.
+    You need to use version 1.0.0j of OpenSSL.
 
     You can install the NASM assembler from
         http://www.nasm.us/
diff -r 3d0686d90f55 PC/VS7.1/pythoncore.vcproj
--- a/PC/VS7.1/pythoncore.vcproj
+++ b/PC/VS7.1/pythoncore.vcproj
@@ -767,6 +767,9 @@
 			RelativePath="..\..\Python\pythonrun.c">
 		</File>
 		<File
+			RelativePath="..\..\Python\random.c">
+		</File>
+		<File
 			RelativePath="..\..\Objects\rangeobject.c">
 		</File>
 		<File
diff -r 3d0686d90f55 PC/VS7.1/select.vcproj
--- a/PC/VS7.1/select.vcproj
+++ b/PC/VS7.1/select.vcproj
@@ -35,7 +35,7 @@
 				Name="VCCustomBuildTool"/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				OutputFile="./select.pyd"
 				LinkIncremental="1"
 				SuppressStartupBanner="TRUE"
@@ -89,7 +89,7 @@
 				Name="VCCustomBuildTool"/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				OutputFile="./select_d.pyd"
 				LinkIncremental="1"
 				SuppressStartupBanner="TRUE"
@@ -151,7 +151,7 @@
 			<Tool
 				Name="VCLinkerTool"
 				AdditionalOptions=" /MACHINE:IA64 /USELINK:MS_SDK"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				OutputFile="./select.pyd"
 				LinkIncremental="1"
 				SuppressStartupBanner="TRUE"
@@ -213,7 +213,7 @@
 			<Tool
 				Name="VCLinkerTool"
 				AdditionalOptions=" /MACHINE:AMD64 /USELINK:MS_SDK"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				OutputFile="./select.pyd"
 				LinkIncremental="1"
 				SuppressStartupBanner="TRUE"
diff -r 3d0686d90f55 PC/VS8.0/bz2.vcproj
--- a/PC/VS8.0/bz2.vcproj
+++ b/PC/VS8.0/bz2.vcproj
@@ -532,7 +532,7 @@
 			</File>
 		</Filter>
 		<Filter
-			Name="bzip2 1.0.5 Header Files"
+			Name="bzip2 1.0.6 Header Files"
 			>
 			<File
 				RelativePath="$(bz2Dir)\bzlib.h"
@@ -544,7 +544,7 @@
 			</File>
 		</Filter>
 		<Filter
-			Name="bzip2 1.0.5 Source Files"
+			Name="bzip2 1.0.6 Source Files"
 			>
 			<File
 				RelativePath="$(bz2Dir)\blocksort.c"
diff -r 3d0686d90f55 PC/VS8.0/pyproject.vsprops
--- a/PC/VS8.0/pyproject.vsprops
+++ b/PC/VS8.0/pyproject.vsprops
@@ -54,11 +54,11 @@
 	/>
 	<UserMacro
 		Name="bz2Dir"
-		Value="$(externalsDir)\bzip2-1.0.5"
+		Value="$(externalsDir)\bzip2-1.0.6"
 	/>
 	<UserMacro
 		Name="opensslDir"
-		Value="$(externalsDir)\openssl-1.0.0a"
+		Value="$(externalsDir)\openssl-1.0.0j"
 	/>
 	<UserMacro
 		Name="tcltkDir"
diff -r 3d0686d90f55 PC/VS8.0/pythoncore.vcproj
--- a/PC/VS8.0/pythoncore.vcproj
+++ b/PC/VS8.0/pythoncore.vcproj
@@ -1883,6 +1883,10 @@
 				>
 			</File>
 			<File
+				RelativePath="..\..\Python\random.c"
+				>
+			</File>
+			<File
 				RelativePath="..\..\Python\structmember.c"
 				>
 			</File>
diff -r 3d0686d90f55 PC/VS8.0/select.vcproj
--- a/PC/VS8.0/select.vcproj
+++ b/PC/VS8.0/select.vcproj
@@ -54,7 +54,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -116,7 +116,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -178,7 +178,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -241,7 +241,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -303,7 +303,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -366,7 +366,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 				TargetMachine="17"
@@ -429,7 +429,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -492,7 +492,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 				TargetMachine="17"
diff -r 3d0686d90f55 PC/_subprocess.c
--- a/PC/_subprocess.c
+++ b/PC/_subprocess.c
@@ -684,6 +684,7 @@
     defint(d, "WAIT_OBJECT_0", WAIT_OBJECT_0);
     defint(d, "CREATE_NEW_CONSOLE", CREATE_NEW_CONSOLE);
     defint(d, "CREATE_NEW_PROCESS_GROUP", CREATE_NEW_PROCESS_GROUP);
+    defint(d, "STILL_ACTIVE", STILL_ACTIVE);
 
     return m;
 }
diff -r 3d0686d90f55 PC/pyconfig.h
--- a/PC/pyconfig.h
+++ b/PC/pyconfig.h
@@ -340,7 +340,7 @@
 #	define SIZEOF_FPOS_T 8
 #	define SIZEOF_HKEY 8
 #	define SIZEOF_SIZE_T 8
-/* configure.in defines HAVE_LARGEFILE_SUPPORT iff HAVE_LONG_LONG,
+/* configure.ac defines HAVE_LARGEFILE_SUPPORT iff HAVE_LONG_LONG,
    sizeof(off_t) > sizeof(long), and sizeof(PY_LONG_LONG) >= sizeof(off_t).
    On Win64 the second condition is not true, but if fpos_t replaces off_t
    then this is true. The uses of HAVE_LARGEFILE_SUPPORT imply that Win64
diff -r 3d0686d90f55 PC/python3.mak
--- a/PC/python3.mak
+++ b/PC/python3.mak
@@ -1,4 +1,4 @@
-$(OutDir)python32.dll:	python3.def $(OutDir)python32stub.lib
+$(OutDir)python3.dll:	python3.def $(OutDir)python32stub.lib
 	cl /LD /Fe$(OutDir)python3.dll python3dll.c python3.def $(OutDir)python32stub.lib
 
 $(OutDir)python32stub.lib:	python32stub.def
@@ -7,4 +7,4 @@
 clean:
 	del $(OutDir)python3.dll $(OutDir)python3.lib $(OutDir)python32stub.lib $(OutDir)python3.exp $(OutDir)python32stub.exp
 
-rebuild: clean $(OutDir)python32.dll
+rebuild: clean $(OutDir)python3.dll
diff -r 3d0686d90f55 PC/winreg.c
--- a/PC/winreg.c
+++ b/PC/winreg.c
@@ -1110,7 +1110,7 @@
      * nul.  RegEnumKeyEx requires a 257 character buffer to
      * retrieve such a key name. */
     wchar_t tmpbuf[257];
-    DWORD len = sizeof(tmpbuf); /* includes NULL terminator */
+    DWORD len = sizeof(tmpbuf)/sizeof(wchar_t); /* includes NULL terminator */
 
     if (!PyArg_ParseTuple(args, "Oi:EnumKey", &obKey, &index))
         return NULL;
diff -r 3d0686d90f55 PCbuild/build_ssl.py
--- a/PCbuild/build_ssl.py
+++ b/PCbuild/build_ssl.py
@@ -63,37 +63,13 @@
     print(" Please install ActivePerl and ensure it appears on your path")
     return None
 
-# Locate the best SSL directory given a few roots to look into.
-def find_best_ssl_dir(sources):
-    candidates = []
-    for s in sources:
-        try:
-            # note: do not abspath s; the build will fail if any
-            # higher up directory name has spaces in it.
-            fnames = os.listdir(s)
-        except os.error:
-            fnames = []
-        for fname in fnames:
-            fqn = os.path.join(s, fname)
-            if os.path.isdir(fqn) and fname.startswith("openssl-"):
-                candidates.append(fqn)
-    # Now we have all the candidates, locate the best.
-    best_parts = []
-    best_name = None
-    for c in candidates:
-        parts = re.split("[.-]", os.path.basename(c))[1:]
-        # eg - openssl-0.9.7-beta1 - ignore all "beta" or any other qualifiers
-        if len(parts) >= 4:
-            continue
-        if parts > best_parts:
-            best_parts = parts
-            best_name = c
-    if best_name is not None:
-        print("Found an SSL directory at '%s'" % (best_name,))
-    else:
-        print("Could not find an SSL directory in '%s'" % (sources,))
-    sys.stdout.flush()
-    return best_name
+# Fetch SSL directory from VC properties
+def get_ssl_dir():
+    propfile = (os.path.join(os.path.dirname(__file__), 'pyproject.vsprops'))
+    with open(propfile) as f:
+        m = re.search('openssl-([^"]+)"', f.read())
+        return "..\..\openssl-"+m.group(1)
+
 
 def create_makefile64(makefile, m32):
     """Create and fix makefile for 64bit
@@ -202,7 +178,7 @@
         print("No Perl installation was found. Existing Makefiles are used.")
     sys.stdout.flush()
     # Look for SSL 2 levels up from pcbuild - ie, same place zlib etc all live.
-    ssl_dir = find_best_ssl_dir(("..\\..",))
+    ssl_dir = get_ssl_dir()
     if ssl_dir is None:
         sys.exit(1)
 
diff -r 3d0686d90f55 PCbuild/bz2.vcproj
--- a/PCbuild/bz2.vcproj
+++ b/PCbuild/bz2.vcproj
@@ -532,7 +532,7 @@
 			</File>
 		</Filter>
 		<Filter
-			Name="bzip2 1.0.5 Header Files"
+			Name="bzip2 1.0.6 Header Files"
 			>
 			<File
 				RelativePath="$(bz2Dir)\bzlib.h"
@@ -544,7 +544,7 @@
 			</File>
 		</Filter>
 		<Filter
-			Name="bzip2 1.0.5 Source Files"
+			Name="bzip2 1.0.6 Source Files"
 			>
 			<File
 				RelativePath="$(bz2Dir)\blocksort.c"
diff -r 3d0686d90f55 PCbuild/pginstrument.vsprops
--- a/PCbuild/pginstrument.vsprops
+++ b/PCbuild/pginstrument.vsprops
@@ -22,7 +22,7 @@
 	<Tool
 		Name="VCLinkerTool"
 		OptimizeReferences="2"
-		EnableCOMDATFolding="2"
+		EnableCOMDATFolding="1"
 		LinkTimeCodeGeneration="2"
 		ProfileGuidedDatabase="$(SolutionDir)$(PlatformName)-pgi\$(TargetName).pgd"
 		ImportLibrary="$(OutDirPGI)\$(TargetName).lib"
diff -r 3d0686d90f55 PCbuild/pyproject.vsprops
--- a/PCbuild/pyproject.vsprops
+++ b/PCbuild/pyproject.vsprops
@@ -54,11 +54,11 @@
 	/>
 	<UserMacro
 		Name="bz2Dir"
-		Value="$(externalsDir)\bzip2-1.0.5"
+		Value="$(externalsDir)\bzip2-1.0.6"
 	/>
 	<UserMacro
 		Name="opensslDir"
-		Value="$(externalsDir)\openssl-1.0.0a"
+		Value="$(externalsDir)\openssl-1.0.0j"
 	/>
 	<UserMacro
 		Name="tcltkDir"
diff -r 3d0686d90f55 PCbuild/readme.txt
--- a/PCbuild/readme.txt
+++ b/PCbuild/readme.txt
@@ -118,28 +118,28 @@
     Download the source from the python.org copy into the dist
     directory:
 
-    svn export http://svn.python.org/projects/external/bzip2-1.0.5
+    svn export http://svn.python.org/projects/external/bzip2-1.0.6
 
     ** NOTE: if you use the Tools\buildbot\external(-amd64).bat approach for
     obtaining external sources then you don't need to manually get the source
     above via subversion. **
 
     A custom pre-link step in the bz2 project settings should manage to
-    build bzip2-1.0.5\libbz2.lib by magic before bz2.pyd (or bz2_d.pyd) is
+    build bzip2-1.0.6\libbz2.lib by magic before bz2.pyd (or bz2_d.pyd) is
     linked in PCbuild\.
     However, the bz2 project is not smart enough to remove anything under
-    bzip2-1.0.5\ when you do a clean, so if you want to rebuild bzip2.lib
-    you need to clean up bzip2-1.0.5\ by hand.
+    bzip2-1.0.6\ when you do a clean, so if you want to rebuild bzip2.lib
+    you need to clean up bzip2-1.0.6\ by hand.
 
     All of this managed to build libbz2.lib in 
-    bzip2-1.0.5\$platform-$configuration\, which the Python project links in.
+    bzip2-1.0.6\$platform-$configuration\, which the Python project links in.
 
 _ssl
     Python wrapper for the secure sockets library.
 
     Get the source code through
 
-    svn export http://svn.python.org/projects/external/openssl-1.0.0a
+    svn export http://svn.python.org/projects/external/openssl-1.0.0j
 
     ** NOTE: if you use the Tools\buildbot\external(-amd64).bat approach for
     obtaining external sources then you don't need to manually get the source
diff -r 3d0686d90f55 PCbuild/select.vcproj
--- a/PCbuild/select.vcproj
+++ b/PCbuild/select.vcproj
@@ -54,7 +54,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -116,7 +116,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -178,7 +178,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -241,7 +241,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -303,7 +303,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -366,7 +366,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 				TargetMachine="17"
@@ -429,7 +429,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -492,7 +492,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 				TargetMachine="17"
diff -r 3d0686d90f55 PCbuild/xxlimited.vcproj
--- a/PCbuild/xxlimited.vcproj
+++ b/PCbuild/xxlimited.vcproj
@@ -56,7 +56,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -119,7 +119,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -182,7 +182,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -245,7 +245,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 				TargetMachine="17"
@@ -309,7 +309,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 			/>
@@ -372,7 +372,7 @@
 			/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalDependencies="wsock32.lib"
+				AdditionalDependencies="ws2_32.lib"
 				IgnoreDefaultLibraryNames="libc"
 				BaseAddress="0x1D110000"
 				TargetMachine="17"
diff -r 3d0686d90f55 Parser/myreadline.c
--- a/Parser/myreadline.c
+++ b/Parser/myreadline.c
@@ -37,42 +37,41 @@
 {
     char *p;
     int err;
+#ifdef MS_WINDOWS
+    int i;
+#endif
+
     while (1) {
         if (PyOS_InputHook != NULL)
             (void)(PyOS_InputHook)();
         errno = 0;
         clearerr(fp);
-        p = fgets(buf, len, fp);
+        if (_PyVerify_fd(fileno(fp)))
+            p = fgets(buf, len, fp);
+        else
+            p = NULL;
         if (p != NULL)
             return 0; /* No error */
         err = errno;
 #ifdef MS_WINDOWS
-        /* In the case of a Ctrl+C or some other external event
-           interrupting the operation:
-           Win2k/NT: ERROR_OPERATION_ABORTED is the most recent Win32
-           error code (and feof() returns TRUE).
-           Win9x: Ctrl+C seems to have no effect on fgets() returning
-           early - the signal handler is called, but the fgets()
-           only returns "normally" (ie, when Enter hit or feof())
+        /* Ctrl-C anywhere on the line or Ctrl-Z if the only character
+           on a line will set ERROR_OPERATION_ABORTED. Under normal
+           circumstances Ctrl-C will also have caused the SIGINT handler
+           to fire. This signal fires in another thread and is not
+           guaranteed to have occurred before this point in the code.
+
+           Therefore: check in a small loop to see if the trigger has
+           fired, in which case assume this is a Ctrl-C event. If it
+           hasn't fired within 10ms assume that this is a Ctrl-Z on its
+           own or that the signal isn't going to fire for some other
+           reason and drop through to check for EOF.
         */
         if (GetLastError()==ERROR_OPERATION_ABORTED) {
-            /* Signals come asynchronously, so we sleep a brief
-               moment before checking if the handler has been
-               triggered (we cant just return 1 before the
-               signal handler has been called, as the later
-               signal may be treated as a separate interrupt).
-            */
+            for (i = 0; i < 10; i++) {
+                if (PyOS_InterruptOccurred())
+                    return 1;
             Sleep(1);
-            if (PyOS_InterruptOccurred()) {
-                return 1; /* Interrupt */
             }
-            /* Either the sleep wasn't long enough (need a
-               short loop retrying?) or not interrupted at all
-               (in which case we should revisit the whole thing!)
-               Logging some warning would be nice.  assert is not
-               viable as under the debugger, the various dialogs
-               mean the condition is not true.
-            */
         }
 #endif /* MS_WINDOWS */
         if (feof(fp)) {
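
# Note (illustrative, not part of the diff): the myreadline.c hunk above replaces a
# single 1 ms sleep with a short polling loop, because the SIGINT handler runs in
# another thread and may not have fired yet when fgets() is aborted.  A minimal
# Python sketch of the same poll-with-deadline idea; the names wait_for_interrupt,
# check_interrupt and MAX_POLLS are made up for the example and do not exist in CPython:

import time

MAX_POLLS = 10               # give the handler roughly 10 ms, one poll per millisecond

def wait_for_interrupt(check_interrupt):
    """Return True if the asynchronous handler fired within the polling window."""
    for _ in range(MAX_POLLS):
        if check_interrupt():
            return True      # treat the aborted read as a Ctrl-C
        time.sleep(0.001)
    return False             # assume Ctrl-Z or a spurious abort; fall through to the EOF check
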
diff -r 3d0686d90f55 Parser/node.c
--- a/Parser/node.c
+++ b/Parser/node.c
@@ -114,6 +114,7 @@
 
 /* Forward */
 static void freechildren(node *);
+static Py_ssize_t sizeofchildren(node *n);
 
 
 void
@@ -125,6 +126,16 @@
     }
 }
 
+Py_ssize_t
+_PyNode_SizeOf(node *n)
+{
+    Py_ssize_t res = 0;
+
+    if (n != NULL)
+        res = sizeof(node) + sizeofchildren(n);
+    return res;
+}
+
 static void
 freechildren(node *n)
 {
@@ -136,3 +147,18 @@
     if (STR(n) != NULL)
         PyObject_FREE(STR(n));
 }
+
+static Py_ssize_t
+sizeofchildren(node *n)
+{
+    Py_ssize_t res = 0;
+    int i;
+    for (i = NCH(n); --i >= 0; )
+        res += sizeofchildren(CHILD(n, i));
+    if (n->n_child != NULL)
+        /* allocated size of n->n_child array */
+        res += XXXROUNDUP(NCH(n)) * sizeof(node);
+    if (STR(n) != NULL)
+        res += strlen(STR(n)) + 1;
+    return res;
+}
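
# Note (illustrative, not part of the diff): sizeofchildren() above recursively adds up
# the node struct, the over-allocated n_child array (XXXROUNDUP) and the NUL-terminated
# token string of every node.  A rough Python analogue, assuming a hypothetical Node
# object with .children (a list) and .text (a str or None); nothing below is CPython API:

def tree_size(node, node_cost=1, round_up=lambda n: n):
    """Estimate the footprint of a parse-tree-like structure."""
    size = node_cost                                        # the node itself
    if node.children:
        size += round_up(len(node.children)) * node_cost    # allocated child slots
        size += sum(tree_size(c, node_cost, round_up) for c in node.children)
    if node.text is not None:
        size += len(node.text) + 1                          # trailing NUL in the C version
    return size
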
diff -r 3d0686d90f55 Parser/parsetok.c
--- a/Parser/parsetok.c
+++ b/Parser/parsetok.c
@@ -127,7 +127,7 @@
 {
     parser_state *ps;
     node *n;
-    int started = 0, handling_import = 0, handling_with = 0;
+    int started = 0;
 
     if ((ps = PyParser_New(g, start)) == NULL) {
         fprintf(stderr, "no mem for new parser\n");
@@ -154,7 +154,6 @@
         }
         if (type == ENDMARKER && started) {
             type = NEWLINE; /* Add an extra newline */
-            handling_with = handling_import = 0;
             started = 0;
             /* Add the right number of dedent tokens,
                except if a certain flag is given --
diff -r 3d0686d90f55 Python/ast.c
--- a/Python/ast.c
+++ b/Python/ast.c
@@ -645,7 +645,7 @@
 }
 
 static arg_ty
-compiler_arg(struct compiling *c, const node *n)
+ast_for_arg(struct compiling *c, const node *n)
 {
     identifier name;
     expr_ty annotation = NULL;
@@ -666,12 +666,6 @@
     }
 
     return arg(name, annotation, c->c_arena);
-#if 0
-    result = Tuple(args, Store, LINENO(n), n->n_col_offset, c->c_arena);
-    if (!set_context(c, result, Store, n))
-        return NULL;
-    return result;
-#endif
 }
 
 /* returns -1 if failed to handle keyword only arguments
@@ -859,7 +853,7 @@
                              "non-default argument follows default argument");
                     return NULL;
                 }
-                arg = compiler_arg(c, ch);
+                arg = ast_for_arg(c, ch);
                 if (!arg)
                     return NULL;
                 asdl_seq_SET(posargs, k++, arg);
diff -r 3d0686d90f55 Python/bltinmodule.c
--- a/Python/bltinmodule.c
+++ b/Python/bltinmodule.c
@@ -158,10 +158,8 @@
             cls = PyEval_CallObjectWithKeywords(meta, margs, mkw);
             Py_DECREF(margs);
         }
-        if (cls != NULL && PyCell_Check(cell)) {
-            Py_INCREF(cls);
-            PyCell_SET(cell, cls);
-        }
+        if (cls != NULL && PyCell_Check(cell))
+            PyCell_Set(cell, cls);
         Py_DECREF(cell);
     }
     Py_DECREF(ns);
@@ -430,9 +428,11 @@
             ok = PyObject_IsTrue(good);
             Py_DECREF(good);
         }
-        if (ok)
+        if (ok > 0)
             return item;
         Py_DECREF(item);
+        if (ok < 0)
+            return NULL;
     }
 }
 
@@ -631,6 +631,8 @@
             mod_ty mod;
 
             arena = PyArena_New();
+            if (arena == NULL)
+                goto error;
             mod = PyAST_obj2mod(cmd, arena, mode);
             if (mod == NULL) {
                 PyArena_Free(arena);
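
# Note (illustrative, not part of the diff): the filter() fix above matters because
# PyObject_IsTrue() is tri-state: 1 for true, 0 for false, -1 when evaluating the truth
# value raised.  The old "if (ok)" treated the error case like a match.  A small Python
# model of that convention (is_true_tristate is a made-up helper, not a CPython function):

def is_true_tristate(obj):
    """Rough model of PyObject_IsTrue(): 1 if true, 0 if false, -1 if bool(obj) raises."""
    try:
        return 1 if obj else 0
    except Exception:
        return -1

# Callers must then distinguish ok > 0 (keep the item), ok == 0 (skip it) and
# ok < 0 (propagate the error) instead of using a plain truth test.
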
diff -r 3d0686d90f55 Python/compile.c
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -367,16 +367,33 @@
 static PyObject *
 dictbytype(PyObject *src, int scope_type, int flag, int offset)
 {
-    Py_ssize_t pos = 0, i = offset, scope;
+    Py_ssize_t i = offset, scope, num_keys, key_i;
     PyObject *k, *v, *dest = PyDict_New();
+    PyObject *sorted_keys;
 
     assert(offset >= 0);
     if (dest == NULL)
         return NULL;
 
-    while (PyDict_Next(src, &pos, &k, &v)) {
+    /* Sort the keys so that we have a deterministic order on the indexes
+       saved in the returned dictionary.  These indexes are used as indexes
+       into the free and cell var storage.  Therefore if they aren't
+       deterministic, then the generated bytecode is not deterministic.
+    */
+    sorted_keys = PyDict_Keys(src);
+    if (sorted_keys == NULL)
+        return NULL;
+    if (PyList_Sort(sorted_keys) != 0) {
+        Py_DECREF(sorted_keys);
+        return NULL;
+    }
+    num_keys = PyList_GET_SIZE(sorted_keys);
+
+    for (key_i = 0; key_i < num_keys; key_i++) {
         /* XXX this should probably be a macro in symtable.h */
         long vi;
+        k = PyList_GET_ITEM(sorted_keys, key_i);
+        v = PyDict_GetItem(src, k);
         assert(PyLong_Check(v));
         vi = PyLong_AS_LONG(v);
         scope = (vi >> SCOPE_OFFSET) & SCOPE_MASK;
@@ -384,12 +401,14 @@
         if (scope == scope_type || vi & flag) {
             PyObject *tuple, *item = PyLong_FromLong(i);
             if (item == NULL) {
+                Py_DECREF(sorted_keys);
                 Py_DECREF(dest);
                 return NULL;
             }
             i++;
             tuple = PyTuple_Pack(2, k, k->ob_type);
             if (!tuple || PyDict_SetItem(dest, tuple, item) < 0) {
+                Py_DECREF(sorted_keys);
                 Py_DECREF(item);
                 Py_DECREF(dest);
                 Py_XDECREF(tuple);
@@ -399,6 +418,7 @@
             Py_DECREF(tuple);
         }
     }
+    Py_DECREF(sorted_keys);
     return dest;
 }
 
@@ -1319,7 +1339,11 @@
         arg_ty arg = asdl_seq_GET(kwonlyargs, i);
         expr_ty default_ = asdl_seq_GET(kw_defaults, i);
         if (default_) {
-            ADDOP_O(c, LOAD_CONST, arg->arg, consts);
+            PyObject *mangled = _Py_Mangle(c->u->u_private, arg->arg);
+            if (!mangled)
+                return -1;
+            ADDOP_O(c, LOAD_CONST, mangled, consts);
+            Py_DECREF(mangled);
             if (!compiler_visit_expr(c, default_)) {
                 return -1;
             }
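
# Note (illustrative, not part of the diff): dictbytype() above sorts the symbol-table
# keys before assigning cell/free variable indexes, so the indexes, and hence the
# generated bytecode, no longer depend on dict iteration order.  The same idea in plain
# Python (assign_indexes is only an example name):

def assign_indexes(names, offset=0):
    """Give each name a slot index in a reproducible (sorted) order."""
    return {name: offset + i for i, name in enumerate(sorted(names))}

# assign_indexes({"spam", "eggs", "ham"}) always produces the same mapping; numbering
# the slots while iterating the unsorted set could differ from run to run.
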
diff -r 3d0686d90f55 Python/future.c
--- a/Python/future.c
+++ b/Python/future.c
@@ -60,13 +60,6 @@
 {
     int i, found_docstring = 0, done = 0, prev_line = 0;
 
-    static PyObject *future;
-    if (!future) {
-        future = PyUnicode_InternFromString("__future__");
-        if (!future)
-            return 0;
-    }
-
     if (!(mod->kind == Module_kind || mod->kind == Interactive_kind))
         return 1;
 
@@ -93,7 +86,9 @@
         */
 
         if (s->kind == ImportFrom_kind) {
-            if (s->v.ImportFrom.module == future) {
+            identifier modname = s->v.ImportFrom.module;
+            if (modname &&
+                !PyUnicode_CompareWithASCIIString(modname, "__future__")) {
                 if (done) {
                     PyErr_SetString(PyExc_SyntaxError,
                                     ERR_LATE_FUTURE);
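
# Note (illustrative, not part of the diff): future.c now compares the module name by
# value (PyUnicode_CompareWithASCIIString) instead of by identity against a cached
# interned string, which is only safe if every producer of the name interned it.
# The same distinction at the Python level:

cached = "__future__"
candidate = "".join(["__", "future", "__"])   # equal value, but may be a distinct object
assert candidate == cached                    # value comparison is always reliable
# "candidate is cached" can legitimately be False, which is why the identity test
# against a cached interned string was fragile.
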
diff -r 3d0686d90f55 Python/getargs.c
--- a/Python/getargs.c
+++ b/Python/getargs.c
@@ -1633,6 +1633,7 @@
     case 'z': /* string or None */
     case 'y': /* bytes */
     case 'u': /* unicode string */
+    case 'Z': /* unicode string or None */
     case 'w': /* buffer, read-write */
         {
             (void) va_arg(*p_va, char **);
diff -r 3d0686d90f55 Python/import.c
--- a/Python/import.c
+++ b/Python/import.c
@@ -1291,9 +1291,9 @@
 {
     struct stat st;
     FILE *fpc;
-    char buf[MAXPATHLEN+1];
+    char *buf;
     char *cpathname;
-    PyCodeObject *co;
+    PyCodeObject *co = NULL;
     PyObject *m;
 
     if (fstat(fileno(fp), &st) != 0) {
@@ -1310,6 +1310,10 @@
          */
         st.st_mtime &= 0xFFFFFFFF;
     }
+    buf = PyMem_MALLOC(MAXPATHLEN+1);
+    if (buf == NULL) {
+        return PyErr_NoMemory();
+    }
     cpathname = make_compiled_pathname(
         pathname, buf, (size_t)MAXPATHLEN + 1, !Py_OptimizeFlag);
     if (cpathname != NULL &&
@@ -1317,9 +1321,9 @@
         co = read_compiled_module(cpathname, fpc);
         fclose(fpc);
         if (co == NULL)
-            return NULL;
+            goto error_exit;
         if (update_compiled_module(co, pathname) < 0)
-            return NULL;
+            goto error_exit;
         if (Py_VerboseFlag)
             PySys_WriteStderr("import %s # precompiled from %s\n",
                 name, cpathname);
@@ -1328,13 +1332,16 @@
     else {
         co = parse_source_module(pathname, fp);
         if (co == NULL)
-            return NULL;
+            goto error_exit;
         if (Py_VerboseFlag)
             PySys_WriteStderr("import %s # from %s\n",
                 name, pathname);
         if (cpathname) {
             PyObject *ro = PySys_GetObject("dont_write_bytecode");
-            if (ro == NULL || !PyObject_IsTrue(ro))
+            int b = (ro == NULL) ? 0 : PyObject_IsTrue(ro);
+            if (b < 0)
+                goto error_exit;
+            if (!b)
                 write_compiled_module(co, cpathname, &st);
         }
     }
@@ -1342,7 +1349,13 @@
         name, (PyObject *)co, pathname, cpathname);
     Py_DECREF(co);
 
+    PyMem_FREE(buf);
     return m;
+
+error_exit:
+    Py_XDECREF(co);
+    PyMem_FREE(buf);
+    return NULL;
 }
 
 /* Get source file -> unicode or None
@@ -1351,7 +1364,7 @@
 static PyObject *
 get_sourcefile(char *file)
 {
-    char py[MAXPATHLEN + 1];
+    char *py = NULL;
     Py_ssize_t len;
     PyObject *u;
     struct stat statbuf;
@@ -1366,6 +1379,10 @@
         return PyUnicode_DecodeFSDefault(file);
     }
 
+    py = PyMem_MALLOC(MAXPATHLEN+1);
+    if (py == NULL) {
+        return PyErr_NoMemory();
+    }
     /* Start by trying to turn PEP 3147 path into source path.  If that
      * fails, just chop off the trailing character, i.e. legacy pyc path
      * to py.
@@ -1382,6 +1399,7 @@
     else {
         u = PyUnicode_DecodeFSDefault(file);
     }
+    PyMem_FREE(py);
     return u;
 }
 
@@ -1401,7 +1419,7 @@
     PyObject *file = NULL;
     PyObject *path = NULL;
     int err;
-    char buf[MAXPATHLEN+1];
+    char *buf = NULL;
     FILE *fp = NULL;
     struct filedescr *fdp;
 
@@ -1423,8 +1441,13 @@
         err = PyDict_SetItemString(d, "__path__", path);
     if (err != 0)
         goto error;
+    buf = PyMem_MALLOC(MAXPATHLEN+1);
+    if (buf == NULL) {
+        PyErr_NoMemory();
+        goto error;
+    }
     buf[0] = '\0';
-    fdp = find_module(name, "__init__", path, buf, sizeof(buf), &fp, NULL);
+    fdp = find_module(name, "__init__", path, buf, MAXPATHLEN+1, &fp, NULL);
     if (fdp == NULL) {
         if (PyErr_ExceptionMatches(PyExc_ImportError)) {
             PyErr_Clear();
@@ -1442,6 +1465,8 @@
   error:
     m = NULL;
   cleanup:
+    if (buf)
+        PyMem_FREE(buf);
     Py_XDECREF(path);
     Py_XDECREF(file);
     return m;
@@ -1571,7 +1596,7 @@
     static struct filedescr fd_frozen = {"", "", PY_FROZEN};
     static struct filedescr fd_builtin = {"", "", C_BUILTIN};
     static struct filedescr fd_package = {"", "", PKG_DIRECTORY};
-    char name[MAXPATHLEN+1];
+    char *name;
 #if defined(PYOS_OS2)
     size_t saved_len;
     size_t saved_namelen;
@@ -1585,6 +1610,11 @@
                         "module name is too long");
         return NULL;
     }
+    name = PyMem_MALLOC(MAXPATHLEN+1);
+    if (name == NULL) {
+        PyErr_NoMemory();
+        return NULL;
+    }
     strcpy(name, subname);
 
     /* sys.meta_path import hook */
@@ -1596,7 +1626,7 @@
             PyErr_SetString(PyExc_RuntimeError,
                             "sys.meta_path must be a list of "
                             "import hooks");
-            return NULL;
+            goto error_exit;
         }
         Py_INCREF(meta_path);  /* zap guard */
         npath = PyList_Size(meta_path);
@@ -1609,12 +1639,13 @@
                                          path : Py_None);
             if (loader == NULL) {
                 Py_DECREF(meta_path);
-                return NULL;  /* true error */
+                goto error_exit;  /* true error */
             }
             if (loader != Py_None) {
                 /* a loader was found */
                 *p_loader = loader;
                 Py_DECREF(meta_path);
+                PyMem_FREE(name);
                 return &importhookdescr;
             }
             Py_DECREF(loader);
@@ -1624,18 +1655,21 @@
 
     if (find_frozen(fullname) != NULL) {
         strcpy(buf, fullname);
+        PyMem_FREE(name);
         return &fd_frozen;
     }
 
     if (path == NULL) {
         if (is_builtin(name)) {
             strcpy(buf, name);
+            PyMem_FREE(name);
             return &fd_builtin;
         }
 #ifdef MS_COREDLL
         fp = PyWin_FindRegisteredModule(name, &fdp, buf, buflen);
         if (fp != NULL) {
             *p_fp = fp;
+            PyMem_FREE(name);
             return fdp;
         }
 #endif
@@ -1645,7 +1679,7 @@
     if (path == NULL || !PyList_Check(path)) {
         PyErr_SetString(PyExc_RuntimeError,
                         "sys.path must be a list of directory names");
-        return NULL;
+        goto error_exit;
     }
 
     path_hooks = PySys_GetObject("path_hooks");
@@ -1653,14 +1687,14 @@
         PyErr_SetString(PyExc_RuntimeError,
                         "sys.path_hooks must be a list of "
                         "import hooks");
-        return NULL;
+        goto error_exit;
     }
     path_importer_cache = PySys_GetObject("path_importer_cache");
     if (path_importer_cache == NULL ||
         !PyDict_Check(path_importer_cache)) {
         PyErr_SetString(PyExc_RuntimeError,
                         "sys.path_importer_cache must be a dict");
-        return NULL;
+        goto error_exit;
     }
 
     npath = PyList_Size(path);
@@ -1671,11 +1705,11 @@
         const char *base;
         Py_ssize_t size;
         if (!v)
-            return NULL;
+            goto error_exit;
         if (PyUnicode_Check(v)) {
             v = PyUnicode_EncodeFSDefault(v);
             if (v == NULL)
-                return NULL;
+                goto error_exit;
         }
         else if (!PyBytes_Check(v))
             continue;
@@ -1703,7 +1737,7 @@
             importer = get_path_importer(path_importer_cache,
                                          path_hooks, origv);
             if (importer == NULL) {
-                return NULL;
+                goto error_exit;
             }
             /* Note: importer is a borrowed reference */
             if (importer != Py_None) {
@@ -1712,10 +1746,11 @@
                                              "find_module",
                                              "s", fullname);
                 if (loader == NULL)
-                    return NULL;  /* error */
+                    goto error_exit;  /* error */
                 if (loader != Py_None) {
                     /* a loader was found */
                     *p_loader = loader;
+                    PyMem_FREE(name);
                     return &importhookdescr;
                 }
                 Py_DECREF(loader);
@@ -1740,19 +1775,20 @@
             S_ISDIR(statbuf.st_mode) &&         /* it's a directory */
             case_ok(buf, len, namelen, name)) { /* case matches */
             if (find_init_module(buf)) { /* and has __init__.py */
+                PyMem_FREE(name);
                 return &fd_package;
             }
             else {
                 int err;
                 PyObject *unicode = PyUnicode_DecodeFSDefault(buf);
                 if (unicode == NULL)
-                    return NULL;
+                    goto error_exit;
                 err = PyErr_WarnFormat(PyExc_ImportWarning, 1,
                     "Not importing directory '%U': missing __init__.py",
                     unicode);
                 Py_DECREF(unicode);
                 if (err)
-                    return NULL;
+                    goto error_exit;
             }
         }
 #endif
@@ -1833,10 +1869,15 @@
     if (fp == NULL) {
         PyErr_Format(PyExc_ImportError,
                      "No module named %.200s", name);
-        return NULL;
+        goto error_exit;
     }
     *p_fp = fp;
+    PyMem_FREE(name);
     return fdp;
+
+error_exit:
+    PyMem_FREE(name);
+    return NULL;
 }
 
 /* case_ok(char* buf, Py_ssize_t len, Py_ssize_t namelen, char* name)
@@ -2416,7 +2457,7 @@
 import_module_level(char *name, PyObject *globals, PyObject *locals,
                     PyObject *fromlist, int level)
 {
-    char buf[MAXPATHLEN+1];
+    char *buf;
     Py_ssize_t buflen = 0;
     PyObject *parent, *head, *next, *tail;
 
@@ -2430,14 +2471,18 @@
         return NULL;
     }
 
+    buf = PyMem_MALLOC(MAXPATHLEN+1);
+    if (buf == NULL) {
+        return PyErr_NoMemory();
+    }
     parent = get_parent(globals, buf, &buflen, level);
     if (parent == NULL)
-        return NULL;
+        goto error_exit;
 
     head = load_next(parent, level < 0 ? Py_None : parent, &name, buf,
                         &buflen);
     if (head == NULL)
-        return NULL;
+        goto error_exit;
 
     tail = head;
     Py_INCREF(tail);
@@ -2446,7 +2491,7 @@
         Py_DECREF(tail);
         if (next == NULL) {
             Py_DECREF(head);
-            return NULL;
+            goto error_exit;
         }
         tail = next;
     }
@@ -2458,26 +2503,38 @@
         Py_DECREF(head);
         PyErr_SetString(PyExc_ValueError,
                         "Empty module name");
-        return NULL;
+        goto error_exit;
     }
 
     if (fromlist != NULL) {
-        if (fromlist == Py_None || !PyObject_IsTrue(fromlist))
+        int b = (fromlist == Py_None) ? 0 : PyObject_IsTrue(fromlist);
+        if (b < 0) {
+            Py_DECREF(tail);
+            Py_DECREF(head);
+            goto error_exit;
+        }
+        if (!b)
             fromlist = NULL;
     }
 
     if (fromlist == NULL) {
         Py_DECREF(tail);
+        PyMem_FREE(buf);
         return head;
     }
 
     Py_DECREF(head);
     if (!ensure_fromlist(tail, fromlist, buf, buflen, 0)) {
         Py_DECREF(tail);
-        return NULL;
+        goto error_exit;
     }
 
+    PyMem_FREE(buf);
     return tail;
+
+error_exit:
+    PyMem_FREE(buf);
+    return NULL;
 }
 
 PyObject *
@@ -2880,7 +2937,7 @@
     }
     else {
         PyObject *path, *loader = NULL;
-        char buf[MAXPATHLEN+1];
+        char *buf;
         struct filedescr *fdp;
         FILE *fp = NULL;
 
@@ -2895,11 +2952,16 @@
             }
         }
 
+        buf = PyMem_MALLOC(MAXPATHLEN+1);
+        if (buf == NULL) {
+            return PyErr_NoMemory();
+        }
         buf[0] = '\0';
         fdp = find_module(fullname, subname, path, buf, MAXPATHLEN+1,
                           &fp, &loader);
         Py_XDECREF(path);
         if (fdp == NULL) {
+            PyMem_FREE(buf);
             if (!PyErr_ExceptionMatches(PyExc_ImportError))
                 return NULL;
             PyErr_Clear();
@@ -2914,6 +2976,7 @@
             Py_XDECREF(m);
             m = NULL;
         }
+        PyMem_FREE(buf);
     }
 
     return m;
@@ -2931,7 +2994,7 @@
     PyObject *modules = PyImport_GetModuleDict();
     PyObject *path = NULL, *loader = NULL, *existing_m = NULL;
     char *name, *subname;
-    char buf[MAXPATHLEN+1];
+    char *buf;
     struct filedescr *fdp;
     FILE *fp = NULL;
     PyObject *newm;
@@ -2991,6 +3054,11 @@
         if (path == NULL)
             PyErr_Clear();
     }
+    buf = PyMem_MALLOC(MAXPATHLEN+1);
+    if (buf == NULL) {
+        Py_XDECREF(path);
+        return PyErr_NoMemory();
+    }
     buf[0] = '\0';
     fdp = find_module(name, subname, path, buf, MAXPATHLEN+1, &fp, &loader);
     Py_XDECREF(path);
@@ -2998,6 +3066,7 @@
     if (fdp == NULL) {
         Py_XDECREF(loader);
         imp_modules_reloading_clear();
+        PyMem_FREE(buf);
         return NULL;
     }
 
@@ -3015,6 +3084,7 @@
         PyDict_SetItemString(modules, name, m);
     }
     imp_modules_reloading_clear();
+    PyMem_FREE(buf);
     return newm;
 }
 
@@ -3168,26 +3238,32 @@
     PyObject *fob, *ret;
     PyObject *pathobj;
     struct filedescr *fdp;
-    char pathname[MAXPATHLEN+1];
+    char *pathname;
     FILE *fp = NULL;
     int fd = -1;
     char *found_encoding = NULL;
     char *encoding = NULL;
 
+    pathname = PyMem_MALLOC(MAXPATHLEN+1);
+    if (pathname == NULL) {
+        return PyErr_NoMemory();
+    }
     pathname[0] = '\0';
     if (path == Py_None)
         path = NULL;
     fdp = find_module(NULL, name, path, pathname, MAXPATHLEN+1, &fp, NULL);
     if (fdp == NULL)
-        return NULL;
+        goto error_exit;
     if (fp != NULL) {
         fd = fileno(fp);
         if (fd != -1)
             fd = dup(fd);
         fclose(fp);
         fp = NULL;
-        if (fd == -1)
-            return PyErr_SetFromErrno(PyExc_OSError);
+        if (fd == -1) {
+            PyErr_SetFromErrno(PyExc_OSError);
+            goto error_exit;
+        }
     }
     if (fd != -1) {
         if (strchr(fdp->mode, 'b') == NULL) {
@@ -3197,7 +3273,7 @@
             lseek(fd, 0, 0); /* Reset position */
             if (found_encoding == NULL && PyErr_Occurred()) {
                 close(fd);
-                return NULL;
+                goto error_exit;
             }
             encoding = (found_encoding != NULL) ? found_encoding :
                    (char*)PyUnicode_GetDefaultEncoding();
@@ -3207,7 +3283,7 @@
         if (fob == NULL) {
             close(fd);
             PyMem_FREE(found_encoding);
-            return NULL;
+            goto error_exit;
         }
     }
     else {
@@ -3218,8 +3294,12 @@
     ret = Py_BuildValue("NN(ssi)",
                   fob, pathobj, fdp->suffix, fdp->mode, fdp->type);
     PyMem_FREE(found_encoding);
-
+    PyMem_FREE(pathname);
     return ret;
+
+error_exit:
+    PyMem_FREE(pathname);
+    return NULL;
 }
 
 static PyObject *
@@ -3509,7 +3589,7 @@
 {
     static char *kwlist[] = {"path", "debug_override", NULL};
 
-    char buf[MAXPATHLEN+1];
+    char *buf;
     PyObject *pathbytes;
     char *cpathname;
     PyObject *debug_override = NULL;
@@ -3526,6 +3606,10 @@
         return NULL;
     }
 
+    buf = PyMem_MALLOC(MAXPATHLEN+1);
+    if (buf == NULL) {
+        return PyErr_NoMemory();
+    }
     cpathname = make_compiled_pathname(
         PyBytes_AS_STRING(pathbytes),
         buf, MAXPATHLEN+1, debug);
@@ -3533,9 +3617,14 @@
 
     if (cpathname == NULL) {
         PyErr_Format(PyExc_SystemError, "path buffer too short");
+        PyMem_FREE(buf);
         return NULL;
     }
-    return PyUnicode_DecodeFSDefault(buf);
+    {
+        PyObject *ret = PyUnicode_DecodeFSDefault(buf);
+        PyMem_FREE(buf);
+        return ret;
+    }
 }
 
 PyDoc_STRVAR(doc_cache_from_source,
@@ -3556,7 +3645,7 @@
 
     PyObject *pathname_obj;
     char *pathname;
-    char buf[MAXPATHLEN+1];
+    char *buf;
 
     if (!PyArg_ParseTupleAndKeywords(
                 args, kws, "O&", kwlist,
@@ -3564,14 +3653,23 @@
         return NULL;
 
     pathname = PyBytes_AS_STRING(pathname_obj);
+    buf = PyMem_MALLOC(MAXPATHLEN+1);
+    if (buf == NULL) {
+        return PyErr_NoMemory();
+    }
     if (make_source_pathname(pathname, buf) == NULL) {
         PyErr_Format(PyExc_ValueError, "Not a PEP 3147 pyc path: %s",
                      pathname);
         Py_DECREF(pathname_obj);
+        PyMem_FREE(buf);
         return NULL;
     }
     Py_DECREF(pathname_obj);
-    return PyUnicode_FromString(buf);
+    {
+        PyObject *ret = PyUnicode_FromString(buf);
+        PyMem_FREE(buf);
+        return ret;
+    }
 }
 
 PyDoc_STRVAR(doc_source_from_cache,
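
# Note (illustrative, not part of the diff): the import.c changes above move the
# MAXPATHLEN+1 path buffers from the C stack to the heap (PyMem_MALLOC) and funnel
# every failure through an error_exit label so the buffer is always freed.  The closest
# Python idiom for "release on every exit path" is try/finally; load_with_scratch_buffer
# and load_from are placeholder names for the example:

def load_with_scratch_buffer(load_from):
    """Allocate a scratch buffer and guarantee it is released on every exit path."""
    buf = bytearray(1024)          # stands in for the heap-allocated path buffer
    try:
        return load_from(buf)      # success and failure both reach the finally block
    finally:
        del buf                    # single cleanup point, like the error_exit label
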
diff -r 3d0686d90f55 Python/marshal.c
--- a/Python/marshal.c
+++ b/Python/marshal.c
@@ -411,11 +411,12 @@
     else if (PyObject_CheckBuffer(v)) {
         /* Write unknown buffer-style objects as a string */
         char *s;
-        PyBufferProcs *pb = v->ob_type->tp_as_buffer;
         Py_buffer view;
-        if ((*pb->bf_getbuffer)(v, &view, PyBUF_SIMPLE) != 0) {
+        if (PyObject_GetBuffer(v, &view, PyBUF_SIMPLE) != 0) {
             w_byte(TYPE_UNKNOWN, p);
+            p->depth--;
             p->error = WFERR_UNMARSHALLABLE;
+            return;
         }
         w_byte(TYPE_STRING, p);
         n = view.len;
@@ -427,8 +428,7 @@
         }
         w_long((long)n, p);
         w_string(s, (int)n, p);
-        if (pb->bf_releasebuffer != NULL)
-            (*pb->bf_releasebuffer)(v, &view);
+        PyBuffer_Release(&view);
     }
     else {
         w_byte(TYPE_UNKNOWN, p);
@@ -1383,7 +1383,7 @@
     char *s;
     Py_ssize_t n;
     PyObject* result;
-    if (!PyArg_ParseTuple(args, "s*:loads", &p))
+    if (!PyArg_ParseTuple(args, "y*:loads", &p))
         return NULL;
     s = p.buf;
     n = p.len;
@@ -1400,10 +1400,10 @@
 }
 
 PyDoc_STRVAR(loads_doc,
-"loads(string)\n\
+"loads(bytes)\n\
 \n\
-Convert the string to a value. If no valid value is found, raise\n\
-EOFError, ValueError or TypeError. Extra characters in the string are\n\
+Convert the bytes object to a value. If no valid value is found, raise\n\
+EOFError, ValueError or TypeError. Extra characters in the input are\n\
 ignored.");
 
 static PyMethodDef marshal_methods[] = {
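
# Note (illustrative, not part of the diff): marshal.c now goes through the public
# buffer API (PyObject_GetBuffer / PyBuffer_Release) instead of poking at tp_as_buffer
# slots, so the buffer is always released and errors return early.  The Python-level
# counterpart of that acquire/use/release discipline is memoryview (standard library,
# no extra assumptions):

data = bytearray(b"marshal me")
with memoryview(data) as view:     # acquire the buffer
    payload = bytes(view)          # use it while it is guaranteed to stay valid
# leaving the with-block releases the buffer, even if an exception was raised
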
diff -r 3d0686d90f55 Python/pystate.c
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -22,6 +22,9 @@
 #endif
 #endif
 
+#ifdef __cplusplus
+extern "C" {
+#endif
 
 #ifdef WITH_THREAD
 #include "pythread.h"
@@ -30,10 +33,6 @@
 #define HEAD_LOCK() PyThread_acquire_lock(head_mutex, WAIT_LOCK)
 #define HEAD_UNLOCK() PyThread_release_lock(head_mutex)
 
-#ifdef __cplusplus
-extern "C" {
-#endif
-
 /* The single PyInterpreterState used by this process'
    GILState implementation
 */
@@ -249,7 +248,7 @@
         return NULL;
     if (state->modules_by_index == NULL)
         return NULL;
-    if (index > PyList_GET_SIZE(state->modules_by_index))
+    if (index >= PyList_GET_SIZE(state->modules_by_index))
         return NULL;
     res = PyList_GET_ITEM(state->modules_by_index, index);
     return res==Py_None ? NULL : res;
@@ -727,10 +726,10 @@
         PyEval_SaveThread();
 }
 
+#endif /* WITH_THREAD */
+
 #ifdef __cplusplus
 }
 #endif
 
-#endif /* WITH_THREAD */
 
-
diff -r 3d0686d90f55 Python/pythonrun.c
--- a/Python/pythonrun.c
+++ b/Python/pythonrun.c
@@ -696,7 +696,11 @@
     PyInterpreterState_Delete(interp);
 }
 
+#ifdef MS_WINDOWS
 static wchar_t *progname = L"python";
+#else
+static wchar_t *progname = L"python3";
+#endif
 
 void
 Py_SetProgramName(wchar_t *pn)
@@ -866,12 +870,15 @@
     Py_CLEAR(raw);
     Py_CLEAR(text);
 
+#ifdef MS_WINDOWS
+    /* sys.stdin: enable universal newline mode, translate "\r\n" and "\r"
+       newlines to "\n".
+       sys.stdout and sys.stderr: translate "\n" to "\r\n". */
+    newline = NULL;
+#else
+    /* sys.stdin: split lines at "\n".
+       sys.stdout and sys.stderr: don't translate newlines (use "\n"). */
     newline = "\n";
-#ifdef MS_WINDOWS
-    if (!write_mode) {
-        /* translate \r\n to \n for sys.stdin on Windows */
-        newline = NULL;
-    }
 #endif
 
     stream = PyObject_CallMethod(io, "TextIOWrapper", "OsssO",
@@ -1335,56 +1342,67 @@
         return PyArg_ParseTuple(err, "O(ziiz)", message, filename,
                                 lineno, offset, text);
 
+    *message = NULL;
+
     /* new style errors.  `err' is an instance */
-
-    if (! (v = PyObject_GetAttrString(err, "msg")))
-        goto finally;
-    *message = v;
-
-    if (!(v = PyObject_GetAttrString(err, "filename")))
-        goto finally;
-    if (v == Py_None)
-        *filename = NULL;
-    else if (! (*filename = _PyUnicode_AsString(v)))
+    *message = PyObject_GetAttrString(err, "msg");
+    if (!*message)
         goto finally;
 
-    Py_DECREF(v);
-    if (!(v = PyObject_GetAttrString(err, "lineno")))
+    v = PyObject_GetAttrString(err, "filename");
+    if (!v)
+        goto finally;
+    if (v == Py_None) {
+        Py_DECREF(v);
+        *filename = NULL;
+    }
+    else {
+        *filename = _PyUnicode_AsString(v);
+        Py_DECREF(v);
+        if (!*filename)
+            goto finally;
+    }
+
+    v = PyObject_GetAttrString(err, "lineno");
+    if (!v)
         goto finally;
     hold = PyLong_AsLong(v);
     Py_DECREF(v);
-    v = NULL;
     if (hold < 0 && PyErr_Occurred())
         goto finally;
     *lineno = (int)hold;
 
-    if (!(v = PyObject_GetAttrString(err, "offset")))
+    v = PyObject_GetAttrString(err, "offset");
+    if (!v)
         goto finally;
     if (v == Py_None) {
         *offset = -1;
         Py_DECREF(v);
-        v = NULL;
     } else {
         hold = PyLong_AsLong(v);
         Py_DECREF(v);
-        v = NULL;
         if (hold < 0 && PyErr_Occurred())
             goto finally;
         *offset = (int)hold;
     }
 
-    if (!(v = PyObject_GetAttrString(err, "text")))
+    v = PyObject_GetAttrString(err, "text");
+    if (!v)
         goto finally;
-    if (v == Py_None)
+    if (v == Py_None) {
+        Py_DECREF(v);
         *text = NULL;
-    else if (!PyUnicode_Check(v) ||
-             !(*text = _PyUnicode_AsString(v)))
-        goto finally;
-    Py_DECREF(v);
+    }
+    else {
+        *text = _PyUnicode_AsString(v);
+        Py_DECREF(v);
+        if (!*text)
+            goto finally;
+    }
     return 1;
 
 finally:
-    Py_XDECREF(v);
+    Py_XDECREF(*message);
     return 0;
 }
 
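
# Note (illustrative, not part of the diff): the standard-stream hunk above picks the
# TextIOWrapper newline argument per platform: None on Windows (translate "\r\n" and
# "\r" on input, write "\r\n") and "\n" elsewhere (no translation).  The same two
# options on an ordinary in-memory stream (standard library io module):

import io

data = b"first\r\nsecond\r\n"
translated = io.TextIOWrapper(io.BytesIO(data), newline=None)   # the Windows choice
verbatim = io.TextIOWrapper(io.BytesIO(data), newline="\n")     # the POSIX choice
assert translated.readline() == "first\n"       # "\r\n" translated on input
assert verbatim.readline() == "first\r\n"       # returned exactly as stored
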
diff -r 3d0686d90f55 Python/thread_pthread.h
--- a/Python/thread_pthread.h
+++ b/Python/thread_pthread.h
@@ -19,14 +19,18 @@
 #define THREAD_STACK_SIZE       0       /* use default stack size */
 #endif
 
-#if (defined(__APPLE__) || defined(__FreeBSD__)) && defined(THREAD_STACK_SIZE) && THREAD_STACK_SIZE == 0
-   /* The default stack size for new threads on OSX is small enough that
-    * we'll get hard crashes instead of 'maximum recursion depth exceeded'
-    * exceptions.
-    *
-    * The default stack size below is the minimal stack size where a
-    * simple recursive function doesn't cause a hard crash.
-    */
+/* The default stack size for new threads on OSX and BSD is small enough that
+ * we'll get hard crashes instead of 'maximum recursion depth exceeded'
+ * exceptions.
+ *
+ * The default stack sizes below are the empirically determined minimal stack
+ * sizes where a simple recursive function doesn't cause a hard crash.
+ */
+#if defined(__APPLE__) && defined(THREAD_STACK_SIZE) && THREAD_STACK_SIZE == 0
+#undef  THREAD_STACK_SIZE
+#define THREAD_STACK_SIZE       0x500000
+#endif
+#if defined(__FreeBSD__) && defined(THREAD_STACK_SIZE) && THREAD_STACK_SIZE == 0
 #undef  THREAD_STACK_SIZE
 #define THREAD_STACK_SIZE       0x400000
 #endif
diff -r 3d0686d90f55 Tools/buildbot/build-amd64.bat
--- a/Tools/buildbot/build-amd64.bat
+++ b/Tools/buildbot/build-amd64.bat
@@ -1,4 +1,5 @@
 @rem Used by the buildbot "compile" step.
+set HOST_PYTHON="%CD%\PCbuild\amd64\python_d.exe"
 cmd /c Tools\buildbot\external-amd64.bat
 call "%VS90COMNTOOLS%\..\..\VC\vcvarsall.bat" x86_amd64
 cmd /c Tools\buildbot\clean-amd64.bat
diff -r 3d0686d90f55 Tools/buildbot/clean.bat
--- a/Tools/buildbot/clean.bat
+++ b/Tools/buildbot/clean.bat
@@ -1,7 +1,5 @@
 @rem Used by the buildbot "clean" step.
 call "%VS90COMNTOOLS%vsvars32.bat"
-@echo Deleting .pyc/.pyo files ...
-del /s Lib\*.pyc Lib\*.pyo
 @echo Deleting test leftovers ...
 rmdir /s /q build
 cd PCbuild
diff -r 3d0686d90f55 Tools/buildbot/external-common.bat
--- a/Tools/buildbot/external-common.bat
+++ b/Tools/buildbot/external-common.bat
@@ -4,7 +4,7 @@
 cd ..
 @rem XXX: If you need to force the buildbots to start from a fresh environment, uncomment
 @rem the following, check it in, then check it out, comment it out, then check it back in.
-@rem if exist bzip2-1.0.5 rd /s/q bzip2-1.0.5
+@rem if exist bzip2-1.0.6 rd /s/q bzip2-1.0.6
 @rem if exist tcltk rd /s/q tcltk
 @rem if exist tcltk64 rd /s/q tcltk64
 @rem if exist tcl8.4.12 rd /s/q tcl8.4.12
@@ -14,20 +14,17 @@
 @rem if exist tk8.4.16 rd /s/q tk8.4.16
 @rem if exist tk-8.4.18.1 rd /s/q tk-8.4.18.1
 @rem if exist db-4.4.20 rd /s/q db-4.4.20
-@rem if exist openssl-1.0.0a rd /s/q openssl-1.0.0a
+@rem if exist openssl-1.0.0j rd /s/q openssl-1.0.0j
 @rem if exist sqlite-3.7.4 rd /s/q sqlite-3.7.4    
 
 @rem bzip
-if not exist bzip2-1.0.5 (
-   rd /s/q bzip2-1.0.3
-  svn export http://svn.python.org/projects/external/bzip2-1.0.5
+if not exist bzip2-1.0.6 (
+   rd /s/q bzip2-1.0.5
+  svn export http://svn.python.org/projects/external/bzip2-1.0.6
 )
 
-@rem Sleepycat db
-if not exist db-4.4.20 svn export http://svn.python.org/projects/external/db-4.4.20-vs9 db-4.4.20
-
 @rem OpenSSL
-if not exist openssl-1.0.0a svn export http://svn.python.org/projects/external/openssl-1.0.0a
+if not exist openssl-1.0.0j svn export http://svn.python.org/projects/external/openssl-1.0.0j
 
 @rem tcl/tk
 if not exist tcl-8.5.9.0 (
diff -r 3d0686d90f55 Tools/buildbot/test-amd64.bat
--- a/Tools/buildbot/test-amd64.bat
+++ b/Tools/buildbot/test-amd64.bat
@@ -1,3 +1,3 @@
 @rem Used by the buildbot "test" step.
 cd PCbuild
-call rt.bat -q -d -x64 -uall -rw
+call rt.bat -d -q -x64 -uall -rwW -n %1 %2 %3 %4 %5 %6 %7 %8 %9
diff -r 3d0686d90f55 Tools/buildbot/test.bat
--- a/Tools/buildbot/test.bat
+++ b/Tools/buildbot/test.bat
@@ -1,4 +1,3 @@
 @rem Used by the buildbot "test" step.
 cd PCbuild
-call rt.bat -d -q -uall -rwW -n
-
+call rt.bat -d -q -uall -rwW -n %1 %2 %3 %4 %5 %6 %7 %8 %9
diff -r 3d0686d90f55 Tools/demo/redemo.py
--- a/Tools/demo/redemo.py
+++ b/Tools/demo/redemo.py
@@ -1,3 +1,5 @@
+#!/usr/bin/env python3
+
 """Basic regular expression demostration facility (Perl style syntax)."""
 
 from tkinter import *
diff -r 3d0686d90f55 Tools/msi/msi.py
--- a/Tools/msi/msi.py
+++ b/Tools/msi/msi.py
@@ -1021,6 +1021,7 @@
             lib.add_file("check_soundcard.vbs")
             lib.add_file("empty.vbs")
             lib.add_file("Sine-1000Hz-300ms.aif")
+            lib.add_file("mime.types")
             lib.glob("*.uue")
             lib.glob("*.pem")
             lib.glob("*.pck")
diff -r 3d0686d90f55 Tools/parser/test_unparse.py
--- a/Tools/parser/test_unparse.py
+++ b/Tools/parser/test_unparse.py
@@ -209,6 +209,13 @@
     def test_try_except_finally(self):
         self.check_roundtrip(try_except_finally)
 
+    def test_starred_assignment(self):
+        self.check_roundtrip("a, *b, c = seq")
+        self.check_roundtrip("a, (*b, c) = seq")
+        self.check_roundtrip("a, *b[0], c = seq")
+        self.check_roundtrip("a, *(b, c) = seq")
+
+
 class DirectoryTestCase(ASTTestCase):
     """Test roundtrip behaviour on all files in Lib and Lib/test."""
 
diff -r 3d0686d90f55 Tools/parser/unparse.py
--- a/Tools/parser/unparse.py
+++ b/Tools/parser/unparse.py
@@ -472,6 +472,10 @@
         self.dispatch(t.slice)
         self.write("]")
 
+    def _Starred(self, t):
+        self.write("*")
+        self.dispatch(t.value)
+
     # slice
     def _Ellipsis(self, t):
         self.write("...")
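
# Note (illustrative, not part of the diff): the _Starred handler above lets the demo
# unparser round-trip extended unpacking targets.  What the compiler sees for such an
# assignment (standard library ast module, no extra assumptions):

import ast

tree = ast.parse("a, *b, c = seq")
target = tree.body[0].targets[0]                 # the tuple being assigned to
assert isinstance(target.elts[1], ast.Starred)   # the "*b" element
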
diff -r 3d0686d90f55 Tools/scripts/abitype.py
--- a/Tools/scripts/abitype.py
+++ b/Tools/scripts/abitype.py
@@ -3,34 +3,6 @@
 # Usage: abitype.py < old_code > new_code
 import re, sys
 
-############ Simplistic C scanner ##################################
-tokenizer = re.compile(
-    r"(?P<preproc>#.*\n)"
-    r"|(?P<comment>/\*.*?\*/)"
-    r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
-    r"|(?P<ws>[ \t\n]+)"
-    r"|(?P<other>.)",
-    re.MULTILINE)
-
-tokens = []
-source = sys.stdin.read()
-pos = 0
-while pos != len(source):
-    m = tokenizer.match(source, pos)
-    tokens.append([m.lastgroup, m.group()])
-    pos += len(tokens[-1][1])
-    if tokens[-1][0] == 'preproc':
-        # continuation lines are considered
-        # only in preprocess statements
-        while tokens[-1][1].endswith('\\\n'):
-            nl = source.find('\n', pos)
-            if nl == -1:
-                line = source[pos:]
-            else:
-                line = source[pos:nl+1]
-            tokens[-1][1] += line
-            pos += len(line)
-
 ###### Replacement of PyTypeObject static instances ##############
 
 # classify each token, giving it a one-letter code:
@@ -79,7 +51,7 @@
     while tokens[pos][0] in ('ws', 'comment'):
         pos += 1
     if tokens[pos][1] != 'PyVarObject_HEAD_INIT':
-        raise Exception, '%s has no PyVarObject_HEAD_INIT' % name
+        raise Exception('%s has no PyVarObject_HEAD_INIT' % name)
     while tokens[pos][1] != ')':
         pos += 1
     pos += 1
@@ -183,18 +155,48 @@
     return '\n'.join(res)
 
 
-# Main loop: replace all static PyTypeObjects until
-# there are none left.
-while 1:
-    c = classify()
-    m = re.search('(SW)?TWIW?=W?{.*?};', c)
-    if not m:
-        break
-    start = m.start()
-    end = m.end()
-    name, fields = get_fields(start, m)
-    tokens[start:end] = [('',make_slots(name, fields))]
+if __name__ == '__main__':
 
-# Output result to stdout
-for t, v in tokens:
-    sys.stdout.write(v)
+    ############ Simplistic C scanner ##################################
+    tokenizer = re.compile(
+        r"(?P<preproc>#.*\n)"
+        r"|(?P<comment>/\*.*?\*/)"
+        r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
+        r"|(?P<ws>[ \t\n]+)"
+        r"|(?P<other>.)",
+        re.MULTILINE)
+
+    tokens = []
+    source = sys.stdin.read()
+    pos = 0
+    while pos != len(source):
+        m = tokenizer.match(source, pos)
+        tokens.append([m.lastgroup, m.group()])
+        pos += len(tokens[-1][1])
+        if tokens[-1][0] == 'preproc':
+            # continuation lines are considered
+            # only in preprocess statements
+            while tokens[-1][1].endswith('\\\n'):
+                nl = source.find('\n', pos)
+                if nl == -1:
+                    line = source[pos:]
+                else:
+                    line = source[pos:nl+1]
+                tokens[-1][1] += line
+                pos += len(line)
+
+    # Main loop: replace all static PyTypeObjects until
+    # there are none left.
+    while 1:
+        c = classify()
+        m = re.search('(SW)?TWIW?=W?{.*?};', c)
+        if not m:
+            break
+        start = m.start()
+        end = m.end()
+        name, fields = get_fields(start, m)
+        tokens[start:end] = [('',make_slots(name, fields))]
+
+    # Output result to stdout
+    for t, v in tokens:
+        sys.stdout.write(v)
diff -r 3d0686d90f55 Tools/scripts/find_recursionlimit.py
--- a/Tools/scripts/find_recursionlimit.py
+++ b/Tools/scripts/find_recursionlimit.py
@@ -106,14 +106,16 @@
     else:
         print("Yikes!")
 
-limit = 1000
-while 1:
-    check_limit(limit, "test_recurse")
-    check_limit(limit, "test_add")
-    check_limit(limit, "test_repr")
-    check_limit(limit, "test_init")
-    check_limit(limit, "test_getattr")
-    check_limit(limit, "test_getitem")
-    check_limit(limit, "test_cpickle")
-    print("Limit of %d is fine" % limit)
-    limit = limit + 100
+if __name__ == '__main__':
+
+    limit = 1000
+    while 1:
+        check_limit(limit, "test_recurse")
+        check_limit(limit, "test_add")
+        check_limit(limit, "test_repr")
+        check_limit(limit, "test_init")
+        check_limit(limit, "test_getattr")
+        check_limit(limit, "test_getitem")
+        check_limit(limit, "test_cpickle")
+        print("Limit of %d is fine" % limit)
+        limit = limit + 100
diff -r 3d0686d90f55 Tools/scripts/findnocoding.py
--- a/Tools/scripts/findnocoding.py
+++ b/Tools/scripts/findnocoding.py
@@ -76,29 +76,31 @@
     -c: recognize Python source files trying to compile them
     -d: debug output""" % sys.argv[0]
 
-try:
-    opts, args = getopt.getopt(sys.argv[1:], 'cd')
-except getopt.error as msg:
-    print(msg, file=sys.stderr)
-    print(usage, file=sys.stderr)
-    sys.exit(1)
+if __name__ == '__main__':
 
-is_python = pysource.looks_like_python
-debug = False
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], 'cd')
+    except getopt.error as msg:
+        print(msg, file=sys.stderr)
+        print(usage, file=sys.stderr)
+        sys.exit(1)
 
-for o, a in opts:
-    if o == '-c':
-        is_python = pysource.can_be_compiled
-    elif o == '-d':
-        debug = True
+    is_python = pysource.looks_like_python
+    debug = False
 
-if not args:
-    print(usage, file=sys.stderr)
-    sys.exit(1)
+    for o, a in opts:
+        if o == '-c':
+            is_python = pysource.can_be_compiled
+        elif o == '-d':
+            debug = True
 
-for fullpath in pysource.walk_python_files(args, is_python):
-    if debug:
-        print("Testing for coding: %s" % fullpath)
-    result = needs_declaration(fullpath)
-    if result:
-        print(fullpath)
+    if not args:
+        print(usage, file=sys.stderr)
+        sys.exit(1)
+
+    for fullpath in pysource.walk_python_files(args, is_python):
+        if debug:
+            print("Testing for coding: %s" % fullpath)
+        result = needs_declaration(fullpath)
+        if result:
+            print(fullpath)
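
# Note (illustrative, not part of the diff): abitype.py, find_recursionlimit.py and
# findnocoding.py above all gain the standard import guard, so importing the module
# (for instance from a test) no longer runs the command-line tool.  The pattern in
# isolation (main is just a placeholder name):

def main(argv):
    print("doing the real work with", argv)

if __name__ == '__main__':        # true only when run as a script, not on import
    import sys
    main(sys.argv[1:])
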
diff -r 3d0686d90f55 Tools/scripts/fixcid.py
--- a/Tools/scripts/fixcid.py
+++ b/Tools/scripts/fixcid.py
@@ -292,7 +292,7 @@
         if not words: continue
         if len(words) == 3 and words[0] == 'struct':
             words[:2] = [words[0] + ' ' + words[1]]
-        elif len(words) <> 2:
+        elif len(words) != 2:
             err(substfile + '%s:%r: warning: bad line: %r' % (substfile, lineno, line))
             continue
         if Reverse:
diff -r 3d0686d90f55 Tools/scripts/gprof2html.py
--- a/Tools/scripts/gprof2html.py
+++ b/Tools/scripts/gprof2html.py
@@ -19,17 +19,19 @@
 </html>
 """
 
-def add_escapes(input):
-    for line in input:
-        yield cgi.escape(line)
+def add_escapes(filename):
+    with open(filename) as fp:
+        for line in fp:
+            yield cgi.escape(line)
+
 
 def main():
     filename = "gprof.out"
     if sys.argv[1:]:
         filename = sys.argv[1]
     outputfilename = filename + ".html"
-    input = add_escapes(file(filename))
-    output = file(outputfilename, "w")
+    input = add_escapes(filename)
+    output = open(outputfilename, "w")
     output.write(header % filename)
     for line in input:
         output.write(line)
diff -r 3d0686d90f55 Tools/scripts/md5sum.py
--- a/Tools/scripts/md5sum.py
+++ b/Tools/scripts/md5sum.py
@@ -20,7 +20,7 @@
 import sys
 import os
 import getopt
-import md5
+from hashlib import md5
 
 def sum(*files):
     sts = 0
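
# Note (illustrative, not part of the diff): the standalone md5 module was removed in
# Python 3, so md5sum.py switches to hashlib, which exposes the same constructor:

from hashlib import md5

digest = md5(b"hello world").hexdigest()
assert len(digest) == 32          # 128-bit digest rendered as 32 hex characters
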
diff -r 3d0686d90f55 Tools/scripts/parseentities.py
--- a/Tools/scripts/parseentities.py
+++ b/Tools/scripts/parseentities.py
@@ -13,7 +13,6 @@
 
 """
 import re,sys
-import TextTools
 
 entityRE = re.compile('<!ENTITY +(\w+) +CDATA +"([^"]+)" +-- +((?:.|\n)+?) *-->')
 
@@ -45,7 +44,7 @@
                 charcode = repr(charcode)
         else:
             charcode = repr(charcode)
-        comment = TextTools.collapse(comment)
+        comment = ' '.join(comment.split())
         f.write("    '%s':\t%s,  \t# %s\n" % (name,charcode,comment))
     f.write('\n}\n')
 
diff -r 3d0686d90f55 Tools/scripts/pdeps.py
--- a/Tools/scripts/pdeps.py
+++ b/Tools/scripts/pdeps.py
@@ -76,10 +76,9 @@
             nextline = fp.readline()
             if not nextline: break
             line = line[:-1] + nextline
-        if m_import.match(line) >= 0:
-            (a, b), (a1, b1) = m_import.regs[:2]
-        elif m_from.match(line) >= 0:
-            (a, b), (a1, b1) = m_from.regs[:2]
+        m_found = m_import.match(line) or m_from.match(line)
+        if m_found:
+            (a, b), (a1, b1) = m_found.regs[:2]
         else: continue
         words = line[a1:b1].split(',')
         # print '#', line, words
@@ -87,6 +86,7 @@
             word = word.strip()
             if word not in list:
                 list.append(word)
+    fp.close()
 
 
 # Compute closure (this is in fact totally general)
@@ -123,7 +123,7 @@
 def inverse(table):
     inv = {}
     for key in table.keys():
-        if not inv.has_key(key):
+        if key not in inv:
             inv[key] = []
         for item in table[key]:
             store(inv, item, key)
diff -r 3d0686d90f55 configure.ac
--- /dev/null
+++ b/configure.ac
@@ -0,0 +1,4355 @@
+dnl ***********************************************
+dnl * Please run autoreconf to test your changes! *
+dnl ***********************************************
+
+# Set VERSION so we only need to edit in one place (i.e., here)
+m4_define(PYTHON_VERSION, 3.2)
+
+dnl Some m4 magic to ensure that the configure script is generated
+dnl by the correct autoconf version.
+m4_define([version_required],
+[m4_if(m4_version_compare(m4_defn([m4_PACKAGE_VERSION]), [$1]), 0,
+       [],
+       [m4_fatal([Autoconf version $1 is required for Python], 63)])
+])
+AC_PREREQ(2.65)
+
+AC_REVISION($Revision$)
+AC_INIT(python, PYTHON_VERSION, http://bugs.python.org/)
+AC_CONFIG_SRCDIR([Include/object.h])
+AC_CONFIG_HEADER(pyconfig.h)
+
+dnl Ensure that if prefix is specified, it does not end in a slash. If
+dnl it does, we get path names containing '//' which is both ugly and
+dnl can cause trouble.
+
+dnl Last slash shouldn't be stripped if prefix=/
+if test "$prefix" != "/"; then
+    prefix=`echo "$prefix" | sed -e 's/\/$//g'`
+fi    
+
+dnl This is for stuff that absolutely must end up in pyconfig.h.
+dnl Please use pyport.h instead, if possible.
+AH_TOP([
+#ifndef Py_PYCONFIG_H
+#define Py_PYCONFIG_H
+])
+AH_BOTTOM([
+/* Define the macros needed if on a UnixWare 7.x system. */
+#if defined(__USLC__) && defined(__SCO_VERSION__)
+#define STRICT_SYSV_CURSES /* Don't use ncurses extensions */
+#endif
+
+#endif /*Py_PYCONFIG_H*/
+])
+
+# We don't use PACKAGE_ variables, and they cause conflicts
+# with other autoconf-based packages that include Python.h
+grep -v 'define PACKAGE_' <confdefs.h >confdefs.h.new
+rm confdefs.h
+mv confdefs.h.new confdefs.h
+
+AC_SUBST(VERSION)
+VERSION=PYTHON_VERSION
+
+# Version number of Python's own shared library file.
+AC_SUBST(SOVERSION)
+SOVERSION=1.0
+
+# The later definition of _XOPEN_SOURCE disables certain features
+# on Linux, so we need _GNU_SOURCE to re-enable them (makedev, tm_zone).
+AC_DEFINE(_GNU_SOURCE, 1, [Define on Linux to activate all library features])
+
+# The later definition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables
+# certain features on NetBSD, so we need _NETBSD_SOURCE to re-enable
+# them.
+AC_DEFINE(_NETBSD_SOURCE, 1, [Define on NetBSD to activate all library features])
+
+# The later definition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables
+# certain features on FreeBSD, so we need __BSD_VISIBLE to re-enable
+# them.
+AC_DEFINE(__BSD_VISIBLE, 1, [Define on FreeBSD to activate all library features])
+
+# The later definition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables
+# u_int on Irix 5.3. Defining _BSD_TYPES brings it back.
+AC_DEFINE(_BSD_TYPES, 1, [Define on Irix to enable u_int])
+
+# The later definition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables
+# certain features on Mac OS X, so we need _DARWIN_C_SOURCE to re-enable
+# them.
+AC_DEFINE(_DARWIN_C_SOURCE, 1, [Define on Darwin to activate all library features])
+
+
+define_xopen_source=yes
+
+# Arguments passed to configure.
+AC_SUBST(CONFIG_ARGS)
+CONFIG_ARGS="$ac_configure_args"
+
+AC_MSG_CHECKING([for --enable-universalsdk])
+AC_ARG_ENABLE(universalsdk,
+	AS_HELP_STRING([--enable-universalsdk@<:@=SDKDIR@:>@], [Build against Mac OS X 10.4u SDK (ppc/i386)]),
+[
+	case $enableval in
+	yes)
+		enableval=/Developer/SDKs/MacOSX10.4u.sdk
+		if test ! -d "${enableval}"
+		then
+			enableval=/
+		fi
+		;;
+	esac
+	case $enableval in
+	no)
+		UNIVERSALSDK=
+		enable_universalsdk=
+		;;
+	*)
+		UNIVERSALSDK=$enableval
+		if test ! -d "${UNIVERSALSDK}"
+		then
+			AC_MSG_ERROR([--enable-universalsdk specifies non-existing SDK: ${UNIVERSALSDK}])
+		fi
+		;;
+	esac
+	
+],[
+   	UNIVERSALSDK=
+	enable_universalsdk=
+])
+if test -n "${UNIVERSALSDK}"
+then
+	AC_MSG_RESULT(${UNIVERSALSDK})
+else
+	AC_MSG_RESULT(no)
+fi
+AC_SUBST(UNIVERSALSDK)
+
+AC_SUBST(ARCH_RUN_32BIT)
+
+UNIVERSAL_ARCHS="32-bit"
+AC_SUBST(LIPO_32BIT_FLAGS)
+AC_MSG_CHECKING(for --with-universal-archs)
+AC_ARG_WITH(universal-archs,
+    AS_HELP_STRING([--with-universal-archs=ARCH], [select architectures for universal build ("32-bit", "64-bit", "3-way", "intel" or "all")]),
+[
+	AC_MSG_RESULT($withval)
+	UNIVERSAL_ARCHS="$withval"
+],
+[
+ 	AC_MSG_RESULT(32-bit)
+])
+
+
+
+AC_ARG_WITH(framework-name,
+              AS_HELP_STRING([--with-framework-name=FRAMEWORK],
+                             [specify an alternate name of the framework built with --enable-framework]),
+[
+    PYTHONFRAMEWORK=${withval}
+    PYTHONFRAMEWORKDIR=${withval}.framework
+    PYTHONFRAMEWORKIDENTIFIER=org.python.`echo $withval | tr '[A-Z]' '[a-z]'`
+    ],[
+    PYTHONFRAMEWORK=Python
+    PYTHONFRAMEWORKDIR=Python.framework
+    PYTHONFRAMEWORKIDENTIFIER=org.python.python
+])
+dnl quadrigraphs "@<:@" and "@:>@" produce "[" and "]" in the output
+AC_ARG_ENABLE(framework,
+              AS_HELP_STRING([--enable-framework@<:@=INSTALLDIR@:>@], [Build (MacOSX|Darwin) framework]),
+[
+	case $enableval in
+	yes) 
+		enableval=/Library/Frameworks
+	esac
+	case $enableval in
+	no)
+		PYTHONFRAMEWORK=
+		PYTHONFRAMEWORKDIR=no-framework
+		PYTHONFRAMEWORKPREFIX=
+		PYTHONFRAMEWORKINSTALLDIR=
+		FRAMEWORKINSTALLFIRST=
+		FRAMEWORKINSTALLLAST=
+		FRAMEWORKALTINSTALLFIRST=
+		FRAMEWORKALTINSTALLLAST=
+		if test "x${prefix}" = "xNONE"; then
+			FRAMEWORKUNIXTOOLSPREFIX="${ac_default_prefix}"
+		else
+			FRAMEWORKUNIXTOOLSPREFIX="${prefix}"
+		fi
+		enable_framework=
+		;;
+	*)
+		PYTHONFRAMEWORKPREFIX="${enableval}"
+		PYTHONFRAMEWORKINSTALLDIR=$PYTHONFRAMEWORKPREFIX/$PYTHONFRAMEWORKDIR
+		FRAMEWORKINSTALLFIRST="frameworkinstallstructure"
+		FRAMEWORKALTINSTALLFIRST="frameworkinstallstructure "
+		FRAMEWORKINSTALLLAST="frameworkinstallmaclib frameworkinstallapps frameworkinstallunixtools"
+		FRAMEWORKALTINSTALLLAST="frameworkinstallmaclib frameworkinstallapps frameworkaltinstallunixtools"
+		FRAMEWORKINSTALLAPPSPREFIX="/Applications"
+
+		if test "x${prefix}" = "xNONE" ; then
+			FRAMEWORKUNIXTOOLSPREFIX="${ac_default_prefix}"
+
+		else
+			FRAMEWORKUNIXTOOLSPREFIX="${prefix}"
+		fi
+
+		case "${enableval}" in
+		/System*)
+			FRAMEWORKINSTALLAPPSPREFIX="/Applications"
+			if test "${prefix}" = "NONE" ; then
+				# See below
+				FRAMEWORKUNIXTOOLSPREFIX="/usr"
+			fi
+			;;
+
+		/Library*)
+			FRAMEWORKINSTALLAPPSPREFIX="/Applications"
+			;;
+
+		*/Library/Frameworks)
+			MDIR="`dirname "${enableval}"`"
+			MDIR="`dirname "${MDIR}"`"
+			FRAMEWORKINSTALLAPPSPREFIX="${MDIR}/Applications"
+
+			if test "${prefix}" = "NONE"; then
+				# User hasn't specified the 
+				# --prefix option, but wants to install
+				# the framework in a non-default location,
+				# ensure that the compatibility links get
+				# installed relative to that prefix as well
+				# instead of in /usr/local.
+				FRAMEWORKUNIXTOOLSPREFIX="${MDIR}"
+			fi
+			;;
+
+		*)
+			FRAMEWORKINSTALLAPPSPREFIX="/Applications"
+			;;
+		esac
+
+		prefix=$PYTHONFRAMEWORKINSTALLDIR/Versions/$VERSION
+
+		# Add files for Mac specific code to the list of output
+		# files:
+		AC_CONFIG_FILES(Mac/Makefile)
+		AC_CONFIG_FILES(Mac/PythonLauncher/Makefile)
+		AC_CONFIG_FILES(Mac/Resources/framework/Info.plist)
+		AC_CONFIG_FILES(Mac/Resources/app/Info.plist)
+	esac
+	],[
+	PYTHONFRAMEWORK=
+	PYTHONFRAMEWORKDIR=no-framework
+	PYTHONFRAMEWORKPREFIX=
+	PYTHONFRAMEWORKINSTALLDIR=
+	FRAMEWORKINSTALLFIRST=
+	FRAMEWORKINSTALLLAST=
+	FRAMEWORKALTINSTALLFIRST=
+	FRAMEWORKALTINSTALLLAST=
+	if test "x${prefix}" = "xNONE" ; then
+		FRAMEWORKUNIXTOOLSPREFIX="${ac_default_prefix}"
+	else
+		FRAMEWORKUNIXTOOLSPREFIX="${prefix}"
+	fi
+	enable_framework=
+
+])
+AC_SUBST(PYTHONFRAMEWORK)
+AC_SUBST(PYTHONFRAMEWORKIDENTIFIER)
+AC_SUBST(PYTHONFRAMEWORKDIR)
+AC_SUBST(PYTHONFRAMEWORKPREFIX)
+AC_SUBST(PYTHONFRAMEWORKINSTALLDIR)
+AC_SUBST(FRAMEWORKINSTALLFIRST)
+AC_SUBST(FRAMEWORKINSTALLLAST)
+AC_SUBST(FRAMEWORKALTINSTALLFIRST)
+AC_SUBST(FRAMEWORKALTINSTALLLAST)
+AC_SUBST(FRAMEWORKUNIXTOOLSPREFIX)
+AC_SUBST(FRAMEWORKINSTALLAPPSPREFIX)
+
+##AC_ARG_WITH(dyld,
+##            AS_HELP_STRING([--with-dyld],
+##                           [Use (OpenStep|Rhapsody) dynamic linker]))
+##
+# Set name for machine-dependent library files
+AC_SUBST(MACHDEP)
+AC_MSG_CHECKING(MACHDEP)
+if test -z "$MACHDEP"
+then
+	ac_sys_system=`uname -s`
+	if test "$ac_sys_system" = "AIX" \
+	-o "$ac_sys_system" = "UnixWare" -o "$ac_sys_system" = "OpenUNIX"; then
+		ac_sys_release=`uname -v`
+	else
+		ac_sys_release=`uname -r`
+	fi
+	ac_md_system=`echo $ac_sys_system |
+			   tr -d '[/ ]' | tr '[[A-Z]]' '[[a-z]]'`
+	ac_md_release=`echo $ac_sys_release |
+			   tr -d '[/ ]' | sed 's/^[[A-Z]]\.//' | sed 's/\..*//'`
+	MACHDEP="$ac_md_system$ac_md_release"
+
+	case $MACHDEP in
+	linux*) MACHDEP="linux2";;
+	cygwin*) MACHDEP="cygwin";;
+	darwin*) MACHDEP="darwin";;
+        irix646) MACHDEP="irix6";;
+	'')	MACHDEP="unknown";;
+	esac
+fi
+	
+# Some systems cannot stand _XOPEN_SOURCE being defined at all; they
+# disable features if it is defined, without any means to access these
+# features as extensions. For these systems, we skip the definition of
+# _XOPEN_SOURCE. Before adding a system to the list to gain access to
+# some feature, make sure there is no alternative way to access this
+# feature. Also, when using wildcards, make sure you have verified the
+# need for not defining _XOPEN_SOURCE on all systems matching the
+# wildcard, and that the wildcard does not include future systems
+# (which may remove their limitations).
+dnl quadrigraphs "@<:@" and "@:>@" produce "[" and "]" in the output
+case $ac_sys_system/$ac_sys_release in
+  # On OpenBSD, select(2) is not available if _XOPEN_SOURCE is defined,
+  # even though select is a POSIX function. Reported by J. Ribbens.
+  # Reconfirmed for OpenBSD 3.3 by Zachary Hamm, for 3.4 by Jason Ish.
+  # In addition, Stefan Krah confirms that issue #1244610 exists through
+  # OpenBSD 4.6, but is fixed in 4.7.
+  OpenBSD/2.* | OpenBSD/3.* | OpenBSD/4.@<:@0123456@:>@) 
+    define_xopen_source=no
+    # OpenBSD undoes our definition of __BSD_VISIBLE if _XOPEN_SOURCE is
+    # also defined. This can be overridden by defining _BSD_SOURCE
+    # As this has a different meaning on Linux, only define it on OpenBSD
+    AC_DEFINE(_BSD_SOURCE, 1, [Define on OpenBSD to activate all library features])
+    ;;
+  OpenBSD/*)
+    # OpenBSD undoes our definition of __BSD_VISIBLE if _XOPEN_SOURCE is
+    # also defined. This can be overridden by defining _BSD_SOURCE
+    # As this has a different meaning on Linux, only define it on OpenBSD
+    AC_DEFINE(_BSD_SOURCE, 1, [Define on OpenBSD to activate all library features])
+    ;;
+  # Defining _XOPEN_SOURCE on NetBSD versions prior to the introduction of
+  # _NETBSD_SOURCE disables certain features (eg. setgroups). Reported by
+  # Marc Recht
+  NetBSD/1.5 | NetBSD/1.5.* | NetBSD/1.6 | NetBSD/1.6.* | NetBSD/1.6@<:@A-S@:>@)
+    define_xopen_source=no;;
+  # From the perspective of Solaris, _XOPEN_SOURCE is not so much a
+  # request to enable features supported by the standard as a request
+  # to disable features not supported by the standard.  The best way
+  # for Python to use Solaris is simply to leave _XOPEN_SOURCE out
+  # entirely and define __EXTENSIONS__ instead.
+  SunOS/*)
+    define_xopen_source=no;;
+  # On UnixWare 7, u_long is never defined with _XOPEN_SOURCE,
+  # but used in /usr/include/netinet/tcp.h. Reported by Tim Rice.
+  # Reconfirmed for 7.1.4 by Martin v. Loewis.
+  OpenUNIX/8.0.0| UnixWare/7.1.@<:@0-4@:>@)
+    define_xopen_source=no;;
+  # On OpenServer 5, u_short is never defined with _XOPEN_SOURCE,
+  # but used in struct sockaddr.sa_family. Reported by Tim Rice.
+  SCO_SV/3.2)
+    define_xopen_source=no;;
+  # On FreeBSD 4, the math functions C89 does not cover are never defined
+  # with _XOPEN_SOURCE and __BSD_VISIBLE does not re-enable them.
+  FreeBSD/4.*)
+    define_xopen_source=no;;
+  # On MacOS X 10.2, a bug in ncurses.h means that it craps out if 
+  # _XOPEN_EXTENDED_SOURCE is defined. Apparently, this is fixed in 10.3, which
+  # identifies itself as Darwin/7.*
+  # On Mac OS X 10.4, defining _POSIX_C_SOURCE or _XOPEN_SOURCE
+  # disables platform specific features beyond repair.
+  # On Mac OS X 10.3, defining _POSIX_C_SOURCE or _XOPEN_SOURCE 
+  # has no effect, so don't bother defining them.
+  Darwin/@<:@6789@:>@.*)
+    define_xopen_source=no;;
+  Darwin/1@<:@0-9@:>@.*)
+    define_xopen_source=no;;
+  # On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but
+  # used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined
+  # or has another value. By not (re)defining it, the defaults come into effect.
+  AIX/4)
+    define_xopen_source=no;;
+  AIX/5)
+    if test `uname -r` -eq 1; then
+      define_xopen_source=no
+    fi
+    ;;
+  # On QNX 6.3.2, defining _XOPEN_SOURCE prevents netdb.h from
+  # defining NI_NUMERICHOST.
+  QNX/6.3.2)
+    define_xopen_source=no
+    ;;
+
+esac
+
+if test $define_xopen_source = yes
+then
+  AC_DEFINE(_XOPEN_SOURCE, 600, 
+            Define to the level of X/Open that your system supports)
+
+  # On Tru64 Unix 4.0F, defining _XOPEN_SOURCE also requires
+  # definition of _XOPEN_SOURCE_EXTENDED and _POSIX_C_SOURCE, or else
+  # several APIs are not declared. Since this is also needed in some
+  # cases for HP-UX, we define it globally.
+  AC_DEFINE(_XOPEN_SOURCE_EXTENDED, 1,
+   	    Define to activate Unix95-and-earlier features)
+
+  AC_DEFINE(_POSIX_C_SOURCE, 200112L, Define to activate features from IEEE Stds 1003.1-2001)
+  
+fi
+
+#
+# SGI compilers allow the specification of both the ABI and the
+# ISA on the command line.  Depending on the values of these switches,
+# different and often incompatible code will be generated.
+#
+# The SGI_ABI variable can be used to modify the CC and LDFLAGS and
+# thus supply support for various ABI/ISA combinations.  The MACHDEP
+# variable is also adjusted.
+#
+AC_SUBST(SGI_ABI)
+if test ! -z "$SGI_ABI"
+then
+        CC="cc $SGI_ABI"
+        LDFLAGS="$SGI_ABI $LDFLAGS"
+        MACHDEP=`echo "${MACHDEP}${SGI_ABI}" | sed 's/ *//g'`
+fi
+AC_MSG_RESULT($MACHDEP)
+
+# Record the configure-time value of MACOSX_DEPLOYMENT_TARGET,
+# it may influence the way we can build extensions, so distutils
+# needs to check it
+AC_SUBST(CONFIGURE_MACOSX_DEPLOYMENT_TARGET)
+AC_SUBST(EXPORT_MACOSX_DEPLOYMENT_TARGET)
+CONFIGURE_MACOSX_DEPLOYMENT_TARGET=
+EXPORT_MACOSX_DEPLOYMENT_TARGET='#'
+
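+# Record the raw machine type; it is used later for per-architecture compiler
+# tweaks (e.g. adding -mieee on alpha).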
+AC_MSG_CHECKING(machine type as reported by uname -m)
+ac_sys_machine=`uname -m`
+AC_MSG_RESULT($ac_sys_machine)
+
+# checks for alternative programs
+
+# compiler flags are generated in two sets, BASECFLAGS and OPT.  OPT is just
+# for debug/optimization stuff.  BASECFLAGS is for flags that are required
+# just to get things to compile and link.  Users are free to override OPT
+# when running configure or make.  The build should not break if they do.
+# BASECFLAGS should generally not be messed with, however.
+
+# XXX shouldn't some/most/all of this code be merged with the stuff later
+# on that fiddles with OPT and BASECFLAGS?
+AC_MSG_CHECKING(for --without-gcc)
+AC_ARG_WITH(gcc,
+            AS_HELP_STRING([--without-gcc], [never use gcc]),
+[
+	case $withval in
+	no)	CC=${CC:-cc}
+		without_gcc=yes;;
+	yes)	CC=gcc
+		without_gcc=no;;
+	*)	CC=$withval
+		without_gcc=$withval;;
+	esac], [
+	case $ac_sys_system in
+	AIX*)   CC=${CC:-xlc_r}
+		without_gcc=;;
+	*)	without_gcc=no;;
+	esac])
+AC_MSG_RESULT($without_gcc)
+
+# If the user switches compilers, we can't believe the cache
+if test ! -z "$ac_cv_prog_CC" -a ! -z "$CC" -a "$CC" != "$ac_cv_prog_CC"
+then
+  AC_MSG_ERROR([cached CC is different -- throw away $cache_file
+(it is also a good idea to do 'make clean' before compiling)])
+fi
+
+# Don't let AC_PROG_CC set the default CFLAGS. It normally sets -g -O2
+# when the compiler supports them, but we don't always want -O2, and
+# we set -g later.
+if test -z "$CFLAGS"; then
+        CFLAGS=
+fi
+AC_PROG_CC
+
+AC_SUBST(CXX)
+AC_SUBST(MAINCC)
+AC_MSG_CHECKING(for --with-cxx-main=<compiler>)
+AC_ARG_WITH(cxx_main,
+            AS_HELP_STRING([--with-cxx-main=<compiler>],
+                           [compile main() and link python executable with C++ compiler]),
+[
+	
+	case $withval in
+	no)	with_cxx_main=no
+		MAINCC='$(CC)';;
+	yes)	with_cxx_main=yes
+		MAINCC='$(CXX)';;
+	*)	with_cxx_main=yes
+		MAINCC=$withval
+		if test -z "$CXX"
+		then
+			CXX=$withval
+		fi;;
+	esac], [
+	with_cxx_main=no
+	MAINCC='$(CC)'
+])
+AC_MSG_RESULT($with_cxx_main)
+
+preset_cxx="$CXX"
+if test -z "$CXX"
+then
+        case "$CC" in
+        gcc)    AC_PATH_PROG(CXX, [g++], [g++], [notfound]) ;;
+        cc)     AC_PATH_PROG(CXX, [c++], [c++], [notfound]) ;;
+        esac
+	if test "$CXX" = "notfound"
+	then
+		CXX=""
+	fi
+fi
+if test -z "$CXX"
+then
+	AC_CHECK_PROGS(CXX, $CCC c++ g++ gcc CC cxx cc++ cl, notfound)
+	if test "$CXX" = "notfound"
+	then
+		CXX=""
+	fi
+fi
+if test "$preset_cxx" != "$CXX"
+then
+        AC_MSG_WARN([
+
+  By default, distutils will build C++ extension modules with "$CXX".
+  If this is not intended, then set CXX on the configure command line.
+  ])
+fi
+
+
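+# Check whether the linker accepts -Wl,--no-as-needed and remember the result
+# in NO_AS_NEEDED for use in later link commands.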
+AC_MSG_CHECKING([for -Wl,--no-as-needed])
+save_LDFLAGS="$LDFLAGS"
+LDFLAGS="$LDFLAGS -Wl,--no-as-needed"
+AC_LINK_IFELSE([AC_LANG_PROGRAM([[]], [[]])],
+  [NO_AS_NEEDED="-Wl,--no-as-needed"
+   AC_MSG_RESULT([yes])],
+  [NO_AS_NEEDED=""
+   AC_MSG_RESULT([no])])
+LDFLAGS="$save_LDFLAGS"
+AC_SUBST(NO_AS_NEEDED)
+
+
+# checks for UNIX variants that set C preprocessor variables
+AC_USE_SYSTEM_EXTENSIONS
+
+# Check for unsupported systems
+case $ac_sys_system/$ac_sys_release in
+atheos*|Linux*/1*)
+   echo This system \($ac_sys_system/$ac_sys_release\) is no longer supported.
+   echo See README for details.
+   exit 1;;
+esac
+
+AC_EXEEXT
+AC_MSG_CHECKING(for --with-suffix)
+AC_ARG_WITH(suffix,
+            AS_HELP_STRING([--with-suffix=.exe], [set executable suffix]),
+[
+	case $withval in
+	no)	EXEEXT=;;
+	yes)	EXEEXT=.exe;;
+	*)	EXEEXT=$withval;;
+	esac])
+AC_MSG_RESULT($EXEEXT)
+
+# Test whether we're building on a case-insensitive filesystem, in which
+# case the executable built in the source tree gets an .exe extension
+AC_SUBST(BUILDEXEEXT)
+AC_MSG_CHECKING(for case-insensitive build directory)
+if test ! -d CaseSensitiveTestDir; then
+mkdir CaseSensitiveTestDir
+fi
+
+if test -d casesensitivetestdir
+then
+    AC_MSG_RESULT(yes)
+    BUILDEXEEXT=.exe
+else
+	AC_MSG_RESULT(no)
+	BUILDEXEEXT=$EXEEXT
+fi
+rmdir CaseSensitiveTestDir
+
+case $MACHDEP in
+bsdos*)
+    case $CC in
+    gcc) CC="$CC -D_HAVE_BSDI";;
+    esac;;
+esac
+
+case $ac_sys_system in
+hp*|HP*)
+    case $CC in
+    cc|*/cc) CC="$CC -Ae";;
+    esac;;
+esac
+
+
+AC_SUBST(LIBRARY)
+AC_MSG_CHECKING(LIBRARY)
+if test -z "$LIBRARY"
+then
+	LIBRARY='libpython$(VERSION)$(ABIFLAGS).a'
+fi
+AC_MSG_RESULT($LIBRARY)
+
+# LDLIBRARY is the name of the library to link against (as opposed to the
+# name of the library into which to insert object files). BLDLIBRARY is also
+# the library to link against, usually. On Mac OS X frameworks, BLDLIBRARY
+# is blank as the main program is not linked directly against LDLIBRARY.
+# LDLIBRARYDIR is the path to LDLIBRARY, which is made in a subdirectory. On
+# systems without shared libraries, LDLIBRARY is the same as LIBRARY
+# (defined in the Makefiles). On Cygwin LDLIBRARY is the import library,
+# DLLLIBRARY is the shared (i.e., DLL) library.
+# 
+# RUNSHARED is used to run shared python without installed libraries
+#
+# INSTSONAME is the name of the shared library that will be used to install
+# on the system - some systems like a version suffix, others don't.
+#
+# LDVERSION is the shared library version number, normally the Python version
+# with the ABI build flags appended.
+AC_SUBST(LDLIBRARY)
+AC_SUBST(DLLLIBRARY)
+AC_SUBST(BLDLIBRARY)
+AC_SUBST(PY3LIBRARY)
+AC_SUBST(LDLIBRARYDIR)
+AC_SUBST(INSTSONAME)
+AC_SUBST(RUNSHARED)
+AC_SUBST(LDVERSION)
+LDLIBRARY="$LIBRARY"
+BLDLIBRARY='$(LDLIBRARY)'
+INSTSONAME='$(LDLIBRARY)'
+DLLLIBRARY=''
+LDLIBRARYDIR=''
+RUNSHARED=''
+LDVERSION="$VERSION"
+
+# LINKCC is the command that links the python executable -- default is $(CC).
+# If CXX is set, and if it is needed to link a main function that was
+# compiled with CXX, LINKCC is CXX instead. Always using CXX is undesirable:
+# python might then depend on the C++ runtime.
+# This is altered for AIX in order to build the export list before 
+# linking.
+AC_SUBST(LINKCC)
+AC_MSG_CHECKING(LINKCC)
+if test -z "$LINKCC"
+then
+	LINKCC='$(PURIFY) $(MAINCC)'
+	case $ac_sys_system in
+	AIX*)
+	   exp_extra="\"\""
+	   if test $ac_sys_release -ge 5 -o \
+		   $ac_sys_release -eq 4 -a `uname -r` -ge 2 ; then
+	       exp_extra="."
+	   fi
+	   LINKCC="\$(srcdir)/Modules/makexp_aix Modules/python.exp $exp_extra \$(LIBRARY); $LINKCC";;
+	QNX*)
+	   # qcc must be used because the other compilers do not
+	   # support -N.
+	   LINKCC=qcc;;
+	esac
+fi
+AC_MSG_RESULT($LINKCC)
+
+# GNULD is set to "yes" if the GNU linker is used.  If this detection goes
+# wrong, make sure it defaults to "no": this is used by
+# distutils.unixccompiler to know if it should add --enable-new-dtags
+# to linker command lines, and failing to detect GNU ld simply results
+# in the same behaviour as before.
+AC_SUBST(GNULD)
+AC_MSG_CHECKING(for GNU ld)
+ac_prog=ld
+if test "$GCC" = yes; then
+       ac_prog=`$CC -print-prog-name=ld`
+fi
+case `"$ac_prog" -V 2>&1 < /dev/null` in
+      *GNU*)
+          GNULD=yes;;
+      *)
+          GNULD=no;;
+esac
+AC_MSG_RESULT($GNULD)
+
+AC_C_INLINE
+if test "$ac_cv_c_inline" != no ; then
+        AC_DEFINE(USE_INLINE, 1, [Define to use the C99 inline keyword.])
+        AC_SUBST(USE_INLINE)
+fi
+
+
+AC_MSG_CHECKING(for --enable-shared)
+AC_ARG_ENABLE(shared,
+              AS_HELP_STRING([--enable-shared], [disable/enable building shared python library]))
+
+if test -z "$enable_shared"
+then 
+  case $ac_sys_system in
+  CYGWIN*)
+    enable_shared="yes";;
+  *)
+    enable_shared="no";;
+  esac
+fi
+AC_MSG_RESULT($enable_shared)
+
+AC_MSG_CHECKING(for --enable-profiling)
+AC_ARG_ENABLE(profiling,
+              AS_HELP_STRING([--enable-profiling], [enable C-level code profiling]),
+[ac_save_cc="$CC"
+ CC="$CC -pg"
+ AC_RUN_IFELSE([AC_LANG_SOURCE([[int main() { return 0; }]])],
+   [ac_enable_profiling="yes"],
+   [ac_enable_profiling="no"],
+   [ac_enable_profiling="no"])
+ CC="$ac_save_cc"])
+AC_MSG_RESULT($ac_enable_profiling)
+
+case "$ac_enable_profiling" in
+    "yes")
+	BASECFLAGS="-pg $BASECFLAGS"
+	LDFLAGS="-pg $LDFLAGS"
+    ;;
+esac
+
+AC_MSG_CHECKING(LDLIBRARY)
+
+# MacOSX framework builds need more magic. LDLIBRARY is the dynamic
+# library that we build, but we do not want to link against it (we
+# will find it with a -framework option). For this reason there is an
+# extra variable, BLDLIBRARY, against which Python and the extension
+# modules are linked. This is normally the same as
+# LDLIBRARY, but empty for MacOSX framework builds.
+if test "$enable_framework"
+then
+  LDLIBRARY='$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+  RUNSHARED=DYLD_FRAMEWORK_PATH="`pwd`:$DYLD_FRAMEWORK_PATH"
+  BLDLIBRARY=''
+else
+  BLDLIBRARY='$(LDLIBRARY)'
+fi  
+
+# Other platforms follow
+if test $enable_shared = "yes"; then
+  AC_DEFINE(Py_ENABLE_SHARED, 1, [Defined if Python is built as a shared library.])
+  case $ac_sys_system in
+    CYGWIN*)
+          LDLIBRARY='libpython$(LDVERSION).dll.a'
+          DLLLIBRARY='libpython$(LDVERSION).dll'
+          ;;
+    SunOS*)
+	  LDLIBRARY='libpython$(LDVERSION).so'
+	  BLDLIBRARY='-Wl,-R,$(LIBDIR) -L. -lpython$(LDVERSION)'
+	  RUNSHARED=LD_LIBRARY_PATH=`pwd`:${LD_LIBRARY_PATH}
+	  INSTSONAME="$LDLIBRARY".$SOVERSION
+	  if test "$with_pydebug" != yes
+	  then
+	      PY3LIBRARY=libpython3.so
+	  fi
+          ;;
+    Linux*|GNU*|NetBSD*|FreeBSD*|DragonFly*|OpenBSD*)
+	  LDLIBRARY='libpython$(LDVERSION).so'
+	  BLDLIBRARY='-L. -lpython$(LDVERSION)'
+	  RUNSHARED=LD_LIBRARY_PATH=`pwd`:${LD_LIBRARY_PATH}
+	  case $ac_sys_system in
+	      FreeBSD*)
+		SOVERSION=`echo $SOVERSION|cut -d "." -f 1`
+		;;
+	  esac
+	  INSTSONAME="$LDLIBRARY".$SOVERSION
+	  if test "$with_pydebug" != yes
+          then
+	      PY3LIBRARY=libpython3.so
+	  fi
+	  ;;
+    hp*|HP*)
+	  case `uname -m` in
+		ia64)
+			LDLIBRARY='libpython$(LDVERSION).so'
+			;;
+		*)
+			LDLIBRARY='libpython$(LDVERSION).sl'
+			;;
+	  esac
+	  BLDLIBRARY='-Wl,+b,$(LIBDIR) -L. -lpython$(LDVERSION)'
+	  RUNSHARED=SHLIB_PATH=`pwd`:${SHLIB_PATH}
+	  ;;
+    OSF*)
+	  LDLIBRARY='libpython$(LDVERSION).so'
+	  BLDLIBRARY='-rpath $(LIBDIR) -L. -lpython$(LDVERSION)'
+	  RUNSHARED=LD_LIBRARY_PATH=`pwd`:${LD_LIBRARY_PATH}
+	  ;;
+    Darwin*)
+    	LDLIBRARY='libpython$(LDVERSION).dylib'
+	BLDLIBRARY='-L. -lpython$(LDVERSION)'
+	RUNSHARED='DYLD_LIBRARY_PATH=`pwd`:${DYLD_LIBRARY_PATH}'
+	;;
+    AIX*)
+	LDLIBRARY='libpython$(LDVERSION).so'
+	RUNSHARED=LIBPATH=`pwd`:${LIBPATH}
+	;;
+
+  esac
+else # shared is disabled
+  case $ac_sys_system in
+    CYGWIN*)
+          BLDLIBRARY='$(LIBRARY)'
+          LDLIBRARY='libpython$(LDVERSION).dll.a'
+          ;;
+  esac
+fi
+
+AC_MSG_RESULT($LDLIBRARY)
+
+AC_PROG_RANLIB
+AC_SUBST(AR)
+AC_CHECK_PROGS(AR, ar aal, ar)
+
+# tweak ARFLAGS only if the user didn't set it on the command line
+AC_SUBST(ARFLAGS)
+if test -z "$ARFLAGS"
+then
+        ARFLAGS="rc"
+fi
+
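+# Work out how to report the source revision at build time: use svnversion
+# when available, otherwise fall back to reporting an unversioned directory.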
+AC_SUBST(SVNVERSION)
+AC_CHECK_PROG(SVNVERSION, svnversion, found, not-found)
+if test $SVNVERSION = found
+then
+	SVNVERSION="svnversion \$(srcdir)"
+else
+	SVNVERSION="echo Unversioned directory"
+fi
+
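+# Likewise for Mercurial: only invoke hg when the source tree is an hg
+# checkout (detected via .hg/dirstate).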
+AC_SUBST(HGVERSION)
+AC_SUBST(HGTAG)
+AC_SUBST(HGBRANCH)
+
+if test -e $srcdir/.hg/dirstate
+then
+AC_CHECK_PROG(HAS_HG, hg, found, not-found)
+else
+HAS_HG=no-repository
+fi
+if test $HAS_HG = found
+then
+    HGVERSION="hg id -i \$(srcdir)"
+    HGTAG="hg id -t \$(srcdir)"
+    HGBRANCH="hg id -b \$(srcdir)"
+else
+    HGVERSION=""
+    HGTAG=""
+    HGBRANCH=""
+fi
+
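+# Regenerating the ASDL-derived parser sources needs both hg and python;
+# when either is missing, turn the asdl_c.py step into an explanatory echo.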
+AC_SUBST(DISABLE_ASDLGEN)
+DISABLE_ASDLGEN=""
+AC_CHECK_PROG(HAS_PYTHON, python, found, not-found)
+if test $HAS_HG != found -o $HAS_PYTHON != found
+then
+    DISABLE_ASDLGEN="@echo hg: $HAS_HG, python: $HAS_PYTHON! cannot run \$(srcdir)/Parser/asdl_c.py #"
+fi
+
+
+case $MACHDEP in
+bsdos*|hp*|HP*)
+	# install -d does not work on BSDI or HP-UX
+	if test -z "$INSTALL"
+	then
+		INSTALL="${srcdir}/install-sh -c"
+	fi
+esac
+AC_PROG_INSTALL
+
+# Not every filesystem supports hard links
+AC_SUBST(LN)
+if test -z "$LN" ; then
+	case $ac_sys_system in
+		CYGWIN*) LN="ln -s";;
+		*) LN=ln;;
+	esac
+fi
+
+# For calculating the .so ABI tag.
+AC_SUBST(ABIFLAGS)
+ABIFLAGS=""
+
+# Check for --with-pydebug
+AC_MSG_CHECKING(for --with-pydebug)
+AC_ARG_WITH(pydebug, 
+            AS_HELP_STRING([--with-pydebug], [build with Py_DEBUG defined]),
+[
+if test "$withval" != no
+then 
+  AC_DEFINE(Py_DEBUG, 1, 
+  [Define if you want to build an interpreter with many run-time checks.]) 
+  AC_MSG_RESULT(yes); 
+  Py_DEBUG='true'
+  ABIFLAGS="${ABIFLAGS}d"
+else AC_MSG_RESULT(no); Py_DEBUG='false'
+fi],
+[AC_MSG_RESULT(no)])
+
+# XXX Shouldn't the code above that fiddles with BASECFLAGS and OPT be
+# merged with this chunk of code?
+
+# Optimizer/debugger flags
+# ------------------------
+# (The following bit of code is complicated enough - please keep things
+# indented properly.  Just pretend you're editing Python code. ;-)
+
+# There are two parallel sets of case statements below, one that checks to
+# see if OPT was set and one that does BASECFLAGS setting based upon
+# compiler and platform.  BASECFLAGS tweaks need to be made even if the
+# user set OPT.
+
+# tweak OPT based on compiler and platform, only if the user didn't set
+# it on the command line
+AC_SUBST(OPT)
+if test "${OPT-unset}" = "unset"
+then
+    case $GCC in
+    yes)
+        if test "$CC" != 'g++' ; then
+	    STRICT_PROTO="-Wstrict-prototypes"
+	fi
+        # For gcc 4.x we need to use -fwrapv, so let's check if it's supported.
+        if "$CC" -v --help 2>/dev/null |grep -- -fwrapv > /dev/null; then
+           WRAP="-fwrapv"
+        fi
+
+        # Clang also needs -fwrapv
+        case $CC in
+            *clang*) WRAP="-fwrapv"
+            ;;
+        esac
+
+	case $ac_cv_prog_cc_g in
+	yes)
+	    if test "$Py_DEBUG" = 'true' ; then
+		# Optimization messes up debuggers, so turn it off for
+		# debug builds.
+		OPT="-g -O0 -Wall $STRICT_PROTO"
+	    else
+		OPT="-g $WRAP -O3 -Wall $STRICT_PROTO"
+	    fi
+	    ;;
+	*)
+	    OPT="-O3 -Wall $STRICT_PROTO"
+	    ;;
+	esac
+	case $ac_sys_system in
+	    SCO_SV*) OPT="$OPT -m486 -DSCO5"
+	    ;;
+        esac
+	;;
+
+    *)
+	OPT="-O"
+	;;
+    esac
+fi
+
+AC_SUBST(BASECFLAGS)
+
+# The -arch flags for universal builds on OSX
+UNIVERSAL_ARCH_FLAGS=
+AC_SUBST(UNIVERSAL_ARCH_FLAGS)
+
+# tweak BASECFLAGS based on compiler and platform
+case $GCC in
+yes)
+    # Python doesn't violate C99 aliasing rules, but older versions of
+    # GCC produce warnings for legal Python code.  Enable
+    # -fno-strict-aliasing on versions of GCC that support the option but
+    # still produce such warnings.  See Issue3326.
+    AC_MSG_CHECKING(whether $CC accepts and needs -fno-strict-aliasing)
+     ac_save_cc="$CC"
+     CC="$CC -fno-strict-aliasing"
+     save_CFLAGS="$CFLAGS"
+     AC_CACHE_VAL(ac_cv_no_strict_aliasing,
+       AC_COMPILE_IFELSE(
+         [
+	   AC_LANG_PROGRAM([[]], [[]])
+	 ],[
+	   CC="$ac_save_cc -fstrict-aliasing"
+           CFLAGS="$CFLAGS -Werror -Wstrict-aliasing"
+           AC_COMPILE_IFELSE(
+	     [
+	       AC_LANG_PROGRAM([[void f(int **x) {}]],
+	         [[double *x; f((int **) &x);]])
+	     ],[
+	       ac_cv_no_strict_aliasing=no
+	     ],[
+               ac_cv_no_strict_aliasing=yes
+	     ])
+	 ],[
+	   ac_cv_no_strict_aliasing=no
+	 ]))
+     CFLAGS="$save_CFLAGS"
+     CC="$ac_save_cc"
+    AC_MSG_RESULT($ac_cv_no_strict_aliasing)
+    if test $ac_cv_no_strict_aliasing = yes
+    then
+      BASECFLAGS="$BASECFLAGS -fno-strict-aliasing"
+    fi
+
+    # if using gcc on alpha, use -mieee to get (near) full IEEE 754
+    # support.  Without this, treatment of subnormals doesn't follow
+    # the standard.
+    case $ac_sys_machine in
+         alpha*)
+                BASECFLAGS="$BASECFLAGS -mieee"
+                ;;
+    esac
+
+    case $ac_sys_system in
+	SCO_SV*)
+	    BASECFLAGS="$BASECFLAGS -m486 -DSCO5"
+	    ;;
+	# is there any other compiler on Darwin besides gcc?
+	Darwin*)
+	    # -Wno-long-double, -no-cpp-precomp, and -mno-fused-madd
+	    # used to be here, but non-Apple gcc doesn't accept them.
+            if test "${CC}" = gcc
+	    then
+		AC_MSG_CHECKING(which compiler should be used)
+		case "${UNIVERSALSDK}" in
+		*/MacOSX10.4u.sdk)
+			# When building against the 10.4u SDK, force gcc-4.0 if
+			# the compiler is gcc; otherwise the user will get very
+			# confusing error messages when building on OSX 10.6
+			CC=gcc-4.0
+			CPP=cpp-4.0
+			;;
+		esac
+		AC_MSG_RESULT($CC)
+	    fi
+
+
+	    if test "${enable_universalsdk}"; then
+		UNIVERSAL_ARCH_FLAGS=""
+	        if test "$UNIVERSAL_ARCHS" = "32-bit" ; then
+		   UNIVERSAL_ARCH_FLAGS="-arch ppc -arch i386"
+		   ARCH_RUN_32BIT=""
+		   LIPO_32BIT_FLAGS=""
+	         elif test "$UNIVERSAL_ARCHS" = "64-bit" ; then
+		   UNIVERSAL_ARCH_FLAGS="-arch ppc64 -arch x86_64"
+		   LIPO_32BIT_FLAGS=""
+		   ARCH_RUN_32BIT="true"
+
+	         elif test "$UNIVERSAL_ARCHS" = "all" ; then
+		   UNIVERSAL_ARCH_FLAGS="-arch i386 -arch ppc -arch ppc64 -arch x86_64"
+		   LIPO_32BIT_FLAGS="-extract ppc7400 -extract i386"
+		   ARCH_RUN_32BIT="/usr/bin/arch -i386 -ppc"
+
+	         elif test "$UNIVERSAL_ARCHS" = "intel" ; then
+		   UNIVERSAL_ARCH_FLAGS="-arch i386 -arch x86_64"
+		   LIPO_32BIT_FLAGS="-extract i386"
+		   ARCH_RUN_32BIT="/usr/bin/arch -i386"
+
+	         elif test "$UNIVERSAL_ARCHS" = "3-way" ; then
+		   UNIVERSAL_ARCH_FLAGS="-arch i386 -arch ppc -arch x86_64"
+		   LIPO_32BIT_FLAGS="-extract ppc7400 -extract i386"
+		   ARCH_RUN_32BIT="/usr/bin/arch -i386 -ppc"
+
+		 else
+	           AC_MSG_ERROR([proper usage is --with-universal-arch=32-bit|64-bit|all|intel|3-way])
+
+		 fi
+
+
+		CFLAGS="${UNIVERSAL_ARCH_FLAGS} -isysroot ${UNIVERSALSDK} ${CFLAGS}"
+		if test "${UNIVERSALSDK}" != "/"
+		then
+			CFLAGS="-isysroot ${UNIVERSALSDK} ${CFLAGS}"
+			LDFLAGS="-isysroot ${UNIVERSALSDK} ${LDFLAGS}"
+			CPPFLAGS="-isysroot ${UNIVERSALSDK} ${CPPFLAGS}"
+		fi
+	    fi
+
+	    # Calculate the right deployment target for this build.
+	    #
+	    cur_target=`sw_vers -productVersion | sed 's/\(10\.[[0-9]]*\).*/\1/'`
+	    if test ${cur_target} '>' 10.2; then
+		    cur_target=10.3
+		    if test ${enable_universalsdk}; then
+			    if test "${UNIVERSAL_ARCHS}" = "all"; then
+				    # Ensure that the default platform for a 
+				    # 4-way universal build is OSX 10.5, 
+				    # that's the first OS release where 
+				    # 4-way builds make sense.
+				    cur_target='10.5'
+
+			    elif test "${UNIVERSAL_ARCHS}" = "3-way"; then
+				    cur_target='10.5'
+
+			    elif test "${UNIVERSAL_ARCHS}" = "intel"; then
+				    cur_target='10.5'
+
+			    elif test "${UNIVERSAL_ARCHS}" = "64-bit"; then
+				    cur_target='10.5'
+			    fi
+		    else
+			    if test `/usr/bin/arch` = "i386"; then
+				    # On Intel macs default to a deployment
+				    # target of 10.4, that's the first OSX
+				    # release with Intel support.
+				    cur_target="10.4"
+			    fi
+		    fi
+	    fi
+	    CONFIGURE_MACOSX_DEPLOYMENT_TARGET=${MACOSX_DEPLOYMENT_TARGET-${cur_target}}
+	    
+	    # Make sure that MACOSX_DEPLOYMENT_TARGET is set in the 
+	    # environment with a value that is the same as what we'll use
+	    # in the Makefile to ensure that we'll get the same compiler
+	    # environment during configure and build time.
+	    MACOSX_DEPLOYMENT_TARGET="$CONFIGURE_MACOSX_DEPLOYMENT_TARGET"
+	    export MACOSX_DEPLOYMENT_TARGET
+	    EXPORT_MACOSX_DEPLOYMENT_TARGET=''
+
+	    ;;
+	OSF*)
+	    BASECFLAGS="$BASECFLAGS -mieee"
+	    ;;
+    esac
+    ;;
+
+*)
+    case $ac_sys_system in
+    OpenUNIX*|UnixWare*)
+	BASECFLAGS="$BASECFLAGS -K pentium,host,inline,loop_unroll,alloca "
+	;;
+    OSF*)
+	BASECFLAGS="$BASECFLAGS -ieee -std"
+    	;;
+    SCO_SV*)
+	BASECFLAGS="$BASECFLAGS -belf -Ki486 -DSCO5"
+	;;
+    esac
+    ;;
+esac
+
+if test "$Py_DEBUG" = 'true'; then
+  :
+else
+  OPT="-DNDEBUG $OPT"
+fi
+
+if test "$ac_arch_flags"
+then
+	BASECFLAGS="$BASECFLAGS $ac_arch_flags"
+fi
+
+# Check whether GCC supports PyArg_ParseTuple format
+if test "$GCC" = "yes"
+then
+  AC_MSG_CHECKING(whether gcc supports ParseTuple __format__)
+  save_CFLAGS=$CFLAGS
+  CFLAGS="$CFLAGS -Werror"
+  AC_COMPILE_IFELSE([
+    AC_LANG_PROGRAM([[void f(char*,...)__attribute((format(PyArg_ParseTuple, 1, 2)));]], [[]])
+  ],[
+    AC_DEFINE(HAVE_ATTRIBUTE_FORMAT_PARSETUPLE, 1,
+      [Define if GCC supports __attribute__((format(PyArg_ParseTuple, 2, 3)))])
+    AC_MSG_RESULT(yes)
+  ],[
+    AC_MSG_RESULT(no)
+  ])
+  CFLAGS=$save_CFLAGS
+fi
+
+# On some compilers, pthreads are available without further options
+# (e.g. MacOS X). On some of these systems, the compiler will not
+# complain if unaccepted options are passed (e.g. gcc on Mac OS X).
+# So we have to see first whether pthreads are available without
+# options before we can check whether -Kpthread improves anything.
+AC_MSG_CHECKING(whether pthreads are available without options)
+AC_CACHE_VAL(ac_cv_pthread_is_default,
+[AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <pthread.h>
+
+void* routine(void* p){return NULL;}
+
+int main(){
+  pthread_t p;
+  if(pthread_create(&p,NULL,routine,NULL)!=0)
+    return 1;
+  (void)pthread_detach(p);
+  return 0;
+}
+]])],[
+  ac_cv_pthread_is_default=yes
+  ac_cv_kthread=no
+  ac_cv_pthread=no
+],[ac_cv_pthread_is_default=no],[ac_cv_pthread_is_default=no])
+])
+AC_MSG_RESULT($ac_cv_pthread_is_default)
+
+
+if test $ac_cv_pthread_is_default = yes 
+then
+  ac_cv_kpthread=no
+else
+# -Kpthread, if available, provides the right #defines
+# and linker options to make pthread_create available
+# Some compilers won't report that they do not support -Kpthread,
+# so we need to run a program to see whether it really made the
+# function available.
+AC_MSG_CHECKING(whether $CC accepts -Kpthread)
+AC_CACHE_VAL(ac_cv_kpthread,
+[ac_save_cc="$CC"
+CC="$CC -Kpthread"
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <pthread.h>
+
+void* routine(void* p){return NULL;}
+
+int main(){
+  pthread_t p;
+  if(pthread_create(&p,NULL,routine,NULL)!=0)
+    return 1;
+  (void)pthread_detach(p);
+  return 0;
+}
+]])],[ac_cv_kpthread=yes],[ac_cv_kpthread=no],[ac_cv_kpthread=no])
+CC="$ac_save_cc"])
+AC_MSG_RESULT($ac_cv_kpthread)
+fi
+
+if test $ac_cv_kpthread = no -a $ac_cv_pthread_is_default = no
+then
+# -Kthread, if available, provides the right #defines
+# and linker options to make pthread_create available
+# Some compilers won't report that they do not support -Kthread,
+# so we need to run a program to see whether it really made the
+# function available.
+AC_MSG_CHECKING(whether $CC accepts -Kthread)
+AC_CACHE_VAL(ac_cv_kthread,
+[ac_save_cc="$CC"
+CC="$CC -Kthread"
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <pthread.h>
+
+void* routine(void* p){return NULL;}
+
+int main(){
+  pthread_t p;
+  if(pthread_create(&p,NULL,routine,NULL)!=0)
+    return 1;
+  (void)pthread_detach(p);
+  return 0;
+}
+]])],[ac_cv_kthread=yes],[ac_cv_kthread=no],[ac_cv_kthread=no])
+CC="$ac_save_cc"])
+AC_MSG_RESULT($ac_cv_kthread)
+fi
+
+if test $ac_cv_kthread = no -a $ac_cv_pthread_is_default = no
+then
+# -pthread, if available, provides the right #defines
+# and linker options to make pthread_create available
+# Some compilers won't report that they do not support -pthread,
+# so we need to run a program to see whether it really made the
+# function available.
+AC_MSG_CHECKING(whether $CC accepts -pthread)
+AC_CACHE_VAL(ac_cv_thread,
+[ac_save_cc="$CC"
+CC="$CC -pthread"
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <pthread.h>
+
+void* routine(void* p){return NULL;}
+
+int main(){
+  pthread_t p;
+  if(pthread_create(&p,NULL,routine,NULL)!=0)
+    return 1;
+  (void)pthread_detach(p);
+  return 0;
+}
+]])],[ac_cv_pthread=yes],[ac_cv_pthread=no],[ac_cv_pthread=no])
+CC="$ac_save_cc"])
+AC_MSG_RESULT($ac_cv_pthread)
+fi
+
+# If we have set a CC compiler flag for thread support then
+# check if it works for CXX, too.
+ac_cv_cxx_thread=no
+if test ! -z "$CXX"
+then
+AC_MSG_CHECKING(whether $CXX also accepts flags for thread support)
+ac_save_cxx="$CXX"
+
+if test "$ac_cv_kpthread" = "yes"
+then
+  CXX="$CXX -Kpthread"  
+  ac_cv_cxx_thread=yes
+elif test "$ac_cv_kthread" = "yes"
+then
+  CXX="$CXX -Kthread"
+  ac_cv_cxx_thread=yes
+elif test "$ac_cv_pthread" = "yes"
+then 
+  CXX="$CXX -pthread"
+  ac_cv_cxx_thread=yes
+fi
+
+if test $ac_cv_cxx_thread = yes
+then
+  echo 'void foo();int main(){foo();}void foo(){}' > conftest.$ac_ext
+  $CXX -c conftest.$ac_ext 2>&5
+  if $CXX -o conftest$ac_exeext conftest.$ac_objext 2>&5 \
+     && test -s conftest$ac_exeext && ./conftest$ac_exeext
+  then
+    ac_cv_cxx_thread=yes
+  else
+    ac_cv_cxx_thread=no
+  fi
+  rm -fr conftest*
+fi
+AC_MSG_RESULT($ac_cv_cxx_thread)
+fi
+CXX="$ac_save_cxx"
+
+dnl # check for ANSI or K&R ("traditional") preprocessor
+dnl AC_MSG_CHECKING(for C preprocessor type)
+dnl AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+dnl #define spam(name, doc) {#name, &name, #name "() -- " doc}
+dnl int foo;
+dnl struct {char *name; int *addr; char *doc;} desc = spam(foo, "something");
+dnl ]], [[;]])],[cpp_type=ansi],[AC_DEFINE(HAVE_OLD_CPP) cpp_type=traditional])
+dnl AC_MSG_RESULT($cpp_type)
+
+# checks for header files
+AC_HEADER_STDC
+AC_CHECK_HEADERS(asm/types.h conio.h curses.h direct.h dlfcn.h errno.h \
+fcntl.h grp.h \
+ieeefp.h io.h langinfo.h libintl.h ncurses.h poll.h process.h pthread.h \
+shadow.h signal.h stdint.h stropts.h termios.h thread.h \
+unistd.h utime.h \
+sys/audioio.h sys/bsdtty.h sys/epoll.h sys/event.h sys/file.h sys/loadavg.h \
+sys/lock.h sys/mkdev.h sys/modem.h \
+sys/param.h sys/poll.h sys/select.h sys/socket.h sys/statvfs.h sys/stat.h \
+sys/syscall.h sys/termio.h sys/time.h \
+sys/times.h sys/types.h sys/un.h sys/utsname.h sys/wait.h pty.h libutil.h \
+sys/resource.h netpacket/packet.h sysexits.h bluetooth.h \
+bluetooth/bluetooth.h linux/tipc.h spawn.h util.h)
+AC_HEADER_DIRENT
+AC_HEADER_MAJOR
+
+# On Solaris, term.h requires curses.h
+AC_CHECK_HEADERS(term.h,,,[
+#ifdef HAVE_CURSES_H
+#include <curses.h>
+#endif
+])
+
+# On Linux, netlink.h requires asm/types.h
+AC_CHECK_HEADERS(linux/netlink.h,,,[
+#ifdef HAVE_ASM_TYPES_H
+#include <asm/types.h>
+#endif
+#ifdef HAVE_SYS_SOCKET_H
+#include <sys/socket.h>
+#endif
+])
+
+# checks for typedefs
+was_it_defined=no
+AC_MSG_CHECKING(for clock_t in time.h)
+AC_EGREP_HEADER(clock_t, time.h, was_it_defined=yes, [
+    AC_DEFINE(clock_t, long, [Define to 'long' if <time.h> doesn't define.])
+])
+AC_MSG_RESULT($was_it_defined)
+
+# Check whether using makedev requires defining _OSF_SOURCE
+AC_MSG_CHECKING(for makedev)
+AC_LINK_IFELSE([AC_LANG_PROGRAM([[
+#if defined(MAJOR_IN_MKDEV)
+#include <sys/mkdev.h>
+#elif defined(MAJOR_IN_SYSMACROS)
+#include <sys/sysmacros.h>
+#else
+#include <sys/types.h>
+#endif
+]], [[
+  makedev(0, 0) ]])
+],[ac_cv_has_makedev=yes],[ac_cv_has_makedev=no])
+if test "$ac_cv_has_makedev" = "no"; then
+    # we didn't link; see whether defining _OSF_SOURCE allows us to link
+    AC_LINK_IFELSE([AC_LANG_PROGRAM([[
+#define _OSF_SOURCE 1
+#include <sys/types.h>
+    ]],
+    [[ makedev(0, 0) ]])],
+    [ac_cv_has_makedev=yes],
+    [ac_cv_has_makedev=no])
+    if test "$ac_cv_has_makedev" = "yes"; then
+        AC_DEFINE(_OSF_SOURCE, 1, [Define _OSF_SOURCE to get the makedev macro.])
+    fi
+fi
+AC_MSG_RESULT($ac_cv_has_makedev)
+if test "$ac_cv_has_makedev" = "yes"; then
+    AC_DEFINE(HAVE_MAKEDEV, 1, [Define this if you have the makedev macro.])
+fi
+
+# Enabling LFS on Solaris (2.6 to 9) with gcc 2.95 triggers a bug in
+# the system headers: If _XOPEN_SOURCE and _LARGEFILE_SOURCE are
+# defined, but the compiler does not support pragma redefine_extname,
+# and _LARGEFILE64_SOURCE is not defined, the headers refer to 64-bit
+# structures (such as rlimit64) without declaring them. As a
+# work-around, disable LFS on such configurations
+
+use_lfs=yes
+AC_MSG_CHECKING(Solaris LFS bug)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#define _LARGEFILE_SOURCE 1
+#define _FILE_OFFSET_BITS 64
+#include <sys/resource.h>
+]], [[struct rlimit foo;]])],[sol_lfs_bug=no],[sol_lfs_bug=yes])
+AC_MSG_RESULT($sol_lfs_bug)
+if test "$sol_lfs_bug" = "yes"; then
+  use_lfs=no
+fi
+
+if test "$use_lfs" = "yes"; then
+# Two defines needed to enable largefile support on various platforms
+# These may affect some typedefs
+case $ac_sys_system/$ac_sys_release in
+AIX*)
+    AC_DEFINE(_LARGE_FILES, 1, 
+    [This must be defined on AIX systems to enable large file support.])
+    ;;
+esac
+AC_DEFINE(_LARGEFILE_SOURCE, 1, 
+[This must be defined on some systems to enable large file support.])
+AC_DEFINE(_FILE_OFFSET_BITS, 64,
+[This must be set to 64 on some systems to enable large file support.])
+fi
+
+# Add some code to confdefs.h so that the test for off_t works on SCO
+cat >> confdefs.h <<\EOF
+#if defined(SCO_DS)
+#undef _OFF_T
+#endif
+EOF
+
+# Type availability checks
+AC_TYPE_MODE_T
+AC_TYPE_OFF_T
+AC_TYPE_PID_T
+AC_DEFINE_UNQUOTED([RETSIGTYPE],[void],[assume C89 semantics that RETSIGTYPE is always void])
+AC_TYPE_SIZE_T
+AC_TYPE_UID_T
+AC_TYPE_UINT32_T
+AC_TYPE_UINT64_T
+AC_TYPE_INT32_T
+AC_TYPE_INT64_T
+AC_CHECK_TYPE(ssize_t,
+  AC_DEFINE(HAVE_SSIZE_T, 1, [Define if your compiler provides ssize_t]),,)
+
+# Sizes of various common basic types
+# ANSI C requires sizeof(char) == 1, so no need to check it
+AC_CHECK_SIZEOF(int, 4)
+AC_CHECK_SIZEOF(long, 4)
+AC_CHECK_SIZEOF(void *, 4)
+AC_CHECK_SIZEOF(short, 2)
+AC_CHECK_SIZEOF(float, 4)
+AC_CHECK_SIZEOF(double, 8)
+AC_CHECK_SIZEOF(fpos_t, 4)
+AC_CHECK_SIZEOF(size_t, 4)
+AC_CHECK_SIZEOF(pid_t, 4)
+
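+# Probe for optional wider C types (long long, long double, _Bool) and
+# record their sizes when present.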
+AC_MSG_CHECKING(for long long support)
+have_long_long=no
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[long long x; x = (long long)0;]])],[
+  AC_DEFINE(HAVE_LONG_LONG, 1, [Define this if you have the type long long.]) 
+  have_long_long=yes
+],[])
+AC_MSG_RESULT($have_long_long)
+if test "$have_long_long" = yes ; then
+AC_CHECK_SIZEOF(long long, 8)
+fi
+
+AC_MSG_CHECKING(for long double support)
+have_long_double=no
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[long double x; x = (long double)0;]])],[
+  AC_DEFINE(HAVE_LONG_DOUBLE, 1, [Define this if you have the type long double.]) 
+  have_long_double=yes
+],[])
+AC_MSG_RESULT($have_long_double)
+if test "$have_long_double" = yes ; then
+AC_CHECK_SIZEOF(long double, 16)
+fi
+
+
+AC_MSG_CHECKING(for _Bool support)
+have_c99_bool=no
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[_Bool x; x = (_Bool)0;]])],[
+  AC_DEFINE(HAVE_C99_BOOL, 1, [Define this if you have the type _Bool.]) 
+  have_c99_bool=yes
+],[])
+AC_MSG_RESULT($have_c99_bool)
+if test "$have_c99_bool" = yes ; then
+AC_CHECK_SIZEOF(_Bool, 1)
+fi
+
+AC_CHECK_TYPES(uintptr_t, 
+   [AC_CHECK_SIZEOF(uintptr_t, 4)], 
+   [], [#ifdef HAVE_STDINT_H
+        #include <stdint.h>
+        #endif
+        #ifdef HAVE_INTTYPES_H
+        #include <inttypes.h>
+        #endif])
+
+AC_CHECK_SIZEOF(off_t, [], [
+#ifdef HAVE_SYS_TYPES_H
+#include <sys/types.h>
+#endif
+])
+
+AC_MSG_CHECKING(whether to enable large file support)
+if test "$have_long_long" = yes
+then
+if test "$ac_cv_sizeof_off_t" -gt "$ac_cv_sizeof_long" -a \
+	"$ac_cv_sizeof_long_long" -ge "$ac_cv_sizeof_off_t"; then
+  AC_DEFINE(HAVE_LARGEFILE_SUPPORT, 1, 
+  [Defined to enable large file support when an off_t is bigger than a long
+   and long long is available and at least as big as an off_t. You may need
+   to add some flags for configuration and compilation to enable this mode.
+   (For Solaris and Linux, the necessary defines are already defined.)])
+  AC_MSG_RESULT(yes)
+else
+  AC_MSG_RESULT(no)
+fi
+else
+  AC_MSG_RESULT(no)
+fi
+
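+# time_t may be declared in either sys/types.h or time.h, so include both
+# when measuring its size.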
+AC_CHECK_SIZEOF(time_t, [], [
+#ifdef HAVE_SYS_TYPES_H
+#include <sys/types.h>
+#endif
+#ifdef HAVE_TIME_H
+#include <time.h>
+#endif
+])
+
+# if pthread_t is available, determine SIZEOF_PTHREAD_T
+ac_save_cc="$CC"
+if test "$ac_cv_kpthread" = "yes"
+then CC="$CC -Kpthread"
+elif test "$ac_cv_kthread" = "yes"
+then CC="$CC -Kthread"
+elif test "$ac_cv_pthread" = "yes"
+then CC="$CC -pthread"
+fi
+
+AC_MSG_CHECKING(for pthread_t)
+have_pthread_t=no
+AC_COMPILE_IFELSE([
+  AC_LANG_PROGRAM([[#include <pthread.h>]], [[pthread_t x; x = *(pthread_t*)0;]])
+],[have_pthread_t=yes],[])
+AC_MSG_RESULT($have_pthread_t)
+if test "$have_pthread_t" = yes ; then
+  AC_CHECK_SIZEOF(pthread_t, [], [
+#ifdef HAVE_PTHREAD_H
+#include <pthread.h>
+#endif
+  ])
+fi
+CC="$ac_save_cc"
+
+AC_SUBST(OTHER_LIBTOOL_OPT)
+case $ac_sys_system/$ac_sys_release in
+  Darwin/@<:@01567@:>@\..*) 
+    OTHER_LIBTOOL_OPT="-prebind -seg1addr 0x10000000"
+    ;;
+  Darwin/*)
+    OTHER_LIBTOOL_OPT=""
+    ;;
+esac
+
+
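+# LIBTOOL_CRUFT collects the extra Darwin-specific linker options (install
+# name, compatibility version, architecture) used when building the Python
+# framework.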
+ARCH_RUN_32BIT=""
+AC_SUBST(LIBTOOL_CRUFT)
+case $ac_sys_system/$ac_sys_release in
+  Darwin/@<:@01567@:>@\..*) 
+    LIBTOOL_CRUFT="-framework System -lcc_dynamic"
+    if test "${enable_universalsdk}"; then
+	    :
+    else
+        LIBTOOL_CRUFT="${LIBTOOL_CRUFT} -arch_only `/usr/bin/arch`"
+    fi
+    LIBTOOL_CRUFT=$LIBTOOL_CRUFT' -install_name $(PYTHONFRAMEWORKINSTALLDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+    LIBTOOL_CRUFT=$LIBTOOL_CRUFT' -compatibility_version $(VERSION) -current_version $(VERSION)';;
+  Darwin/*)
+    gcc_version=`gcc -dumpversion`
+    if test ${gcc_version} '<' 4.0
+        then
+            LIBTOOL_CRUFT="-lcc_dynamic"
+        else 
+            LIBTOOL_CRUFT=""
+    fi
+    AC_RUN_IFELSE([AC_LANG_SOURCE([[
+    #include <unistd.h>
+    int main(int argc, char*argv[])
+    {
+      if (sizeof(long) == 4) {
+    	  return 0;
+      } else {
+      	  return 1;
+      }
+    }
+    ]])],[ac_osx_32bit=yes],[ac_osx_32bit=no],[ac_osx_32bit=yes])
+    
+    if test "${ac_osx_32bit}" = "yes"; then
+    	case `/usr/bin/arch` in
+    	i386) 
+    		MACOSX_DEFAULT_ARCH="i386" 
+    		;;
+    	ppc) 
+    		MACOSX_DEFAULT_ARCH="ppc" 
+    		;;
+    	*)
+    		AC_MSG_ERROR([Unexpected output of 'arch' on OSX])
+    		;;
+    	esac
+    else
+    	case `/usr/bin/arch` in
+    	i386) 
+    		MACOSX_DEFAULT_ARCH="x86_64" 
+    		;;
+    	ppc) 
+    		MACOSX_DEFAULT_ARCH="ppc64" 
+    		;;
+    	*)
+    		AC_MSG_ERROR([Unexpected output of 'arch' on OSX])
+    		;;
+    	esac
+
+	#ARCH_RUN_32BIT="true"
+    fi
+
+    LIBTOOL_CRUFT=$LIBTOOL_CRUFT" -lSystem -lSystemStubs -arch_only ${MACOSX_DEFAULT_ARCH}"
+    LIBTOOL_CRUFT=$LIBTOOL_CRUFT' -install_name $(PYTHONFRAMEWORKINSTALLDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+    LIBTOOL_CRUFT=$LIBTOOL_CRUFT' -compatibility_version $(VERSION) -current_version $(VERSION)';;
+esac
+
+AC_MSG_CHECKING(for --enable-framework)
+if test "$enable_framework"
+then
+	BASECFLAGS="$BASECFLAGS -fno-common -dynamic"
+	# -F. is needed to allow linking to the framework while 
+	# in the build location.
+	AC_DEFINE(WITH_NEXT_FRAMEWORK, 1, 
+         [Define if you want to produce an OpenStep/Rhapsody framework
+         (shared library plus accessory files).])
+	AC_MSG_RESULT(yes)
+	if test $enable_shared = "yes"
+	then
+		AC_MSG_ERROR([Specifying both --enable-shared and --enable-framework is not supported, use only --enable-framework instead])
+	fi
+else
+	AC_MSG_RESULT(no)
+fi
+
+AC_MSG_CHECKING(for dyld)
+case $ac_sys_system/$ac_sys_release in
+  Darwin/*)
+  	AC_DEFINE(WITH_DYLD, 1, 
+        [Define if you want to use the new-style (Openstep, Rhapsody, MacOS)
+         dynamic linker (dyld) instead of the old-style (NextStep) dynamic
+         linker (rld). Dyld is necessary to support frameworks.])
+  	AC_MSG_RESULT(always on for Darwin)
+  	;;
+  *)
+	AC_MSG_RESULT(no)
+	;;
+esac
+
+# Set info about shared libraries.
+AC_SUBST(SO)
+AC_SUBST(LDSHARED)
+AC_SUBST(LDCXXSHARED)
+AC_SUBST(BLDSHARED)
+AC_SUBST(CCSHARED)
+AC_SUBST(LINKFORSHARED)
+
+AC_DEFINE_UNQUOTED(SHLIB_EXT, "$SO", [Define this to be extension of shared libraries (including the dot!).])
+# LDSHARED is the ld *command* used to create a shared library
+# -- "cc -G" on SunOS 5.x, "ld -shared" on IRIX 5
+# (Shared libraries in this instance are shared modules to be loaded into
+# Python, as opposed to building Python itself as a shared library.)
+AC_MSG_CHECKING(LDSHARED)
+if test -z "$LDSHARED"
+then
+	case $ac_sys_system/$ac_sys_release in
+	AIX*)
+		BLDSHARED="\$(srcdir)/Modules/ld_so_aix \$(CC) -bI:\$(srcdir)/Modules/python.exp"
+		LDSHARED="\$(BINLIBDEST)/config/ld_so_aix \$(CC) -bI:\$(BINLIBDEST)/config/python.exp"
+		;;
+	IRIX/5*) LDSHARED="ld -shared";;
+	IRIX*/6*) LDSHARED="ld ${SGI_ABI} -shared -all";;
+	SunOS/5*) 
+		if test "$GCC" = "yes" ; then
+			LDSHARED='$(CC) -shared'
+			LDCXXSHARED='$(CXX) -shared'
+		else
+			LDSHARED='$(CC) -G'
+			LDCXXSHARED='$(CXX) -G'
+		fi ;;
+	hp*|HP*)
+		if test "$GCC" = "yes" ; then
+			LDSHARED='$(CC) -shared'
+			LDCXXSHARED='$(CXX) -shared'
+		else
+			LDSHARED='ld -b'
+		fi ;;
+	OSF*) LDSHARED="ld -shared -expect_unresolved \"*\"";;
+	Darwin/1.3*)
+		LDSHARED='$(CC) -bundle'
+		LDCXXSHARED='$(CXX) -bundle'
+		if test "$enable_framework" ; then
+			# Link against the framework. All externals should be defined.
+			BLDSHARED="$LDSHARED "'$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+			LDSHARED="$LDSHARED "'$(PYTHONFRAMEWORKPREFIX)/$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+			LDCXXSHARED="$LDCXXSHARED "'$(PYTHONFRAMEWORKPREFIX)/$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+		else
+			# No framework. Ignore undefined symbols, assuming they come from Python
+			LDSHARED="$LDSHARED -undefined suppress"
+			LDCXXSHARED="$LDCXXSHARED -undefined suppress"
+		fi ;;
+	Darwin/1.4*|Darwin/5.*|Darwin/6.*)
+		LDSHARED='$(CC) -bundle'
+		LDCXXSHARED='$(CXX) -bundle'
+		if test "$enable_framework" ; then
+			# Link against the framework. All externals should be defined.
+			BLDSHARED="$LDSHARED "'$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+			LDSHARED="$LDSHARED "'$(PYTHONFRAMEWORKPREFIX)/$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+			LDCXXSHARED="$LDCXXSHARED "'$(PYTHONFRAMEWORKPREFIX)/$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+		else
+			# No framework, use the Python app as bundle-loader
+			BLDSHARED="$LDSHARED "'-bundle_loader $(BUILDPYTHON)'
+			LDSHARED="$LDSHARED "'-bundle_loader $(BINDIR)/python$(VERSION)$(EXE)'
+			LDCXXSHARED="$LDCXXSHARED "'-bundle_loader $(BINDIR)/python$(VERSION)$(EXE)'
+		fi ;;
+	Darwin/*)
+		# Use -undefined dynamic_lookup whenever possible (10.3 and later).
+		# This allows an extension to be used in any Python build.
+
+		if test ${MACOSX_DEPLOYMENT_TARGET} '>' 10.2
+		then
+			if test "${enable_universalsdk}"; then
+				LDFLAGS="${UNIVERSAL_ARCH_FLAGS} -isysroot ${UNIVERSALSDK} ${LDFLAGS}"
+			fi
+			LDSHARED='$(CC) -bundle -undefined dynamic_lookup'
+			LDCXXSHARED='$(CXX) -bundle -undefined dynamic_lookup'
+			BLDSHARED="$LDSHARED"
+		else
+			LDSHARED='$(CC) -bundle'
+			LDCXXSHARED='$(CXX) -bundle'
+			if test "$enable_framework" ; then
+				# Link against the framework. All externals should be defined.
+				BLDSHARED="$LDSHARED "'$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+				LDSHARED="$LDSHARED "'$(PYTHONFRAMEWORKPREFIX)/$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+				LDCXXSHARED="$LDCXXSHARED "'$(PYTHONFRAMEWORKPREFIX)/$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+			else
+				# No framework, use the Python app as bundle-loader
+				BLDSHARED="$LDSHARED "'-bundle_loader $(BUILDPYTHON)'
+				LDSHARED="$LDSHARED "'-bundle_loader $(BINDIR)/python$(VERSION)$(EXE)'
+				LDCXXSHARED="$LDCXXSHARED "'-bundle_loader $(BINDIR)/python$(VERSION)$(EXE)'
+			fi
+		fi
+		;;
+	Linux*|GNU*|QNX*)
+		LDSHARED='$(CC) -shared'
+		LDCXXSHARED='$(CXX) -shared';;
+	BSD/OS*/4*)
+		LDSHARED="gcc -shared"
+		LDCXXSHARED="g++ -shared";;
+	FreeBSD*)
+		if [[ "`$CC -dM -E - </dev/null | grep __ELF__`" != "" ]]
+		then
+			LDSHARED='$(CC) -shared'
+			LDCXXSHARED='$(CXX) -shared'
+		else
+			LDSHARED="ld -Bshareable"
+		fi;;
+	OpenBSD*)
+		if [[ "`$CC -dM -E - </dev/null | grep __ELF__`" != "" ]]
+		then
+				LDSHARED='$(CC) -shared $(CCSHARED)'
+				LDCXXSHARED='$(CXX) -shared $(CCSHARED)'
+		else
+				case `uname -r` in
+				[[01]].* | 2.[[0-7]] | 2.[[0-7]].*)
+				   LDSHARED="ld -Bshareable ${LDFLAGS}"
+				   ;;
+				*)
+				   LDSHARED='$(CC) -shared $(CCSHARED)'
+				   LDCXXSHARED='$(CXX) -shared $(CCSHARED)'
+				   ;;
+				esac
+		fi;;
+	NetBSD*|DragonFly*)
+		LDSHARED='$(CC) -shared'
+		LDCXXSHARED='$(CXX) -shared';;
+	OpenUNIX*|UnixWare*)
+		if test "$GCC" = "yes" ; then
+			LDSHARED='$(CC) -shared'
+			LDCXXSHARED='$(CXX) -shared'
+		else
+			LDSHARED='$(CC) -G'
+			LDCXXSHARED='$(CXX) -G'
+		fi;;
+	SCO_SV*)
+		LDSHARED='$(CC) -Wl,-G,-Bexport'
+		LDCXXSHARED='$(CXX) -Wl,-G,-Bexport';;
+	CYGWIN*)
+		LDSHARED="gcc -shared -Wl,--enable-auto-image-base"
+		LDCXXSHARED="g++ -shared -Wl,--enable-auto-image-base";;
+	*)	LDSHARED="ld";;
+	esac
+fi
+AC_MSG_RESULT($LDSHARED)
+LDCXXSHARED=${LDCXXSHARED-$LDSHARED}
+BLDSHARED=${BLDSHARED-$LDSHARED}
+# CCSHARED are the C *flags* used to create objects to go into a shared
+# library (module) -- this is only needed for a few systems
+AC_MSG_CHECKING(CCSHARED)
+if test -z "$CCSHARED"
+then
+	case $ac_sys_system/$ac_sys_release in
+	SunOS*) if test "$GCC" = yes;
+		then CCSHARED="-fPIC";
+		elif test `uname -p` = sparc;
+		then CCSHARED="-xcode=pic32";
+		else CCSHARED="-Kpic";
+		fi;;
+	hp*|HP*) if test "$GCC" = yes;
+		 then CCSHARED="-fPIC";
+		 else CCSHARED="+z";
+		 fi;;
+	Linux*|GNU*) CCSHARED="-fPIC";;
+	BSD/OS*/4*) CCSHARED="-fpic";;
+	FreeBSD*|NetBSD*|OpenBSD*|DragonFly*) CCSHARED="-fPIC";;
+	OpenUNIX*|UnixWare*)
+		if test "$GCC" = "yes"
+		then CCSHARED="-fPIC"
+		else CCSHARED="-KPIC"
+		fi;;
+	SCO_SV*)
+		if test "$GCC" = "yes"
+		then CCSHARED="-fPIC"
+		else CCSHARED="-Kpic -belf"
+		fi;;
+	IRIX*/6*)  case $CC in
+		   *gcc*) CCSHARED="-shared";;
+		   *) CCSHARED="";;
+		   esac;;
+	esac
+fi
+AC_MSG_RESULT($CCSHARED)
+# LINKFORSHARED are the flags passed to the $(CC) command that links
+# the python executable -- this is only needed for a few systems
+AC_MSG_CHECKING(LINKFORSHARED)
+if test -z "$LINKFORSHARED"
+then
+	case $ac_sys_system/$ac_sys_release in
+	AIX*)	LINKFORSHARED='-Wl,-bE:Modules/python.exp -lld';;
+	hp*|HP*)
+	    LINKFORSHARED="-Wl,-E -Wl,+s";;
+#	    LINKFORSHARED="-Wl,-E -Wl,+s -Wl,+b\$(BINLIBDEST)/lib-dynload";;
+	BSD/OS/4*) LINKFORSHARED="-Xlinker -export-dynamic";;
+	Linux*|GNU*) LINKFORSHARED="-Xlinker -export-dynamic";;
+	# -u libsys_s pulls in all symbols in libsys
+	Darwin/*) 
+		LINKFORSHARED="$extra_undefs -framework CoreFoundation"
+		if test "$enable_framework"
+		then
+			LINKFORSHARED="$LINKFORSHARED "'$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
+		fi
+		LINKFORSHARED="$LINKFORSHARED";;
+	OpenUNIX*|UnixWare*) LINKFORSHARED="-Wl,-Bexport";;
+	SCO_SV*) LINKFORSHARED="-Wl,-Bexport";;
+	ReliantUNIX*) LINKFORSHARED="-W1 -Blargedynsym";;
+	FreeBSD*|NetBSD*|OpenBSD*|DragonFly*) 
+		if [[ "`$CC -dM -E - </dev/null | grep __ELF__`" != "" ]]
+		then
+			LINKFORSHARED="-Wl,--export-dynamic"
+		fi;;
+	SunOS/5*) case $CC in
+		  *gcc*)
+		    if $CC -Xlinker --help 2>&1 | grep export-dynamic >/dev/null
+		    then
+			LINKFORSHARED="-Xlinker --export-dynamic"
+		    fi;;
+		  esac;;
+	CYGWIN*)
+		if test $enable_shared = "no"
+		then
+			LINKFORSHARED='-Wl,--out-implib=$(LDLIBRARY)'
+		fi;;
+	QNX*)
+		# -Wl,-E causes the symbols to be added to the dynamic
+		# symbol table so that they can be found when a module
+		# is loaded.  -N 2048K causes the stack size to be set
+		# to 2048 kilobytes so that the stack doesn't overflow
+		# when running test_compile.py.
+		LINKFORSHARED='-Wl,-E -N 2048K';;
+	esac
+fi
+AC_MSG_RESULT($LINKFORSHARED)
+
+
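+# CFLAGSFORSHARED holds the position-independent code flags needed when the
+# interpreter core itself goes into a shared library (i.e. when LDLIBRARY
+# differs from LIBRARY); Cygwin is the exception and needs no extra flags.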
+AC_SUBST(CFLAGSFORSHARED)
+AC_MSG_CHECKING(CFLAGSFORSHARED)
+if test ! "$LIBRARY" = "$LDLIBRARY"
+then
+	case $ac_sys_system in
+	CYGWIN*)
+		# Cygwin needs CCSHARED when building extension DLLs
+		# but not when building the interpreter DLL.
+		CFLAGSFORSHARED='';;
+	*)
+		CFLAGSFORSHARED='$(CCSHARED)'
+	esac
+fi
+AC_MSG_RESULT($CFLAGSFORSHARED)
+
+# SHLIBS are libraries (except -lc and -lm) to link to the python shared
+# library (with --enable-shared).
+# For platforms on which shared libraries are not allowed to have unresolved
+# symbols, this must be set to $(LIBS) (expanded by make). We do this even
+# if it is not required, since it creates a dependency of the shared library
+# to LIBS. This, in turn, means that applications linking the shared libpython
+# don't need to link LIBS explicitly. The default should be only changed
+# on systems where this approach causes problems.
+AC_SUBST(SHLIBS)
+AC_MSG_CHECKING(SHLIBS)
+case "$ac_sys_system" in
+	*)
+		SHLIBS='$(LIBS)';;
+esac
+AC_MSG_RESULT($SHLIBS)
+
+
+# checks for libraries
+AC_CHECK_LIB(dl, dlopen)	# Dynamic linking for SunOS/Solaris and SYSV
+AC_CHECK_LIB(dld, shl_load)	# Dynamic linking for HP-UX
+
+# only check for sem_init if thread support is requested
+if test "$with_threads" = "yes" -o -z "$with_threads"; then
+    AC_SEARCH_LIBS(sem_init, pthread rt posix4) # 'Real Time' functions on Solaris
+						# posix4 on Solaris 2.6
+						# pthread (first!) on Linux
+fi
+
+# check if we need libintl for locale functions
+AC_CHECK_LIB(intl, textdomain,
+	[AC_DEFINE(WITH_LIBINTL, 1,
+	[Define to 1 if libintl is needed for locale functions.])
+        LIBS="-lintl $LIBS"])
+
+# checks for system dependent C++ extensions support
+case "$ac_sys_system" in
+	AIX*)	AC_MSG_CHECKING(for genuine AIX C++ extensions support)
+		AC_LINK_IFELSE([
+		  AC_LANG_PROGRAM([[#include <load.h>]],
+				  [[loadAndInit("", 0, "")]])
+		],[
+		  AC_DEFINE(AIX_GENUINE_CPLUSPLUS, 1,
+                      [Define for AIX if your compiler is a genuine IBM xlC/xlC_r
+                       and you want support for AIX C++ shared extension modules.])
+		  AC_MSG_RESULT(yes)
+		],[
+		  AC_MSG_RESULT(no)
+		]);;
+	*) ;;
+esac
+
+# Most SVR4 platforms (e.g. Solaris) need -lsocket and -lnsl.
+AC_CHECK_LIB(nsl, t_open, [LIBS="-lnsl $LIBS"]) # SVR4
+AC_CHECK_LIB(socket, socket, [LIBS="-lsocket $LIBS"], [], $LIBS) # SVR4 sockets
+
+AC_MSG_CHECKING(for --with-libs)
+AC_ARG_WITH(libs,
+            AS_HELP_STRING([--with-libs='lib1 ...'], [link against additional libs]),
+[
+AC_MSG_RESULT($withval)
+LIBS="$withval $LIBS"
+],
+[AC_MSG_RESULT(no)])
+
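+# pkg-config, when present, is used below to locate headers for optional
+# system libraries such as libffi.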
+AC_PATH_TOOL([PKG_CONFIG], [pkg-config])
+
+# Check for use of the system expat library
+AC_MSG_CHECKING(for --with-system-expat)
+AC_ARG_WITH(system_expat,
+            AS_HELP_STRING([--with-system-expat], [build pyexpat module using an installed expat library]),
+            [],
+            [with_system_expat="no"])
+
+AC_MSG_RESULT($with_system_expat)
+
+# Check for use of the system libffi library
+AC_MSG_CHECKING(for --with-system-ffi)
+AC_ARG_WITH(system_ffi,
+            AS_HELP_STRING([--with-system-ffi], [build _ctypes module using an installed ffi library]),
+            [],
+            [with_system_ffi="no"])
+
+if test "$with_system_ffi" = "yes" && test -n "$PKG_CONFIG"; then
+    LIBFFI_INCLUDEDIR="`"$PKG_CONFIG" libffi --cflags-only-I 2>/dev/null | sed -e 's/^-I//;s/ *$//'`"
+else
+    LIBFFI_INCLUDEDIR=""
+fi
+AC_SUBST(LIBFFI_INCLUDEDIR)
+
+AC_MSG_RESULT($with_system_ffi)
+
+# Check for support for loadable sqlite extensions
+AC_MSG_CHECKING(for --enable-loadable-sqlite-extensions)
+AC_ARG_ENABLE(loadable-sqlite-extensions,
+              AS_HELP_STRING([--enable-loadable-sqlite-extensions], [support loadable extensions in _sqlite module]),
+              [],
+              [enable_loadable_sqlite_extensions="no"])
+
+AC_MSG_RESULT($enable_loadable_sqlite_extensions)
+
+# Check for --with-dbmliborder
+AC_MSG_CHECKING(for --with-dbmliborder)
+AC_ARG_WITH(dbmliborder,
+            AS_HELP_STRING([--with-dbmliborder=db1:db2:...], [order to check db backends for dbm. Valid value is a colon separated string with the backend names `ndbm', `gdbm' and `bdb'.]),
+[
+if test x$with_dbmliborder = xyes
+then
+AC_MSG_ERROR([proper usage is --with-dbmliborder=db1:db2:...])
+else
+  for db in `echo $with_dbmliborder | sed 's/:/ /g'`; do
+    if test x$db != xndbm && test x$db != xgdbm && test x$db != xbdb
+    then
+      AC_MSG_ERROR([proper usage is --with-dbmliborder=db1:db2:...])
+    fi
+  done
+fi])
+AC_MSG_RESULT($with_dbmliborder)
+
+# Determine if signalmodule should be used.
+AC_SUBST(USE_SIGNAL_MODULE)
+AC_SUBST(SIGNAL_OBJS)
+AC_MSG_CHECKING(for --with-signal-module)
+AC_ARG_WITH(signal-module,
+            AS_HELP_STRING([--with-signal-module], [disable/enable signal module]))
+
+if test -z "$with_signal_module"
+then with_signal_module="yes"
+fi
+AC_MSG_RESULT($with_signal_module)
+
+if test "${with_signal_module}" = "yes"; then
+	USE_SIGNAL_MODULE=""
+	SIGNAL_OBJS=""
+else
+	USE_SIGNAL_MODULE="#"
+	SIGNAL_OBJS="Parser/intrcheck.o Python/sigcheck.o"
+fi
+
+# This is used to generate Setup.config
+AC_SUBST(USE_THREAD_MODULE)
+USE_THREAD_MODULE=""
+
+AC_MSG_CHECKING(for --with-dec-threads)
+AC_SUBST(LDLAST)
+AC_ARG_WITH(dec-threads,
+            AS_HELP_STRING([--with-dec-threads], [use DEC Alpha/OSF1 thread-safe libraries]),
+[
+AC_MSG_RESULT($withval)
+LDLAST=-threads
+if test "${with_thread+set}" != set; then
+   with_thread="$withval";
+fi],
+[AC_MSG_RESULT(no)])
+
+# Templates for things AC_DEFINEd more than once.
+# For a single AC_DEFINE, no template is needed.
+AH_TEMPLATE(C_THREADS,[Define if you have the Mach cthreads package])
+AH_TEMPLATE(_REENTRANT,
+  [Define to force use of thread-safe errno, h_errno, and other functions])
+AH_TEMPLATE(WITH_THREAD,
+  [Define if you want to compile in rudimentary thread support])
+
+AC_MSG_CHECKING(for --with-threads)
+dnl quadrigraphs "@<:@" and "@:>@" produce "[" and "]" in the output
+AC_ARG_WITH(threads,
+            AS_HELP_STRING([--with(out)-threads@<:@=DIRECTORY@:>@], [disable/enable thread support]))
+
+# --with-thread is deprecated, but check for it anyway
+dnl quadrigraphs "@<:@" and "@:>@" produce "[" and "]" in the output
+AC_ARG_WITH(thread,
+            AS_HELP_STRING([--with(out)-thread@<:@=DIRECTORY@:>@], [deprecated; use --with(out)-threads]),
+            [with_threads=$with_thread])
+
+if test -z "$with_threads"
+then with_threads="yes"
+fi
+AC_MSG_RESULT($with_threads)
+
+AC_SUBST(THREADOBJ)
+if test "$with_threads" = "no"
+then
+    USE_THREAD_MODULE="#"
+elif test "$ac_cv_pthread_is_default" = yes
+then
+    AC_DEFINE(WITH_THREAD)
+    # Defining _REENTRANT on systems with POSIX threads should not hurt.
+    AC_DEFINE(_REENTRANT)
+    posix_threads=yes
+    THREADOBJ="Python/thread.o"    
+elif test "$ac_cv_kpthread" = "yes"
+then
+    CC="$CC -Kpthread"
+    if test "$ac_cv_cxx_thread" = "yes"; then
+        CXX="$CXX -Kpthread"
+    fi
+    AC_DEFINE(WITH_THREAD)
+    posix_threads=yes
+    THREADOBJ="Python/thread.o"
+elif test "$ac_cv_kthread" = "yes"
+then
+    CC="$CC -Kthread"
+    if test "$ac_cv_cxx_thread" = "yes"; then
+        CXX="$CXX -Kthread"
+    fi
+    AC_DEFINE(WITH_THREAD)
+    posix_threads=yes
+    THREADOBJ="Python/thread.o"
+elif test "$ac_cv_pthread" = "yes"
+then
+    CC="$CC -pthread"
+    if test "$ac_cv_cxx_thread" = "yes"; then
+        CXX="$CXX -pthread"
+    fi
+    AC_DEFINE(WITH_THREAD)
+    posix_threads=yes
+    THREADOBJ="Python/thread.o"
+else
+    if test ! -z "$with_threads" -a -d "$with_threads"
+    then LDFLAGS="$LDFLAGS -L$with_threads"
+    fi
+    if test ! -z "$withval" -a -d "$withval"
+    then LDFLAGS="$LDFLAGS -L$withval"
+    fi
+
+    # According to the POSIX spec, a pthreads implementation must
+    # define _POSIX_THREADS in unistd.h. Some apparently don't
+    # (e.g. gnu pth with pthread emulation)
+    AC_MSG_CHECKING(for _POSIX_THREADS in unistd.h)
+    AC_EGREP_CPP(yes,
+    [
+#include <unistd.h>
+#ifdef _POSIX_THREADS
+yes
+#endif
+    ], unistd_defines_pthreads=yes, unistd_defines_pthreads=no)
+    AC_MSG_RESULT($unistd_defines_pthreads)
+
+    AC_DEFINE(_REENTRANT)
+    AC_CHECK_HEADER(cthreads.h, [AC_DEFINE(WITH_THREAD)
+    AC_DEFINE(C_THREADS)
+    AC_DEFINE(HURD_C_THREADS, 1,
+    [Define if you are using Mach cthreads directly under /include])
+    LIBS="$LIBS -lthreads"
+    THREADOBJ="Python/thread.o"],[
+    AC_CHECK_HEADER(mach/cthreads.h, [AC_DEFINE(WITH_THREAD)
+    AC_DEFINE(C_THREADS)
+    AC_DEFINE(MACH_C_THREADS, 1,
+    [Define if you are using Mach cthreads under mach /])
+    THREADOBJ="Python/thread.o"],[
+    # Just looking for pthread_create in libpthread is not enough:
+    # on HP/UX, pthread.h renames pthread_create to a different symbol name.
+    # So we really have to include pthread.h, and then link.
+    _libs=$LIBS
+    LIBS="$LIBS -lpthread"
+    AC_MSG_CHECKING([for pthread_create in -lpthread])
+    AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include <pthread.h>
+
+void * start_routine (void *arg) { exit (0); }]], [[
+pthread_create (NULL, NULL, start_routine, NULL)]])],[
+    AC_MSG_RESULT(yes)
+    AC_DEFINE(WITH_THREAD)
+    posix_threads=yes
+    THREADOBJ="Python/thread.o"],[
+    LIBS=$_libs
+    AC_CHECK_FUNC(pthread_detach, [AC_DEFINE(WITH_THREAD)
+    posix_threads=yes
+    THREADOBJ="Python/thread.o"],[
+    AC_CHECK_LIB(pthreads, pthread_create, [AC_DEFINE(WITH_THREAD)
+    posix_threads=yes
+    LIBS="$LIBS -lpthreads"
+    THREADOBJ="Python/thread.o"], [
+    AC_CHECK_LIB(c_r, pthread_create, [AC_DEFINE(WITH_THREAD)
+    posix_threads=yes
+    LIBS="$LIBS -lc_r"
+    THREADOBJ="Python/thread.o"], [
+    AC_CHECK_LIB(pthread, __pthread_create_system, [AC_DEFINE(WITH_THREAD)
+    posix_threads=yes
+    LIBS="$LIBS -lpthread"
+    THREADOBJ="Python/thread.o"], [
+    AC_CHECK_LIB(cma, pthread_create, [AC_DEFINE(WITH_THREAD)
+    posix_threads=yes
+    LIBS="$LIBS -lcma"
+    THREADOBJ="Python/thread.o"],[
+    USE_THREAD_MODULE="#"])
+    ])])])])])])])
+
+    AC_CHECK_LIB(mpc, usconfig, [AC_DEFINE(WITH_THREAD)
+    LIBS="$LIBS -lmpc"
+    THREADOBJ="Python/thread.o"
+    USE_THREAD_MODULE=""])
+
+    if test "$posix_threads" != "yes"; then     
+      AC_CHECK_LIB(thread, thr_create, [AC_DEFINE(WITH_THREAD)
+      LIBS="$LIBS -lthread"
+      THREADOBJ="Python/thread.o"
+      USE_THREAD_MODULE=""])
+    fi
+
+    if test "$USE_THREAD_MODULE" != "#"
+    then
+        # If the above checks didn't disable threads, (at least) OSF1
+        # needs this '-threads' argument during linking.
+        case $ac_sys_system in
+        OSF1) LDLAST=-threads;;
+        esac
+    fi
+fi
+
+if test "$posix_threads" = "yes"; then
+      if test "$unistd_defines_pthreads" = "no"; then
+         AC_DEFINE(_POSIX_THREADS, 1,
+         [Define if you have POSIX threads,
+          and your system's unistd.h does not define _POSIX_THREADS itself.])
+      fi
+
+      # Bug 662787: Using semaphores causes inexplicable hangs on Solaris 8.
+      case  $ac_sys_system/$ac_sys_release in
+      SunOS/5.6) AC_DEFINE(HAVE_PTHREAD_DESTRUCTOR, 1,
+                       [Defined for Solaris 2.6 bug in pthread header.])
+		       ;;
+      SunOS/5.8) AC_DEFINE(HAVE_BROKEN_POSIX_SEMAPHORES, 1,
+		       [Define if the Posix semaphores do not work on your system])
+		       ;;
+      AIX/*) AC_DEFINE(HAVE_BROKEN_POSIX_SEMAPHORES, 1,
+		       [Define if the Posix semaphores do not work on your system])
+		       ;;
+      esac
+
+      AC_MSG_CHECKING(if PTHREAD_SCOPE_SYSTEM is supported)
+      AC_CACHE_VAL(ac_cv_pthread_system_supported,
+      [AC_RUN_IFELSE([AC_LANG_SOURCE([[#include <pthread.h>
+      void *foo(void *parm) {
+        return NULL;
+      }
+      main() {
+        pthread_attr_t attr;
+        pthread_t id;
+        if (pthread_attr_init(&attr)) exit(-1);
+        if (pthread_attr_setscope(&attr, PTHREAD_SCOPE_SYSTEM)) exit(-1);
+        if (pthread_create(&id, &attr, foo, NULL)) exit(-1);
+        exit(0);
+      }]])],
+      [ac_cv_pthread_system_supported=yes],
+      [ac_cv_pthread_system_supported=no],
+      [ac_cv_pthread_system_supported=no])
+      ])
+      AC_MSG_RESULT($ac_cv_pthread_system_supported)
+      if test "$ac_cv_pthread_system_supported" = "yes"; then
+        AC_DEFINE(PTHREAD_SYSTEM_SCHED_SUPPORTED, 1, [Defined if PTHREAD_SCOPE_SYSTEM supported.])
+      fi
+      AC_CHECK_FUNCS(pthread_sigmask,
+        [case $ac_sys_system in
+        CYGWIN*)
+          AC_DEFINE(HAVE_BROKEN_PTHREAD_SIGMASK, 1,
+            [Define if pthread_sigmask() does not work on your system.])
+            ;;
+        esac])
+fi
+
+
+# Check for enable-ipv6
+AH_TEMPLATE(ENABLE_IPV6, [Define if --enable-ipv6 is specified])
+AC_MSG_CHECKING([if --enable-ipv6 is specified])
+AC_ARG_ENABLE(ipv6,
+[  --enable-ipv6           Enable ipv6 (with ipv4) support
+  --disable-ipv6          Disable ipv6 support],
+[ case "$enableval" in
+  no)
+       AC_MSG_RESULT(no)
+       ipv6=no
+       ;;
+  *)   AC_MSG_RESULT(yes)
+       AC_DEFINE(ENABLE_IPV6)
+       ipv6=yes
+       ;;
+  esac ],
+
+[
+dnl the check does not work in the cross-compilation case...
+  AC_RUN_IFELSE([AC_LANG_SOURCE([[ /* AF_INET6 available check */
+#include <sys/types.h>
+#include <sys/socket.h>
+main()
+{
+ if (socket(AF_INET6, SOCK_STREAM, 0) < 0)
+   exit(1);
+ else
+   exit(0);
+}
+]])],[
+  AC_MSG_RESULT(yes)
+  ipv6=yes
+],[
+  AC_MSG_RESULT(no)
+  ipv6=no
+],[
+  AC_MSG_RESULT(no)
+  ipv6=no
+])
+
+if test "$ipv6" = "yes"; then
+	AC_MSG_CHECKING(if RFC2553 API is available)
+	AC_COMPILE_IFELSE([
+	  AC_LANG_PROGRAM([[#include <sys/types.h>
+#include <netinet/in.h>]],
+			  [[struct sockaddr_in6 x;
+			    x.sin6_scope_id;]])
+	],[
+	  AC_MSG_RESULT(yes)
+	  ipv6=yes
+	],[
+	  AC_MSG_RESULT(no, IPv6 disabled)
+	  ipv6=no
+	])
+fi
+
+if test "$ipv6" = "yes"; then
+	AC_DEFINE(ENABLE_IPV6)
+fi
+])
+
+ipv6type=unknown
+ipv6lib=none
+ipv6trylibc=no
+
+if test "$ipv6" = "yes"; then
+	AC_MSG_CHECKING([ipv6 stack type])
+	for i in inria kame linux-glibc linux-inet6 solaris toshiba v6d zeta;
+	do
+		case $i in
+		inria)
+			dnl http://www.kame.net/
+			AC_EGREP_CPP(yes, [
+#include <netinet/in.h>
+#ifdef IPV6_INRIA_VERSION
+yes
+#endif],
+				[ipv6type=$i])
+			;;
+		kame)
+			dnl http://www.kame.net/
+			AC_EGREP_CPP(yes, [
+#include <netinet/in.h>
+#ifdef __KAME__
+yes
+#endif],
+				[ipv6type=$i;
+				ipv6lib=inet6
+				ipv6libdir=/usr/local/v6/lib
+				ipv6trylibc=yes])
+			;;
+		linux-glibc)
+			dnl http://www.v6.linux.or.jp/
+			AC_EGREP_CPP(yes, [
+#include <features.h>
+#if defined(__GLIBC__) && ((__GLIBC__ == 2 && __GLIBC_MINOR__ >= 1) || (__GLIBC__ > 2))
+yes
+#endif],
+				[ipv6type=$i;
+				ipv6trylibc=yes])
+			;;
+		linux-inet6)
+			dnl http://www.v6.linux.or.jp/
+			if test -d /usr/inet6; then
+				ipv6type=$i
+				ipv6lib=inet6
+				ipv6libdir=/usr/inet6/lib
+				BASECFLAGS="-I/usr/inet6/include $BASECFLAGS"
+			fi
+			;;
+		solaris)
+			if test -f /etc/netconfig; then
+                          if $GREP -q tcp6 /etc/netconfig; then
+				ipv6type=$i
+				ipv6trylibc=yes
+                          fi
+                        fi
+			;;
+		toshiba)
+			AC_EGREP_CPP(yes, [
+#include <sys/param.h>
+#ifdef _TOSHIBA_INET6
+yes
+#endif],
+				[ipv6type=$i;
+				ipv6lib=inet6;
+				ipv6libdir=/usr/local/v6/lib])
+			;;
+		v6d)
+			AC_EGREP_CPP(yes, [
+#include </usr/local/v6/include/sys/v6config.h>
+#ifdef __V6D__
+yes
+#endif],
+				[ipv6type=$i;
+				ipv6lib=v6;
+				ipv6libdir=/usr/local/v6/lib;
+				BASECFLAGS="-I/usr/local/v6/include $BASECFLAGS"])
+			;;
+		zeta)
+			AC_EGREP_CPP(yes, [
+#include <sys/param.h>
+#ifdef _ZETA_MINAMI_INET6
+yes
+#endif],
+				[ipv6type=$i;
+				ipv6lib=inet6;
+				ipv6libdir=/usr/local/v6/lib])
+			;;
+		esac
+		if test "$ipv6type" != "unknown"; then
+			break
+		fi
+	done
+	AC_MSG_RESULT($ipv6type)
+fi
+
+if test "$ipv6" = "yes" -a "$ipv6lib" != "none"; then
+	if test -d $ipv6libdir -a -f $ipv6libdir/lib$ipv6lib.a; then
+		LIBS="-L$ipv6libdir -l$ipv6lib $LIBS"
+		echo "using lib$ipv6lib"
+	else
+		if test $ipv6trylibc = "yes"; then
+			echo "using libc"
+		else
+			echo "Fatal: no $ipv6lib library found.  Cannot continue."
+			echo "You need to fetch lib$ipv6lib.a from an appropriate"
+			echo "IPv6 kit and compile it beforehand."
+			exit 1
+		fi
+	fi
+fi
+
+AC_MSG_CHECKING(for OSX 10.5 SDK or later)
+AC_COMPILE_IFELSE([
+  AC_LANG_PROGRAM([[#include <Carbon/Carbon.h>]], [[FSIORefNum fRef = 0]])
+],[
+  AC_DEFINE(HAVE_OSX105_SDK, 1, [Define if compiling using MacOS X 10.5 SDK or later.])
+  AC_MSG_RESULT(yes)
+],[
+  AC_MSG_RESULT(no)
+])
+
+# Check for --with-doc-strings
+AC_MSG_CHECKING(for --with-doc-strings)
+AC_ARG_WITH(doc-strings,
+            AS_HELP_STRING([--with(out)-doc-strings], [disable/enable documentation strings]))
+
+if test -z "$with_doc_strings"
+then with_doc_strings="yes"
+fi
+if test "$with_doc_strings" != "no"
+then
+    AC_DEFINE(WITH_DOC_STRINGS, 1,
+      [Define if you want documentation strings in extension modules])
+fi
+AC_MSG_RESULT($with_doc_strings)
+
+# Check if eval loop should use timestamp counter profiling
+AC_MSG_CHECKING(for --with-tsc)
+AC_ARG_WITH(tsc,
+	    AS_HELP_STRING([--with(out)-tsc],[enable/disable timestamp counter profile]),[
+if test "$withval" != no
+then 
+  AC_DEFINE(WITH_TSC, 1, 
+    [Define to profile with the Pentium timestamp counter]) 
+    AC_MSG_RESULT(yes)
+else AC_MSG_RESULT(no)
+fi],
+[AC_MSG_RESULT(no)])
+
+# Check for Python-specific malloc support
+AC_MSG_CHECKING(for --with-pymalloc)
+AC_ARG_WITH(pymalloc,
+            AS_HELP_STRING([--with(out)-pymalloc], [disable/enable specialized mallocs]))
+
+if test -z "$with_pymalloc"
+then
+    with_pymalloc="yes"
+    ABIFLAGS="${ABIFLAGS}m"
+fi
+if test "$with_pymalloc" != "no"
+then
+    AC_DEFINE(WITH_PYMALLOC, 1, 
+     [Define if you want to compile in Python-specific mallocs])
+fi
+AC_MSG_RESULT($with_pymalloc)
+
+# Check for Valgrind support
+AC_MSG_CHECKING([for --with-valgrind])
+AC_ARG_WITH([valgrind],
+  AS_HELP_STRING([--with-valgrind], [Enable Valgrind support]),,
+  with_valgrind=no)
+AC_MSG_RESULT([$with_valgrind])
+if test "$with_valgrind" != no; then
+    AC_CHECK_HEADER([valgrind/valgrind.h],
+      [AC_DEFINE([WITH_VALGRIND], 1, [Define if you want pymalloc to be disabled when running under valgrind])],
+      [AC_MSG_ERROR([Valgrind support requested but headers not available])]
+    )
+    OPT="-DDYNAMIC_ANNOTATIONS_ENABLED=1 $OPT"
+fi
+
+# -I${DLINCLDIR} is added to the compile rule for importdl.o
+AC_SUBST(DLINCLDIR)
+DLINCLDIR=.
+
+# the dlopen() function means we might want to use dynload_shlib.o. some
+# platforms, such as AIX, have dlopen(), but don't want to use it.
+AC_CHECK_FUNCS(dlopen)
+
+# DYNLOADFILE specifies which dynload_*.o file we will use for dynamic
+# loading of modules.
+AC_SUBST(DYNLOADFILE)
+AC_MSG_CHECKING(DYNLOADFILE)
+if test -z "$DYNLOADFILE"
+then
+	case $ac_sys_system/$ac_sys_release in
+	AIX*) # Use dynload_shlib.c and dlopen() if we have it; otherwise dynload_aix.c
+	if test "$ac_cv_func_dlopen" = yes
+	then DYNLOADFILE="dynload_shlib.o"
+	else DYNLOADFILE="dynload_aix.o"
+	fi
+	;;
+	hp*|HP*) DYNLOADFILE="dynload_hpux.o";;
+	# Use dynload_next.c only on 10.2 and below, which don't have native dlopen()
+	Darwin/@<:@0156@:>@\..*) DYNLOADFILE="dynload_next.o";;
+	*)
+	# use dynload_shlib.c and dlopen() if we have it; otherwise stub
+	# out any dynamic loading
+	if test "$ac_cv_func_dlopen" = yes
+	then DYNLOADFILE="dynload_shlib.o"
+	else DYNLOADFILE="dynload_stub.o"
+	fi
+	;;
+	esac
+fi
+AC_MSG_RESULT($DYNLOADFILE)
+if test "$DYNLOADFILE" != "dynload_stub.o"
+then
+	AC_DEFINE(HAVE_DYNAMIC_LOADING, 1,
+        [Defined when any dynamic module loading is enabled.])
+fi
+
+# MACHDEP_OBJS can be set to platform-specific object files needed by Python
+
+AC_SUBST(MACHDEP_OBJS)
+AC_MSG_CHECKING(MACHDEP_OBJS)
+if test -z "$MACHDEP_OBJS"
+then
+	MACHDEP_OBJS=$extra_machdep_objs
+else
+	MACHDEP_OBJS="$MACHDEP_OBJS $extra_machdep_objs"
+fi
+AC_MSG_RESULT($MACHDEP_OBJS)
+
+# checks for library functions
+AC_CHECK_FUNCS(alarm accept4 setitimer getitimer bind_textdomain_codeset chown \
+ clock confstr ctermid execv fchmod fchown fork fpathconf ftime ftruncate \
+ gai_strerror getgroups getlogin getloadavg getpeername getpgid getpid \
+ getpriority getresuid getresgid getpwent getspnam getspent getsid getwd \
+ initgroups kill killpg lchmod lchown lstat mbrtowc mkfifo mknod mktime \
+ mremap nice pathconf pause plock poll pthread_init \
+ putenv readlink realpath \
+ select sem_open sem_timedwait sem_getvalue sem_unlink setegid seteuid \
+ setgid \
+ setlocale setregid setreuid setresuid setresgid setsid setpgid setpgrp setuid setvbuf \
+ sigaction siginterrupt sigrelse snprintf strftime strlcpy \
+ sysconf tcgetpgrp tcsetpgrp tempnam timegm times tmpfile tmpnam tmpnam_r \
+ truncate uname unsetenv utimes waitpid wait3 wait4 \
+ wcscoll wcsftime wcsxfrm _getpty)
+
+AC_CHECK_DECL(dirfd,
+    AC_DEFINE(HAVE_DIRFD, 1,
+              Define if you have the 'dirfd' function or macro.), ,
+      [#include <sys/types.h>
+       #include <dirent.h>])
+
+# For some functions, having a definition is not sufficient, since
+# we want to take their address.
+AC_MSG_CHECKING(for chroot)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[void *x=chroot]])],
+  [AC_DEFINE(HAVE_CHROOT, 1, Define if you have the 'chroot' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+AC_MSG_CHECKING(for link)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[void *x=link]])],
+  [AC_DEFINE(HAVE_LINK, 1, Define if you have the 'link' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+AC_MSG_CHECKING(for symlink)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[void *x=symlink]])],
+  [AC_DEFINE(HAVE_SYMLINK, 1, Define if you have the 'symlink' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+AC_MSG_CHECKING(for fchdir)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[void *x=fchdir]])],
+  [AC_DEFINE(HAVE_FCHDIR, 1, Define if you have the 'fchdir' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+AC_MSG_CHECKING(for fsync)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[void *x=fsync]])],
+  [AC_DEFINE(HAVE_FSYNC, 1, Define if you have the 'fsync' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+AC_MSG_CHECKING(for fdatasync)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[void *x=fdatasync]])],
+  [AC_DEFINE(HAVE_FDATASYNC, 1, Define if you have the 'fdatasync' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+AC_MSG_CHECKING(for epoll)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/epoll.h>]], [[void *x=epoll_create]])],
+  [AC_DEFINE(HAVE_EPOLL, 1, Define if you have the 'epoll' functions.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+AC_MSG_CHECKING(for kqueue)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#include <sys/types.h>
+#include <sys/event.h>
+    ]], [[int x=kqueue()]])],
+  [AC_DEFINE(HAVE_KQUEUE, 1, Define if you have the 'kqueue' functions.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+# On some systems (e.g. FreeBSD 5), we would find a definition of the
+# functions ctermid_r, setgroups in the library, but no prototype
+# (e.g. because we use _XOPEN_SOURCE). See whether we can take their
+# address to avoid compiler warnings and potential miscompilations
+# because of the missing prototypes.
+
+AC_MSG_CHECKING(for ctermid_r)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#include <stdio.h>
+]], [[void* p = ctermid_r]])],
+  [AC_DEFINE(HAVE_CTERMID_R, 1, Define if you have the 'ctermid_r' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+
+AC_CACHE_CHECK([for flock declaration], [ac_cv_flock_decl],
+  [AC_COMPILE_IFELSE(
+    [AC_LANG_PROGRAM(
+      [#include <sys/file.h>],
+      [void* p = flock]
+    )],
+    [ac_cv_flock_decl=yes],
+    [ac_cv_flock_decl=no]
+  )
+])
+if test "x${ac_cv_flock_decl}" = xyes; then
+  AC_CHECK_FUNCS(flock,,
+    AC_CHECK_LIB(bsd,flock,
+      [AC_DEFINE(HAVE_FLOCK)
+       AC_DEFINE(FLOCK_NEEDS_LIBBSD, 1, Define if flock needs to be linked with bsd library.)
+    ])
+  )
+fi
+
+AC_MSG_CHECKING(for getpagesize)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#include <unistd.h>
+]], [[void* p = getpagesize]])],
+  [AC_DEFINE(HAVE_GETPAGESIZE, 1, Define if you have the 'getpagesize' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+
+AC_MSG_CHECKING(for broken unsetenv)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#include <stdlib.h>
+]], [[int res = unsetenv("DUMMY")]])],
+  [AC_MSG_RESULT(no)],
+  [AC_DEFINE(HAVE_BROKEN_UNSETENV, 1, Define if `unsetenv` does not return an int.)
+   AC_MSG_RESULT(yes)
+])
+
+dnl check for true
+AC_CHECK_PROGS(TRUE, true, /bin/true)
+
+dnl On some systems (e.g. Solaris 9), hstrerror and inet_aton are in -lresolv
+dnl On others, they are in the C library, so we need to take no action
+AC_CHECK_LIB(c, inet_aton, [$ac_cv_prog_TRUE],
+  AC_CHECK_LIB(resolv, inet_aton)
+)
+
+# On Tru64, chflags seems to be present, but calling it will
+# exit Python
+AC_CACHE_CHECK([for chflags], [ac_cv_have_chflags], [dnl
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <sys/stat.h>
+#include <unistd.h>
+int main(int argc, char*argv[])
+{
+  if(chflags(argv[0], 0) != 0)
+    return 1;
+  return 0;
+}
+]])],
+[ac_cv_have_chflags=yes],
+[ac_cv_have_chflags=no],
+[ac_cv_have_chflags=cross])
+])
+if test "$ac_cv_have_chflags" = cross ; then
+  AC_CHECK_FUNC([chflags], [ac_cv_have_chflags="yes"], [ac_cv_have_chflags="no"])
+fi
+if test "$ac_cv_have_chflags" = yes ; then
+  AC_DEFINE(HAVE_CHFLAGS, 1, [Define to 1 if you have the 'chflags' function.])
+fi
+
+AC_CACHE_CHECK([for lchflags], [ac_cv_have_lchflags], [dnl
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <sys/stat.h>
+#include <unistd.h>
+int main(int argc, char*argv[])
+{
+  if(lchflags(argv[0], 0) != 0)
+    return 1;
+  return 0;
+}
+]])],[ac_cv_have_lchflags=yes],[ac_cv_have_lchflags=no],[ac_cv_have_lchflags=cross])
+])
+if test "$ac_cv_have_lchflags" = cross ; then
+  AC_CHECK_FUNC([lchflags], [ac_cv_have_lchflags="yes"], [ac_cv_have_lchflags="no"])
+fi
+if test "$ac_cv_have_lchflags" = yes ; then
+  AC_DEFINE(HAVE_LCHFLAGS, 1, [Define to 1 if you have the 'lchflags' function.])
+fi
+
+dnl Check if system zlib has *Copy() functions
+dnl
+dnl On MacOSX the linker will search for dylibs on the entire linker path
+dnl before searching for static libraries. setup.py adds -Wl,-search_paths_first
+dnl to revert to a more traditional unix behaviour and make it possible to
+dnl override the system libz with a local static library of libz. Temporarily
+dnl add that flag to our CFLAGS as well to ensure that we check the version
+dnl of libz that will be used by setup.py. 
+dnl The -L/usr/local/lib is needed as well to get the same compilation
+dnl environment as setup.py (and leaving it out can cause configure to use the
+dnl wrong version of the library)
+case $ac_sys_system/$ac_sys_release in
+Darwin/*) 
+	_CUR_CFLAGS="${CFLAGS}"
+	_CUR_LDFLAGS="${LDFLAGS}"
+	CFLAGS="${CFLAGS} -Wl,-search_paths_first"
+	LDFLAGS="${LDFLAGS} -Wl,-search_paths_first -L/usr/local/lib"
+	;;
+esac
+
+AC_CHECK_LIB(z, inflateCopy, AC_DEFINE(HAVE_ZLIB_COPY, 1, [Define if the zlib library has inflateCopy]))
+
+case $ac_sys_system/$ac_sys_release in
+Darwin/*) 
+	CFLAGS="${_CUR_CFLAGS}"
+	LDFLAGS="${_CUR_LDFLAGS}"
+	;;
+esac
+
+AC_MSG_CHECKING(for hstrerror)
+AC_LINK_IFELSE([AC_LANG_PROGRAM([[
+#include <netdb.h>
+]], [[void* p = hstrerror; hstrerror(0)]])],
+  [AC_DEFINE(HAVE_HSTRERROR, 1, Define if you have the 'hstrerror' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+
+AC_MSG_CHECKING(for inet_aton)
+AC_LINK_IFELSE([AC_LANG_PROGRAM([[
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netinet/in.h>
+#include <arpa/inet.h>
+]], [[void* p = inet_aton;inet_aton(0,0)]])],
+  [AC_DEFINE(HAVE_INET_ATON, 1, Define if you have the 'inet_aton' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+
+AC_MSG_CHECKING(for inet_pton)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netinet/in.h>
+#include <arpa/inet.h>
+]], [[void* p = inet_pton]])],
+  [AC_DEFINE(HAVE_INET_PTON, 1, Define if you have the 'inet_pton' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+
+# On some systems, setgroups is in unistd.h, on others, in grp.h
+AC_MSG_CHECKING(for setgroups)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#include <unistd.h>
+#ifdef HAVE_GRP_H
+#include <grp.h>
+#endif
+]], [[void* p = setgroups]])],
+  [AC_DEFINE(HAVE_SETGROUPS, 1, Define if you have the 'setgroups' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)
+])
+
+# check for openpty and forkpty
+
+AC_CHECK_FUNCS(openpty,, 
+   AC_CHECK_LIB(util,openpty,
+     [AC_DEFINE(HAVE_OPENPTY) LIBS="$LIBS -lutil"],
+     AC_CHECK_LIB(bsd,openpty, [AC_DEFINE(HAVE_OPENPTY) LIBS="$LIBS -lbsd"])
+   )
+)
+AC_CHECK_FUNCS(forkpty,, 
+   AC_CHECK_LIB(util,forkpty, 
+     [AC_DEFINE(HAVE_FORKPTY) LIBS="$LIBS -lutil"],
+     AC_CHECK_LIB(bsd,forkpty, [AC_DEFINE(HAVE_FORKPTY) LIBS="$LIBS -lbsd"])
+   )
+)
+
+# Stuff for expat.
+AC_CHECK_FUNCS(memmove)
+
+# check for long file support functions
+AC_CHECK_FUNCS(fseek64 fseeko fstatvfs ftell64 ftello statvfs)
+
+AC_REPLACE_FUNCS(dup2 getcwd strdup)
+AC_CHECK_FUNCS(getpgrp, 
+  AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[getpgrp(0);]])],
+    [AC_DEFINE(GETPGRP_HAVE_ARG, 1, [Define if getpgrp() must be called as getpgrp(0).])],
+    [])
+)
+AC_CHECK_FUNCS(setpgrp,
+  AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[setpgrp(0,0);]])],
+    [AC_DEFINE(SETPGRP_HAVE_ARG, 1, [Define if setpgrp() must be called as setpgrp(0, 0).])],
+    [])
+)
+AC_CHECK_FUNCS(gettimeofday, 
+  AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/time.h>]],
+  				     [[gettimeofday((struct timeval*)0,(struct timezone*)0);]])],
+    [],
+    [AC_DEFINE(GETTIMEOFDAY_NO_TZ, 1,
+      [Define if gettimeofday() does not have a second (timezone) argument.
+       This is the case on Motorola V4 (R40V4.2).])
+    ])
+)
+
+AC_MSG_CHECKING(for major, minor, and makedev)
+AC_LINK_IFELSE([AC_LANG_PROGRAM([[
+#if defined(MAJOR_IN_MKDEV)
+#include <sys/mkdev.h>
+#elif defined(MAJOR_IN_SYSMACROS)
+#include <sys/sysmacros.h>
+#else
+#include <sys/types.h>
+#endif
+]], [[
+  makedev(major(0),minor(0));
+]])],[
+  AC_DEFINE(HAVE_DEVICE_MACROS, 1,
+	    [Define to 1 if you have the device macros.])
+  AC_MSG_RESULT(yes)
+],[
+  AC_MSG_RESULT(no)
+])
+
+# On OSF/1 V5.1, getaddrinfo is available, but there is also a define
+# for [no]getaddrinfo in netdb.h, so check by compiling and linking a call.
+AC_MSG_CHECKING(for getaddrinfo)
+AC_LINK_IFELSE([AC_LANG_PROGRAM([[
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netdb.h>
+#include <stdio.h>
+]], [[getaddrinfo(NULL, NULL, NULL, NULL);]])],
+[have_getaddrinfo=yes],
+[have_getaddrinfo=no])
+AC_MSG_RESULT($have_getaddrinfo)
+if test $have_getaddrinfo = yes
+then
+  AC_MSG_CHECKING(getaddrinfo bug)
+  AC_CACHE_VAL(ac_cv_buggy_getaddrinfo,
+  AC_RUN_IFELSE([AC_LANG_SOURCE([[[
+#include <sys/types.h>
+#include <netdb.h>
+#include <string.h>
+#include <sys/socket.h>
+#include <netinet/in.h>
+
+int main()
+{
+  int passive, gaierr, inet4 = 0, inet6 = 0;
+  struct addrinfo hints, *ai, *aitop;
+  char straddr[INET6_ADDRSTRLEN], strport[16];
+
+  for (passive = 0; passive <= 1; passive++) {
+    memset(&hints, 0, sizeof(hints));
+    hints.ai_family = AF_UNSPEC;
+    hints.ai_flags = passive ? AI_PASSIVE : 0;
+    hints.ai_socktype = SOCK_STREAM;
+    hints.ai_protocol = IPPROTO_TCP;
+    if ((gaierr = getaddrinfo(NULL, "54321", &hints, &aitop)) != 0) {
+      (void)gai_strerror(gaierr);
+      goto bad;
+    }
+    for (ai = aitop; ai; ai = ai->ai_next) {
+      if (ai->ai_addr == NULL ||
+          ai->ai_addrlen == 0 ||
+          getnameinfo(ai->ai_addr, ai->ai_addrlen,
+                      straddr, sizeof(straddr), strport, sizeof(strport),
+                      NI_NUMERICHOST|NI_NUMERICSERV) != 0) {
+        goto bad;
+      }
+      switch (ai->ai_family) {
+      case AF_INET:
+        if (strcmp(strport, "54321") != 0) {
+          goto bad;
+        }
+        if (passive) {
+          if (strcmp(straddr, "0.0.0.0") != 0) {
+            goto bad;
+          }
+        } else {
+          if (strcmp(straddr, "127.0.0.1") != 0) {
+            goto bad;
+          }
+        }
+        inet4++;
+        break;
+      case AF_INET6:
+        if (strcmp(strport, "54321") != 0) {
+          goto bad;
+        }
+        if (passive) {
+          if (strcmp(straddr, "::") != 0) {
+            goto bad;
+          }
+        } else {
+          if (strcmp(straddr, "::1") != 0) {
+            goto bad;
+          }
+        }
+        inet6++;
+        break;
+      case AF_UNSPEC:
+        goto bad;
+        break;
+      default:
+        /* another family support? */
+        break;
+      }
+    }
+  }
+
+  if (!(inet4 == 0 || inet4 == 2))
+    goto bad;
+  if (!(inet6 == 0 || inet6 == 2))
+    goto bad;
+
+  if (aitop)
+    freeaddrinfo(aitop);
+  return 0;
+
+ bad:
+  if (aitop)
+    freeaddrinfo(aitop);
+  return 1;
+}
+]]])],
+[ac_cv_buggy_getaddrinfo=no],
+[ac_cv_buggy_getaddrinfo=yes],
+[ac_cv_buggy_getaddrinfo=yes]))
+fi
+
+AC_MSG_RESULT($ac_cv_buggy_getaddrinfo)
+
+if test $have_getaddrinfo = no -o "$ac_cv_buggy_getaddrinfo" = yes
+then
+	if test $ipv6 = yes
+	then
+		echo 'Fatal: You must get a working getaddrinfo() function,'
+		echo '       or you can specify "--disable-ipv6".'
+		exit 1
+	fi
+else
+	AC_DEFINE(HAVE_GETADDRINFO, 1, [Define if you have the getaddrinfo function.])
+fi
+
+AC_CHECK_FUNCS(getnameinfo)
+
+# checks for structures
+AC_HEADER_TIME
+AC_STRUCT_TM
+AC_STRUCT_TIMEZONE
+AC_CHECK_MEMBERS([struct stat.st_rdev])
+AC_CHECK_MEMBERS([struct stat.st_blksize])
+AC_CHECK_MEMBERS([struct stat.st_flags])
+AC_CHECK_MEMBERS([struct stat.st_gen])
+AC_CHECK_MEMBERS([struct stat.st_birthtime])
+AC_STRUCT_ST_BLOCKS
+
+AC_MSG_CHECKING(for time.h that defines altzone)
+AC_CACHE_VAL(ac_cv_header_time_altzone,[
+  AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <time.h>]], [[return altzone;]])],
+    [ac_cv_header_time_altzone=yes],
+    [ac_cv_header_time_altzone=no])
+  ])
+AC_MSG_RESULT($ac_cv_header_time_altzone)
+if test $ac_cv_header_time_altzone = yes; then
+  AC_DEFINE(HAVE_ALTZONE, 1, [Define this if your time.h defines altzone.])
+fi
+
+was_it_defined=no
+AC_MSG_CHECKING(whether sys/select.h and sys/time.h may both be included)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#include <sys/types.h>
+#include <sys/select.h>
+#include <sys/time.h>
+]], [[;]])],[
+  AC_DEFINE(SYS_SELECT_WITH_SYS_TIME, 1,
+  [Define if you can safely include both <sys/select.h> and <sys/time.h>
+   (which you can't on SCO ODT 3.0).])
+  was_it_defined=yes
+],[])
+AC_MSG_RESULT($was_it_defined)
+
+AC_MSG_CHECKING(for addrinfo)
+AC_CACHE_VAL(ac_cv_struct_addrinfo,
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <netdb.h>]], [[struct addrinfo a]])],
+  [ac_cv_struct_addrinfo=yes],
+  [ac_cv_struct_addrinfo=no]))
+AC_MSG_RESULT($ac_cv_struct_addrinfo)
+if test $ac_cv_struct_addrinfo = yes; then
+	AC_DEFINE(HAVE_ADDRINFO, 1, [struct addrinfo (netdb.h)])
+fi
+
+AC_MSG_CHECKING(for sockaddr_storage)
+AC_CACHE_VAL(ac_cv_struct_sockaddr_storage,
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#		include <sys/types.h>
+#		include <sys/socket.h>]], [[struct sockaddr_storage s]])],
+  [ac_cv_struct_sockaddr_storage=yes],
+  [ac_cv_struct_sockaddr_storage=no]))
+AC_MSG_RESULT($ac_cv_struct_sockaddr_storage)
+if test $ac_cv_struct_sockaddr_storage = yes; then
+	AC_DEFINE(HAVE_SOCKADDR_STORAGE, 1, [struct sockaddr_storage (sys/socket.h)])
+fi
+
+# checks for compiler characteristics
+
+AC_C_CHAR_UNSIGNED
+AC_C_CONST
+
+works=no
+AC_MSG_CHECKING(for working volatile)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[volatile int x; x = 0;]])],
+  [works=yes],
+  [AC_DEFINE(volatile, , [Define to empty if the keyword does not work.])]
+)
+AC_MSG_RESULT($works)
+
+works=no
+AC_MSG_CHECKING(for working signed char)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[signed char c;]])],
+  [works=yes],
+  [AC_DEFINE(signed, , [Define to empty if the keyword does not work.])]
+)
+AC_MSG_RESULT($works)
+
+have_prototypes=no
+AC_MSG_CHECKING(for prototypes)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[int foo(int x) { return 0; }]], [[return foo(10);]])],
+  [AC_DEFINE(HAVE_PROTOTYPES, 1, 
+     [Define if your compiler supports function prototype]) 
+   have_prototypes=yes],
+  []
+)
+AC_MSG_RESULT($have_prototypes)
+
+works=no
+AC_MSG_CHECKING(for variable length prototypes and stdarg.h)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#include <stdarg.h>
+int foo(int x, ...) {
+	va_list va;
+	va_start(va, x);
+	va_arg(va, int);
+	va_arg(va, char *);
+	va_arg(va, double);
+	return 0;
+}
+]], [[return foo(10, "", 3.14);]])],[
+  AC_DEFINE(HAVE_STDARG_PROTOTYPES, 1,
+   [Define if your compiler supports variable length function prototypes
+   (e.g. void fprintf(FILE *, char *, ...);) *and* <stdarg.h>]) 
+  works=yes
+],[])
+AC_MSG_RESULT($works)
+
+# check for socketpair
+AC_MSG_CHECKING(for socketpair)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#include <sys/types.h>
+#include <sys/socket.h>
+]], [[void *x=socketpair]])],
+  [AC_DEFINE(HAVE_SOCKETPAIR, 1, [Define if you have the 'socketpair' function.])
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)]
+)
+
+# check if sockaddr has sa_len member
+AC_MSG_CHECKING(if sockaddr has sa_len member)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/types.h>
+#include <sys/socket.h>]], [[struct sockaddr x;
+x.sa_len = 0;]])],
+  [AC_MSG_RESULT(yes)
+   AC_DEFINE(HAVE_SOCKADDR_SA_LEN, 1, [Define if sockaddr has sa_len member])],
+  [AC_MSG_RESULT(no)]
+)
+
+va_list_is_array=no
+AC_MSG_CHECKING(whether va_list is an array)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#ifdef HAVE_STDARG_PROTOTYPES
+#include <stdarg.h>
+#else
+#include <varargs.h>
+#endif
+]], [[va_list list1, list2; list1 = list2;]])],[],[
+ AC_DEFINE(VA_LIST_IS_ARRAY, 1, [Define if a va_list is an array of some kind]) 
+ va_list_is_array=yes
+])
+AC_MSG_RESULT($va_list_is_array)
+
+# sigh -- gethostbyname_r is a mess; it can have 3, 5 or 6 arguments :-(
+AH_TEMPLATE(HAVE_GETHOSTBYNAME_R,
+  [Define this if you have some version of gethostbyname_r()])
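+# For reference, a sketch of the variants probed below (platform attributions
+# are typical examples, not guarantees):
+#   6 args (e.g. glibc):    gethostbyname_r(name, he, buf, buflen, &res, &h_errnop)
+#   5 args (e.g. Solaris):  gethostbyname_r(name, he, buf, buflen, &h_errnop)
+#   3 args (e.g. AIX):      gethostbyname_r(name, he, &data)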
+
+AC_CHECK_FUNC(gethostbyname_r, [
+  AC_DEFINE(HAVE_GETHOSTBYNAME_R)
+  AC_MSG_CHECKING([gethostbyname_r with 6 args])
+  OLD_CFLAGS=$CFLAGS
+  CFLAGS="$CFLAGS $MY_CPPFLAGS $MY_THREAD_CPPFLAGS $MY_CFLAGS"
+  AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#   include <netdb.h>
+  ]], [[
+    char *name;
+    struct hostent *he, *res;
+    char buffer[2048];
+    int buflen = 2048;
+    int h_errnop;
+
+    (void) gethostbyname_r(name, he, buffer, buflen, &res, &h_errnop)
+  ]])],[
+    AC_DEFINE(HAVE_GETHOSTBYNAME_R)
+    AC_DEFINE(HAVE_GETHOSTBYNAME_R_6_ARG, 1,
+    [Define this if you have the 6-arg version of gethostbyname_r().])
+    AC_MSG_RESULT(yes)
+  ],[
+    AC_MSG_RESULT(no)
+    AC_MSG_CHECKING([gethostbyname_r with 5 args])
+    AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#       include <netdb.h>
+      ]], [[
+        char *name;
+        struct hostent *he;
+        char buffer[2048];
+        int buflen = 2048;
+        int h_errnop;
+
+        (void) gethostbyname_r(name, he, buffer, buflen, &h_errnop)
+      ]])],
+      [
+        AC_DEFINE(HAVE_GETHOSTBYNAME_R)
+        AC_DEFINE(HAVE_GETHOSTBYNAME_R_5_ARG, 1,
+          [Define this if you have the 5-arg version of gethostbyname_r().])
+        AC_MSG_RESULT(yes)
+      ], [
+        AC_MSG_RESULT(no)
+        AC_MSG_CHECKING([gethostbyname_r with 3 args])
+        AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#           include <netdb.h>
+          ]], [[
+            char *name;
+            struct hostent *he;
+            struct hostent_data data;
+
+            (void) gethostbyname_r(name, he, &data);
+          ]])],
+          [
+            AC_DEFINE(HAVE_GETHOSTBYNAME_R)
+            AC_DEFINE(HAVE_GETHOSTBYNAME_R_3_ARG, 1,
+              [Define this if you have the 3-arg version of gethostbyname_r().])
+            AC_MSG_RESULT(yes)
+          ], [
+           AC_MSG_RESULT(no)
+        ])
+    ])
+  ])
+  CFLAGS=$OLD_CFLAGS
+], [
+  AC_CHECK_FUNCS(gethostbyname)
+])
+AC_SUBST(HAVE_GETHOSTBYNAME_R_6_ARG)
+AC_SUBST(HAVE_GETHOSTBYNAME_R_5_ARG)
+AC_SUBST(HAVE_GETHOSTBYNAME_R_3_ARG)
+AC_SUBST(HAVE_GETHOSTBYNAME_R)
+AC_SUBST(HAVE_GETHOSTBYNAME)
+
+# checks for system services
+# (none yet)
+
+# Linux requires this for correct f.p. operations
+AC_CHECK_FUNC(__fpu_control,
+  [],
+  [AC_CHECK_LIB(ieee, __fpu_control)
+])
+
+# Check for --with-fpectl
+AC_MSG_CHECKING(for --with-fpectl)
+AC_ARG_WITH(fpectl,
+            AS_HELP_STRING([--with-fpectl], [enable SIGFPE catching]),
+[
+if test "$withval" != no
+then 
+  AC_DEFINE(WANT_SIGFPE_HANDLER, 1,
+  [Define if you want SIGFPE handled (see Include/pyfpe.h).]) 
+  AC_MSG_RESULT(yes)
+else AC_MSG_RESULT(no)
+fi],
+[AC_MSG_RESULT(no)])
+
+# check for --with-libm=...
+AC_SUBST(LIBM)
+case $ac_sys_system in
+Darwin) ;;
+*) LIBM=-lm
+esac
+AC_MSG_CHECKING(for --with-libm=STRING)
+AC_ARG_WITH(libm,
+            AS_HELP_STRING([--with-libm=STRING], [math library]),
+[
+if test "$withval" = no
+then LIBM=
+     AC_MSG_RESULT(force LIBM empty)
+elif test "$withval" != yes
+then LIBM=$withval
+     AC_MSG_RESULT(set LIBM="$withval")
+else AC_MSG_ERROR([proper usage is --with-libm=STRING])
+fi],
+[AC_MSG_RESULT(default LIBM="$LIBM")])
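+# Example invocations (illustrative; the alternative math library name is
+# hypothetical):
+#   ./configure --with-libm='-lmopt'   # link against libmopt instead of -lm
+#   ./configure --without-libm         # force LIBM empty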
+
+# check for --with-libc=...
+AC_SUBST(LIBC)
+AC_MSG_CHECKING(for --with-libc=STRING)
+AC_ARG_WITH(libc,
+            AS_HELP_STRING([--with-libc=STRING], [C library]),
+[
+if test "$withval" = no
+then LIBC=
+     AC_MSG_RESULT(force LIBC empty)
+elif test "$withval" != yes
+then LIBC=$withval
+     AC_MSG_RESULT(set LIBC="$withval")
+else AC_MSG_ERROR([proper usage is --with-libc=STRING])
+fi],
+[AC_MSG_RESULT(default LIBC="$LIBC")])
+
+# **************************************************
+# * Check for various properties of floating point *
+# **************************************************
+
+AC_MSG_CHECKING(whether C doubles are little-endian IEEE 754 binary64)
+AC_CACHE_VAL(ac_cv_little_endian_double, [
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <string.h>
+int main() {
+    double x = 9006104071832581.0;
+    if (memcmp(&x, "\x05\x04\x03\x02\x01\xff\x3f\x43", 8) == 0)
+        return 0;
+    else
+        return 1;
+}
+]])],
+[ac_cv_little_endian_double=yes],
+[ac_cv_little_endian_double=no],
+[ac_cv_little_endian_double=no])])
+AC_MSG_RESULT($ac_cv_little_endian_double)
+if test "$ac_cv_little_endian_double" = yes
+then
+  AC_DEFINE(DOUBLE_IS_LITTLE_ENDIAN_IEEE754, 1,
+  [Define if C doubles are 64-bit IEEE 754 binary format, stored
+   with the least significant byte first])
+fi
+
+AC_MSG_CHECKING(whether C doubles are big-endian IEEE 754 binary64)
+AC_CACHE_VAL(ac_cv_big_endian_double, [
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <string.h>
+int main() {
+    double x = 9006104071832581.0;
+    if (memcmp(&x, "\x43\x3f\xff\x01\x02\x03\x04\x05", 8) == 0)
+        return 0;
+    else
+        return 1;
+}
+]])],
+[ac_cv_big_endian_double=yes],
+[ac_cv_big_endian_double=no],
+[ac_cv_big_endian_double=no])])
+AC_MSG_RESULT($ac_cv_big_endian_double)
+if test "$ac_cv_big_endian_double" = yes
+then
+  AC_DEFINE(DOUBLE_IS_BIG_ENDIAN_IEEE754, 1,
+  [Define if C doubles are 64-bit IEEE 754 binary format, stored
+   with the most significant byte first])
+fi
+
+# Some ARM platforms use a mixed-endian representation for doubles.
+# While Python doesn't currently have full support for these platforms
+# (see e.g., issue 1762561), we can at least make sure that float <-> string
+# conversions work.
+AC_MSG_CHECKING(whether C doubles are ARM mixed-endian IEEE 754 binary64)
+AC_CACHE_VAL(ac_cv_mixed_endian_double, [
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <string.h>
+int main() {
+    double x = 9006104071832581.0;
+    if (memcmp(&x, "\x01\xff\x3f\x43\x05\x04\x03\x02", 8) == 0)
+        return 0;
+    else
+        return 1;
+}
+]])],
+[ac_cv_mixed_endian_double=yes],
+[ac_cv_mixed_endian_double=no],
+[ac_cv_mixed_endian_double=no])])
+AC_MSG_RESULT($ac_cv_mixed_endian_double)
+if test "$ac_cv_mixed_endian_double" = yes
+then
+  AC_DEFINE(DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754, 1,
+  [Define if C doubles are 64-bit IEEE 754 binary format, stored
+   in ARM mixed-endian order (byte order 45670123)])
+fi
+
+# The short float repr introduced in Python 3.1 requires the
+# correctly-rounded string <-> double conversion functions from
+# Python/dtoa.c, which in turn require that the FPU uses 53-bit
+# rounding; this is a problem on x86, where the x87 FPU has a default
+# rounding precision of 64 bits.  For gcc/x86, we can fix this by
+# using inline assembler to get and set the x87 FPU control word.
+
+# This inline assembler syntax may also work for suncc and icc,
+# so we try it on all platforms.
+
+AC_MSG_CHECKING(whether we can use gcc inline assembler to get and set x87 control word)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[
+  unsigned short cw;
+  __asm__ __volatile__ ("fnstcw %0" : "=m" (cw));
+  __asm__ __volatile__ ("fldcw %0" : : "m" (cw));
+]])],[have_gcc_asm_for_x87=yes],[have_gcc_asm_for_x87=no])
+AC_MSG_RESULT($have_gcc_asm_for_x87)
+if test "$have_gcc_asm_for_x87" = yes
+then
+    AC_DEFINE(HAVE_GCC_ASM_FOR_X87, 1,
+    [Define if we can use gcc inline assembler to get and set x87 control word])
+fi
+
+# Detect whether system arithmetic is subject to x87-style double
+# rounding issues.  The result of this test has little meaning on non-IEEE 754
+# platforms.  On IEEE 754 platforms, the test should return 1 if the rounding
+# mode is round-to-nearest and double rounding issues are present, and 0
+# otherwise.  See http://bugs.python.org/issue2937 for more info.
+AC_MSG_CHECKING(for x87-style double rounding)
+# $BASECFLAGS may affect the result
+ac_save_cc="$CC"
+CC="$CC $BASECFLAGS"
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <stdlib.h>
+#include <math.h>
+int main() {
+    volatile double x, y, z;
+    /* 1./(1-2**-53) -> 1+2**-52 (correct), 1.0 (double rounding) */
+    x = 0.99999999999999989; /* 1-2**-53 */
+    y = 1./x;
+    if (y != 1.)
+        exit(0);
+    /* 1e16+2.99999 -> 1e16+2. (correct), 1e16+4. (double rounding) */
+    x = 1e16;
+    y = 2.99999;
+    z = x + y;
+    if (z != 1e16+4.)
+        exit(0);
+    /* both tests show evidence of double rounding */
+    exit(1);
+}
+]])],
+[ac_cv_x87_double_rounding=no],
+[ac_cv_x87_double_rounding=yes],
+[ac_cv_x87_double_rounding=no])
+CC="$ac_save_cc"
+AC_MSG_RESULT($ac_cv_x87_double_rounding)
+if test "$ac_cv_x87_double_rounding" = yes
+then
+  AC_DEFINE(X87_DOUBLE_ROUNDING, 1,
+  [Define if arithmetic is subject to x87-style double rounding issue])
+fi
+
+# ************************************
+# * Check for mathematical functions *
+# ************************************
+
+LIBS_SAVE=$LIBS
+LIBS="$LIBS $LIBM"
+
+AC_CHECK_FUNCS([acosh asinh atanh copysign erf erfc expm1 finite gamma])
+AC_CHECK_FUNCS([hypot lgamma log1p round tgamma])
+AC_CHECK_DECLS([isinf, isnan, isfinite], [], [], [[#include <math.h>]])
+
+# On FreeBSD 6.2, it appears that tanh(-0.) returns 0. instead of
+# -0. on some architectures.
+AC_MSG_CHECKING(whether tanh preserves the sign of zero)
+AC_CACHE_VAL(ac_cv_tanh_preserves_zero_sign, [
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <math.h>
+#include <stdlib.h>
+int main() {
+    /* return 0 if either negative zeros don't exist
+       on this platform or if negative zeros exist
+       and tanh(-0.) == -0. */
+  if (atan2(0., -1.) == atan2(-0., -1.) ||
+      atan2(tanh(-0.), -1.) == atan2(-0., -1.)) exit(0);
+  else exit(1);
+}
+]])],
+[ac_cv_tanh_preserves_zero_sign=yes],
+[ac_cv_tanh_preserves_zero_sign=no],
+[ac_cv_tanh_preserves_zero_sign=no])])
+AC_MSG_RESULT($ac_cv_tanh_preserves_zero_sign)
+if test "$ac_cv_tanh_preserves_zero_sign" = yes
+then
+  AC_DEFINE(TANH_PRESERVES_ZERO_SIGN, 1,
+  [Define if tanh(-0.) is -0., or if platform doesn't have signed zeros])
+fi
+
+if test "$ac_cv_func_log1p" = yes
+then
+    # On some versions of AIX, log1p(-0.) returns 0. instead of
+    # -0.  See issue #9920.
+    AC_MSG_CHECKING(whether log1p drops the sign of negative zero)
+    AC_CACHE_VAL(ac_cv_log1p_drops_zero_sign, [
+    AC_RUN_IFELSE([AC_LANG_SOURCE([[
+    #include <math.h>
+    #include <stdlib.h>
+    int main() {
+        /* Fail if the signs of log1p(-0.) and -0. can be
+	   distinguished. */
+        if (atan2(log1p(-0.), -1.) == atan2(-0., -1.))
+            return 0;
+        else
+            return 1;
+    }
+    ]])],
+    [ac_cv_log1p_drops_zero_sign=no],
+    [ac_cv_log1p_drops_zero_sign=yes],
+    [ac_cv_log1p_drops_zero_sign=no])])
+    AC_MSG_RESULT($ac_cv_log1p_drops_zero_sign)
+fi
+if test "$ac_cv_log1p_drops_zero_sign" = yes
+then
+  AC_DEFINE(LOG1P_DROPS_ZERO_SIGN, 1,
+  [Define if log1p(-0.) is 0. rather than -0.])
+fi
+
+LIBS=$LIBS_SAVE
+
+# For multiprocessing module, check that sem_open
+# actually works.  For FreeBSD versions <= 7.2,
+# the kernel module that provides POSIX semaphores
+# isn't loaded by default, so an attempt to call
+# sem_open results in a 'Signal 12' error.
+AC_MSG_CHECKING(whether POSIX semaphores are enabled)
+AC_CACHE_VAL(ac_cv_posix_semaphores_enabled,
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <unistd.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <semaphore.h>
+#include <sys/stat.h>
+
+int main(void) {
+  sem_t *a = sem_open("/autoconf", O_CREAT, S_IRUSR|S_IWUSR, 0);
+  if (a == SEM_FAILED) {
+    perror("sem_open");
+    return 1;
+  }
+  sem_close(a);
+  sem_unlink("/autoconf");
+  return 0;
+}
+]])],
+[ac_cv_posix_semaphores_enabled=yes],
+[ac_cv_posix_semaphores_enabled=no],
+[ac_cv_posix_semaphores_enabled=yes])
+)
+AC_MSG_RESULT($ac_cv_posix_semaphores_enabled)
+if test $ac_cv_posix_semaphores_enabled = no
+then
+  AC_DEFINE(POSIX_SEMAPHORES_NOT_ENABLED, 1,
+            [Define if POSIX semaphores aren't enabled on your system])
+fi
+
+# Multiprocessing check for broken sem_getvalue
+AC_MSG_CHECKING(for broken sem_getvalue)
+AC_CACHE_VAL(ac_cv_broken_sem_getvalue,
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <unistd.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <semaphore.h>
+#include <sys/stat.h>
+
+int main(void){
+  sem_t *a = sem_open("/autocftw", O_CREAT, S_IRUSR|S_IWUSR, 0);
+  int count;
+  int res;
+  if(a==SEM_FAILED){
+    perror("sem_open");
+    return 1;
+
+  }
+  res = sem_getvalue(a, &count);
+  sem_close(a);
+  sem_unlink("/autocftw");
+  return res==-1 ? 1 : 0;
+}
+]])],
+[ac_cv_broken_sem_getvalue=no],
+[ac_cv_broken_sem_getvalue=yes],
+[ac_cv_broken_sem_getvalue=yes])
+)
+AC_MSG_RESULT($ac_cv_broken_sem_getvalue)
+if test $ac_cv_broken_sem_getvalue = yes
+then
+  AC_DEFINE(HAVE_BROKEN_SEM_GETVALUE, 1,
+  [define to 1 if your sem_getvalue is broken.])
+fi
+
+# determine what size digit to use for Python's longs
+AC_MSG_CHECKING([digit size for Python's longs])
+AC_ARG_ENABLE(big-digits,
+AS_HELP_STRING([--enable-big-digits@<:@=BITS@:>@],[use big digits for Python longs [[BITS=30]]]),
+[case $enable_big_digits in
+yes)
+  enable_big_digits=30 ;;
+no)
+  enable_big_digits=15 ;;
+[15|30])
+  ;;
+*)
+  AC_MSG_ERROR([bad value $enable_big_digits for --enable-big-digits; value should be 15 or 30]) ;;
+esac
+AC_MSG_RESULT($enable_big_digits)
+AC_DEFINE_UNQUOTED(PYLONG_BITS_IN_DIGIT, $enable_big_digits, [Define as the preferred size in bits of long digits])
+],
+[AC_MSG_RESULT(no value specified)])
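+# Note (a sketch of the effect; an assumption about the long-integer
+# implementation, not verified here): PYLONG_BITS_IN_DIGIT=30 stores each
+# digit of a Python int in a 32-bit unsigned type, while 15 uses a 16-bit
+# unsigned type; 30-bit digits are generally the better choice on 64-bit
+# platforms.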
+
+# check for wchar.h
+AC_CHECK_HEADER(wchar.h, [
+  AC_DEFINE(HAVE_WCHAR_H, 1, 
+  [Define if the compiler provides a wchar.h header file.]) 
+  wchar_h="yes"
+],
+wchar_h="no"
+)
+
+# determine wchar_t size
+if test "$wchar_h" = yes
+then
+  AC_CHECK_SIZEOF(wchar_t, 4, [#include <wchar.h>])
+fi
+
+AC_MSG_CHECKING(for UCS-4 tcl)
+have_ucs4_tcl=no
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+#include <tcl.h>
+#if TCL_UTF_MAX != 6
+# error "NOT UCS4_TCL"
+#endif]], [[]])],[
+  AC_DEFINE(HAVE_UCS4_TCL, 1, [Define this if you have tcl and TCL_UTF_MAX==6])
+  have_ucs4_tcl=yes
+],[])
+AC_MSG_RESULT($have_ucs4_tcl)
+
+# check whether wchar_t is signed or not
+if test "$wchar_h" = yes
+then
+  # check whether wchar_t is signed or not
+  AC_MSG_CHECKING(whether wchar_t is signed)
+  AC_CACHE_VAL(ac_cv_wchar_t_signed, [
+  AC_RUN_IFELSE([AC_LANG_SOURCE([[
+  #include <wchar.h>
+  int main()
+  {
+	/* Success: exit code 0 */
+        exit((((wchar_t) -1) < ((wchar_t) 0)) ? 0 : 1);
+  }
+  ]])],
+  [ac_cv_wchar_t_signed=yes],
+  [ac_cv_wchar_t_signed=no],
+  [ac_cv_wchar_t_signed=yes])])
+  AC_MSG_RESULT($ac_cv_wchar_t_signed)
+fi
+
+AC_MSG_CHECKING(what type to use for str)
+AC_ARG_WITH(wide-unicode, 
+            AS_HELP_STRING([--with-wide-unicode], [Use 4-byte Unicode characters (default is 2 bytes)]),
+[
+if test "$withval" != no
+then unicode_size="4"
+else unicode_size="2"
+fi
+],
+[
+case "$have_ucs4_tcl" in
+  yes) unicode_size="4";;
+  *)   unicode_size="2" ;;
+esac
+])
+
+AH_TEMPLATE(Py_UNICODE_SIZE,
+  [Define as the size of the unicode type.])
+case "$unicode_size" in
+  4)
+     AC_DEFINE(Py_UNICODE_SIZE, 4)
+     ABIFLAGS="${ABIFLAGS}u"
+     ;;
+  *) AC_DEFINE(Py_UNICODE_SIZE, 2) ;;
+esac
+
+AH_TEMPLATE(PY_UNICODE_TYPE,
+  [Define as the integral type used for Unicode representation.])
+
+# wchar_t is only usable if it maps to an unsigned type
+if test "$unicode_size" = "$ac_cv_sizeof_wchar_t" \
+          -a "$ac_cv_wchar_t_signed" = "no"
+then
+  PY_UNICODE_TYPE="wchar_t"
+  AC_DEFINE(HAVE_USABLE_WCHAR_T, 1,
+  [Define if you have a usable wchar_t type defined in wchar.h; usable
+   means wchar_t must be an unsigned type with at least 16 bits. (see
+   Include/unicodeobject.h).])
+  AC_DEFINE(PY_UNICODE_TYPE,wchar_t)
+elif test "$ac_cv_sizeof_short" = "$unicode_size"
+then
+     PY_UNICODE_TYPE="unsigned short"
+     AC_DEFINE(PY_UNICODE_TYPE,unsigned short)
+elif test "$ac_cv_sizeof_long" = "$unicode_size"
+then
+     PY_UNICODE_TYPE="unsigned long"
+     AC_DEFINE(PY_UNICODE_TYPE,unsigned long)
+else
+     PY_UNICODE_TYPE="no type found"
+fi
+AC_MSG_RESULT($PY_UNICODE_TYPE)
+
+# check for endianness
+AC_C_BIGENDIAN
+
+# ABI version string for Python extension modules.  This appears between the
+# periods in shared library file names, e.g. foo.<SOABI>.so.  It is calculated
+# from the following attributes which affect the ABI of this Python build (in
+# this order):
+#
+# * The Python implementation (always 'cpython-' for us)
+# * The major and minor version numbers
+# * --with-pydebug (adds a 'd')
+# * --with-pymalloc (adds a 'm')
+# * --with-wide-unicode (adds a 'u')
+#
+# Thus for example, Python 3.2 built with wide unicode, pydebug, and pymalloc,
+# would get a shared library ABI version tag of 'cpython-32dmu' and shared
+# libraries would be named 'foo.cpython-32dmu.so'.
+AC_SUBST(SOABI)
+AC_MSG_CHECKING(ABIFLAGS)
+AC_MSG_RESULT($ABIFLAGS)
+AC_MSG_CHECKING(SOABI)
+SOABI='cpython-'`echo $VERSION | tr -d .`${ABIFLAGS}
+AC_MSG_RESULT($SOABI)
+
+AC_MSG_CHECKING(LDVERSION)
+LDVERSION='$(VERSION)$(ABIFLAGS)'
+AC_MSG_RESULT($LDVERSION)
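+# For example (illustrative only): with VERSION=3.2 and ABIFLAGS=mu, SOABI
+# above is "cpython-32mu" while LDVERSION expands at make time to "3.2mu".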
+
+# SO is the extension of shared libraries (including the dot!)
+# -- usually .so, .sl on HP-UX, .dll on Cygwin
+AC_MSG_CHECKING(SO)
+if test -z "$SO"
+then
+	case $ac_sys_system in
+	hp*|HP*)
+		case `uname -m` in
+			ia64) SO=.so;;
+	  		*)    SO=.sl;;
+		esac
+		;;
+	CYGWIN*)   SO=.dll;;
+	Linux*|GNU*)
+		   SO=.${SOABI}.so;;
+	*)	   SO=.so;;
+	esac
+else
+	# this might also be a termcap variable, see #610332
+	echo
+	echo '====================================================================='
+	echo '+                                                                   +'
+	echo '+ WARNING: You have set SO in your environment.                     +'
+	echo '+ Do you really mean to change the extension for shared libraries?  +'
+	echo '+ Continuing in 10 seconds to let you ponder.                       +'
+	echo '+                                                                   +'
+	echo '====================================================================='
+	sleep 10
+fi
+AC_MSG_RESULT($SO)
+
+# Check whether right shifting a negative integer extends the sign bit
+# or fills with zeros (like the Cray J90, according to Tim Peters).
+AC_MSG_CHECKING(whether right shift extends the sign bit)
+AC_CACHE_VAL(ac_cv_rshift_extends_sign, [
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+int main()
+{
+	exit(((-1)>>3 == -1) ? 0 : 1);
+}
+]])],
+[ac_cv_rshift_extends_sign=yes],
+[ac_cv_rshift_extends_sign=no],
+[ac_cv_rshift_extends_sign=yes])])
+AC_MSG_RESULT($ac_cv_rshift_extends_sign)
+if test "$ac_cv_rshift_extends_sign" = no
+then
+  AC_DEFINE(SIGNED_RIGHT_SHIFT_ZERO_FILLS, 1,
+  [Define if i>>j for signed int i does not extend the sign bit
+   when i < 0])
+fi
+
+# check for getc_unlocked and related locking functions
+AC_MSG_CHECKING(for getc_unlocked() and friends)
+AC_CACHE_VAL(ac_cv_have_getc_unlocked, [
+AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include <stdio.h>]], [[
+	FILE *f = fopen("/dev/null", "r");
+	flockfile(f);
+	getc_unlocked(f);
+	funlockfile(f);
+]])],[ac_cv_have_getc_unlocked=yes],[ac_cv_have_getc_unlocked=no])])
+AC_MSG_RESULT($ac_cv_have_getc_unlocked)
+if test "$ac_cv_have_getc_unlocked" = yes
+then
+  AC_DEFINE(HAVE_GETC_UNLOCKED, 1,
+  [Define this if you have flockfile(), getc_unlocked(), and funlockfile()])
+fi
+
+# check where readline lives
+# save the value of LIBS so we don't actually link Python with readline
+LIBS_no_readline=$LIBS
+
+# On some systems we need to link readline to a termcap compatible
+# library.  NOTE: Keep the precedence of listed libraries synchronised
+# with setup.py.
+py_cv_lib_readline=no
+AC_MSG_CHECKING([how to link readline libs])
+for py_libtermcap in "" ncursesw ncurses curses termcap; do
+  if test -z "$py_libtermcap"; then
+    READLINE_LIBS="-lreadline"
+  else
+    READLINE_LIBS="-lreadline -l$py_libtermcap"
+  fi
+  LIBS="$READLINE_LIBS $LIBS_no_readline"
+  AC_LINK_IFELSE(
+    [AC_LANG_CALL([],[readline])],
+    [py_cv_lib_readline=yes])
+  if test $py_cv_lib_readline = yes; then
+    break
+  fi
+done
+# Uncomment this line if you want to use READLINE_LIBS in Makefile or scripts
+#AC_SUBST([READLINE_LIBS])
+if test $py_cv_lib_readline = no; then
+  AC_MSG_RESULT([none])
+else
+  AC_MSG_RESULT([$READLINE_LIBS])
+  AC_DEFINE(HAVE_LIBREADLINE, 1,
+    [Define if you have the readline library (-lreadline).])
+fi
+
+# check for readline 2.1
+AC_CHECK_LIB(readline, rl_callback_handler_install,
+	AC_DEFINE(HAVE_RL_CALLBACK, 1,
+        [Define if you have readline 2.1]), ,$READLINE_LIBS)
+
+# check for readline 2.2
+AC_PREPROC_IFELSE([AC_LANG_SOURCE([[#include <readline/readline.h>]])],
+  [have_readline=yes],
+  [have_readline=no]
+)
+if test $have_readline = yes
+then
+  AC_EGREP_HEADER([extern int rl_completion_append_character;],
+  [readline/readline.h],
+  AC_DEFINE(HAVE_RL_COMPLETION_APPEND_CHARACTER, 1,
+  [Define if you have readline 2.2]), )
+  AC_EGREP_HEADER([extern int rl_completion_suppress_append;],
+  [readline/readline.h],
+  AC_DEFINE(HAVE_RL_COMPLETION_SUPPRESS_APPEND, 1,
+  [Define if you have rl_completion_suppress_append]), )
+fi
+
+# check for readline 4.0
+AC_CHECK_LIB(readline, rl_pre_input_hook,
+	AC_DEFINE(HAVE_RL_PRE_INPUT_HOOK, 1,
+        [Define if you have readline 4.0]), ,$READLINE_LIBS)
+
+# also in 4.0
+AC_CHECK_LIB(readline, rl_completion_display_matches_hook,
+	AC_DEFINE(HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK, 1,
+        [Define if you have readline 4.0]), ,$READLINE_LIBS)
+
+# check for readline 4.2
+AC_CHECK_LIB(readline, rl_completion_matches,
+	AC_DEFINE(HAVE_RL_COMPLETION_MATCHES, 1,
+        [Define if you have readline 4.2]), ,$READLINE_LIBS)
+
+# also in readline 4.2
+AC_PREPROC_IFELSE([AC_LANG_SOURCE([[#include <readline/readline.h>]])],
+  [have_readline=yes],
+  [have_readline=no]
+)
+if test $have_readline = yes
+then
+  AC_EGREP_HEADER([extern int rl_catch_signals;],
+  [readline/readline.h],
+  AC_DEFINE(HAVE_RL_CATCH_SIGNAL, 1,
+  [Define if you can turn off readline's signal handling.]), )
+fi
+
+# End of readline checks: restore LIBS
+LIBS=$LIBS_no_readline
+
+AC_MSG_CHECKING(for broken nice())
+AC_CACHE_VAL(ac_cv_broken_nice, [
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+int main()
+{
+	int val1 = nice(1);
+	if (val1 != -1 && val1 == nice(2))
+		exit(0);
+	exit(1);
+}
+]])],
+[ac_cv_broken_nice=yes],
+[ac_cv_broken_nice=no],
+[ac_cv_broken_nice=no])])
+AC_MSG_RESULT($ac_cv_broken_nice)
+if test "$ac_cv_broken_nice" = yes
+then
+  AC_DEFINE(HAVE_BROKEN_NICE, 1,
+  [Define if nice() returns success/failure instead of the new priority.])
+fi
+
+AC_MSG_CHECKING(for broken poll())
+AC_CACHE_VAL(ac_cv_broken_poll,
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <poll.h>
+
+int main()
+{
+    struct pollfd poll_struct = { 42, POLLIN|POLLPRI|POLLOUT, 0 };
+    int poll_test;
+
+    close (42);
+
+    poll_test = poll(&poll_struct, 1, 0);
+    if (poll_test < 0)
+        return 0;
+    else if (poll_test == 0 && poll_struct.revents != POLLNVAL)
+        return 0;
+    else
+        return 1;
+}
+]])],
+[ac_cv_broken_poll=yes],
+[ac_cv_broken_poll=no],
+[ac_cv_broken_poll=no]))
+AC_MSG_RESULT($ac_cv_broken_poll)
+if test "$ac_cv_broken_poll" = yes
+then
+  AC_DEFINE(HAVE_BROKEN_POLL, 1,
+      [Define if poll() sets errno on invalid file descriptors.])
+fi
+
+# Before we can test tzset, we need to check if struct tm has a tm_zone 
+# (which is not required by ISO C or UNIX spec) and/or if we support
+# tzname[]
+AC_STRUCT_TIMEZONE
+
+# check tzset(3) exists and works like we expect it to
+AC_MSG_CHECKING(for working tzset())
+AC_CACHE_VAL(ac_cv_working_tzset, [
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <stdlib.h>
+#include <time.h>
+#include <string.h>
+
+#if HAVE_TZNAME
+extern char *tzname[];
+#endif
+
+int main()
+{
+	/* Note that we need to ensure that not only does tzset(3)
+	   do 'something' with localtime, but it works as documented
+	   in the library reference and as expected by the test suite.
+	   This includes making sure that tzname is set properly if
+	   tm->tm_zone does not exist since it is the alternative way
+	   of getting timezone info.
+
+	   Red Hat 6.2 doesn't understand the southern hemisphere 
+	   after New Year's Day.
+	*/
+
+	time_t groundhogday = 1044144000; /* GMT-based */
+	time_t midyear = groundhogday + (365 * 24 * 3600 / 2);
+
+	putenv("TZ=UTC+0");
+	tzset();
+	if (localtime(&groundhogday)->tm_hour != 0)
+	    exit(1);
+#if HAVE_TZNAME
+	/* For UTC, tzname[1] is sometimes "", sometimes "   " */
+	if (strcmp(tzname[0], "UTC") || 
+		(tzname[1][0] != 0 && tzname[1][0] != ' '))
+	    exit(1);
+#endif
+
+	putenv("TZ=EST+5EDT,M4.1.0,M10.5.0");
+	tzset();
+	if (localtime(&groundhogday)->tm_hour != 19)
+	    exit(1);
+#if HAVE_TZNAME
+	if (strcmp(tzname[0], "EST") || strcmp(tzname[1], "EDT"))
+	    exit(1);
+#endif
+
+	putenv("TZ=AEST-10AEDT-11,M10.5.0,M3.5.0");
+	tzset();
+	if (localtime(&groundhogday)->tm_hour != 11)
+	    exit(1);
+#if HAVE_TZNAME
+	if (strcmp(tzname[0], "AEST") || strcmp(tzname[1], "AEDT"))
+	    exit(1);
+#endif
+
+#if HAVE_STRUCT_TM_TM_ZONE
+	if (strcmp(localtime(&groundhogday)->tm_zone, "AEDT"))
+	    exit(1);
+	if (strcmp(localtime(&midyear)->tm_zone, "AEST"))
+	    exit(1);
+#endif
+
+	exit(0);
+}
+]])],
+[ac_cv_working_tzset=yes],
+[ac_cv_working_tzset=no],
+[ac_cv_working_tzset=no])])
+AC_MSG_RESULT($ac_cv_working_tzset)
+if test "$ac_cv_working_tzset" = yes
+then
+  AC_DEFINE(HAVE_WORKING_TZSET, 1,
+  [Define if tzset() actually switches the local timezone in a meaningful way.])
+fi
+
+# Look for subsecond timestamps in struct stat
+AC_MSG_CHECKING(for tv_nsec in struct stat)
+AC_CACHE_VAL(ac_cv_stat_tv_nsec,
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/stat.h>]], [[
+struct stat st;
+st.st_mtim.tv_nsec = 1;
+]])],
+[ac_cv_stat_tv_nsec=yes],
+[ac_cv_stat_tv_nsec=no]))
+AC_MSG_RESULT($ac_cv_stat_tv_nsec)
+if test "$ac_cv_stat_tv_nsec" = yes
+then
+  AC_DEFINE(HAVE_STAT_TV_NSEC, 1,
+  [Define if you have struct stat.st_mtim.tv_nsec])
+fi
+
+# Look for BSD style subsecond timestamps in struct stat
+AC_MSG_CHECKING(for tv_nsec2 in struct stat)
+AC_CACHE_VAL(ac_cv_stat_tv_nsec2,
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/stat.h>]], [[
+struct stat st;
+st.st_mtimespec.tv_nsec = 1;
+]])],
+[ac_cv_stat_tv_nsec2=yes],
+[ac_cv_stat_tv_nsec2=no]))
+AC_MSG_RESULT($ac_cv_stat_tv_nsec2)
+if test "$ac_cv_stat_tv_nsec2" = yes
+then
+  AC_DEFINE(HAVE_STAT_TV_NSEC2, 1,
+  [Define if you have struct stat.st_mtimensec])
+fi
+
+# On HP/UX 11.0, mvwdelch is a block with a return statement
+AC_MSG_CHECKING(whether mvwdelch is an expression)
+AC_CACHE_VAL(ac_cv_mvwdelch_is_expression,
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <curses.h>]], [[
+  int rtn;
+  rtn = mvwdelch(0,0,0);
+]])],
+[ac_cv_mvwdelch_is_expression=yes],
+[ac_cv_mvwdelch_is_expression=no]))
+AC_MSG_RESULT($ac_cv_mvwdelch_is_expression)
+
+if test "$ac_cv_mvwdelch_is_expression" = yes
+then
+  AC_DEFINE(MVWDELCH_IS_EXPRESSION, 1,
+  [Define if mvwdelch in curses.h is an expression.])
+fi
+
+AC_MSG_CHECKING(whether WINDOW has _flags)
+AC_CACHE_VAL(ac_cv_window_has_flags,
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <curses.h>]], [[
+  WINDOW *w;
+  w->_flags = 0;
+]])],
+[ac_cv_window_has_flags=yes],
+[ac_cv_window_has_flags=no]))
+AC_MSG_RESULT($ac_cv_window_has_flags)
+
+
+if test "$ac_cv_window_has_flags" = yes
+then
+  AC_DEFINE(WINDOW_HAS_FLAGS, 1, 
+  [Define if WINDOW in curses.h offers a field _flags.])
+fi
+
+AC_MSG_CHECKING(for is_term_resized)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <curses.h>]], [[void *x=is_term_resized]])],
+  [AC_DEFINE(HAVE_CURSES_IS_TERM_RESIZED, 1, Define if you have the 'is_term_resized' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)]
+)
+
+AC_MSG_CHECKING(for resize_term)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <curses.h>]], [[void *x=resize_term]])],
+  [AC_DEFINE(HAVE_CURSES_RESIZE_TERM, 1, Define if you have the 'resize_term' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)]
+)
+
+AC_MSG_CHECKING(for resizeterm)
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <curses.h>]], [[void *x=resizeterm]])],
+  [AC_DEFINE(HAVE_CURSES_RESIZETERM, 1, Define if you have the 'resizeterm' function.)
+   AC_MSG_RESULT(yes)],
+  [AC_MSG_RESULT(no)]
+)
+
+AC_MSG_CHECKING(for /dev/ptmx)
+
+if test -r /dev/ptmx
+then
+  AC_MSG_RESULT(yes)
+  AC_DEFINE(HAVE_DEV_PTMX, 1,
+  [Define if we have /dev/ptmx.])
+else
+  AC_MSG_RESULT(no)
+fi
+
+AC_MSG_CHECKING(for /dev/ptc)
+
+if test -r /dev/ptc
+then
+  AC_MSG_RESULT(yes)
+  AC_DEFINE(HAVE_DEV_PTC, 1,
+  [Define if we have /dev/ptc.])
+else
+  AC_MSG_RESULT(no)
+fi
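HAVE_DEV_PTMX and HAVE_DEV_PTC simply record which pseudo-terminal device the platform provides. As a hedged illustration (not code from the patch), the /dev/ptmx flavour is normally used along these lines to obtain a pty master and its slave name:

#define _XOPEN_SOURCE 600   /* for grantpt(), unlockpt(), ptsname() */
#include <stdlib.h>
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>

int main(void)
{
#ifdef HAVE_DEV_PTMX
    int master = open("/dev/ptmx", O_RDWR | O_NOCTTY);
    if (master >= 0) {
        if (grantpt(master) == 0 && unlockpt(master) == 0)
            printf("slave pty: %s\n", ptsname(master));
        close(master);
    }
#endif
    return 0;
}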
+
+if test "$have_long_long" = yes
+then
+  AC_MSG_CHECKING(for %lld and %llu printf() format support)
+  AC_CACHE_VAL(ac_cv_have_long_long_format,
+  AC_RUN_IFELSE([AC_LANG_SOURCE([[[
+  #include <stdio.h>
+  #include <stddef.h>
+  #include <string.h>
+
+  #ifdef HAVE_SYS_TYPES_H
+  #include <sys/types.h>
+  #endif
+
+  int main()
+  {
+      char buffer[256];
+
+      if (sprintf(buffer, "%lld", (long long)123) < 0)
+          return 1;
+      if (strcmp(buffer, "123"))
+          return 1;
+
+      if (sprintf(buffer, "%lld", (long long)-123) < 0)
+          return 1;
+      if (strcmp(buffer, "-123"))
+          return 1;
+
+      if (sprintf(buffer, "%llu", (unsigned long long)123) < 0)
+          return 1;
+      if (strcmp(buffer, "123"))
+          return 1;
+
+      return 0;
+  }
+  ]]])],
+  [ac_cv_have_long_long_format=yes],
+  [ac_cv_have_long_long_format=no],
+  [ac_cv_have_long_long_format=no])
+  )
+  AC_MSG_RESULT($ac_cv_have_long_long_format)
+fi
+
+if test "$ac_cv_have_long_long_format" = yes
+then
+  AC_DEFINE(PY_FORMAT_LONG_LONG, "ll",
+  [Define to printf format modifier for long long type])
+fi
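PY_FORMAT_LONG_LONG is meant to be spliced into printf-style format strings through string-literal concatenation. A short sketch of that usage; the fallback #define exists only to keep the example self-contained and is not part of the patch:

#include <stdio.h>

#ifndef PY_FORMAT_LONG_LONG
#define PY_FORMAT_LONG_LONG "ll"
#endif

int main(void)
{
    long long v = -1234567890123LL;
    printf("%" PY_FORMAT_LONG_LONG "d\n", v);   /* the format expands to "%lld" */
    return 0;
}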
+
+if test $ac_sys_system = Darwin
+then
+	LIBS="$LIBS -framework CoreFoundation"
+fi
+
+AC_CACHE_CHECK([for %zd printf() format support], ac_cv_have_size_t_format, [dnl
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include <stdio.h>
+#include <stddef.h>
+#include <string.h>
+
+#ifdef HAVE_SYS_TYPES_H
+#include <sys/types.h>
+#endif
+
+#ifdef HAVE_SSIZE_T
+typedef ssize_t Py_ssize_t;
+#elif SIZEOF_VOID_P == SIZEOF_LONG
+typedef long Py_ssize_t;
+#else
+typedef int Py_ssize_t;
+#endif
+
+int main()
+{
+    char buffer[256];
+
+    if(sprintf(buffer, "%zd", (size_t)123) < 0)
+       	return 1;
+
+    if (strcmp(buffer, "123"))
+	return 1;
+
+    if (sprintf(buffer, "%zd", (Py_ssize_t)-123) < 0)
+       	return 1;
+
+    if (strcmp(buffer, "-123"))
+	return 1;
+
+    return 0;
+}
+]])],
+  [ac_cv_have_size_t_format=yes],
+  [ac_cv_have_size_t_format=no],
+  [ac_cv_have_size_t_format="cross -- assuming yes"
+])])
+if test "$ac_cv_have_size_t_format" != no ; then
+  AC_DEFINE(PY_FORMAT_SIZE_T, "z",
+  [Define to printf format modifier for Py_ssize_t])
+fi
+
+AC_CHECK_TYPE(socklen_t,,
+  AC_DEFINE(socklen_t,int,
+            [Define to `int' if <sys/socket.h> does not define.]),[
+#ifdef HAVE_SYS_TYPES_H
+#include <sys/types.h>
+#endif
+#ifdef HAVE_SYS_SOCKET_H
+#include <sys/socket.h>
+#endif
+])
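Thanks to the fallback above, code can use socklen_t unconditionally; where the headers lack the typedef, configure defines it to int. A brief illustrative use, not taken from the patch:

#include <sys/types.h>
#include <sys/socket.h>
#include <stdio.h>
#include <unistd.h>

int main(void)
{
    struct sockaddr_storage addr;
    socklen_t len = sizeof(addr);     /* plain int on platforms without the typedef */
    int fd = socket(AF_INET, SOCK_STREAM, 0);
    if (fd >= 0) {
        if (getsockname(fd, (struct sockaddr *)&addr, &len) == 0)
            printf("address length: %u\n", (unsigned)len);
        close(fd);
    }
    return 0;
}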
+
+AC_MSG_CHECKING(for broken mbstowcs)
+AC_CACHE_VAL(ac_cv_broken_mbstowcs,
+AC_RUN_IFELSE([AC_LANG_SOURCE([[
+#include<stdlib.h>
+int main() {
+    size_t len = -1;
+    const char *str = "text";
+    len = mbstowcs(NULL, str, 0);
+    return (len != 4);
+}
+]])],
+[ac_cv_broken_mbstowcs=no],
+[ac_cv_broken_mbstowcs=yes],
+[ac_cv_broken_mbstowcs=no]))
+AC_MSG_RESULT($ac_cv_broken_mbstowcs)
+if test "$ac_cv_broken_mbstowcs" = yes
+then
+  AC_DEFINE(HAVE_BROKEN_MBSTOWCS, 1,
+  [Define if mbstowcs(NULL, "text", 0) does not return the number of 
+   wide chars that would be converted.])
+fi
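The idiom being tested here is calling mbstowcs(NULL, s, 0) to learn how many wide characters a conversion needs before allocating a buffer; HAVE_BROKEN_MBSTOWCS marks platforms where that first call cannot be trusted. A minimal sketch of the idiom with a pessimistic fallback (illustrative only):

#include <stdlib.h>
#include <string.h>
#include <stdio.h>

int main(void)
{
    const char *s = "text";
    size_t n;
#ifndef HAVE_BROKEN_MBSTOWCS
    n = mbstowcs(NULL, s, 0);      /* count wide chars without converting */
#else
    n = strlen(s);                 /* broken mbstowcs: byte count is a safe upper bound */
#endif
    wchar_t *buf = malloc((n + 1) * sizeof(wchar_t));
    if (buf != NULL && mbstowcs(buf, s, n + 1) != (size_t)-1)
        printf("converted into at most %zu wide chars\n", n);
    free(buf);
    return 0;
}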
+
+AC_MSG_CHECKING(whether $CC supports computed gotos)
+AC_CACHE_VAL(ac_cv_computed_gotos,
+AC_RUN_IFELSE([AC_LANG_SOURCE([[[
+int main(int argc, char **argv)
+{
+    static void *targets[1] = { &&LABEL1 };
+    goto LABEL2;
+LABEL1:
+    return 0;
+LABEL2:
+    goto *targets[0];
+    return 1;
+}
+]]])],
+[ac_cv_computed_gotos=yes],
+[ac_cv_computed_gotos=no],
+[ac_cv_computed_gotos=no]))
+AC_MSG_RESULT($ac_cv_computed_gotos)
+if test "$ac_cv_computed_gotos" = yes
+then
+  AC_DEFINE(HAVE_COMPUTED_GOTOS, 1,
+  [Define if the C compiler supports computed gotos.])
+fi
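The probe above detects the GCC/Clang "labels as values" extension that allows a threaded-dispatch evaluation loop. A stripped-down sketch of the dispatch pattern it enables; this is illustrative only and not the interpreter's actual loop:

#include <stdio.h>

int main(void)
{
#ifdef HAVE_COMPUTED_GOTOS
    static void *dispatch[] = { &&op_print, &&op_halt };
    int program[] = { 0, 0, 1 };              /* print, print, halt */
    int pc = 0;

    goto *dispatch[program[pc++]];            /* jump straight to the first opcode */
op_print:
    puts("print");
    goto *dispatch[program[pc++]];
op_halt:
    puts("halt");
#endif
    return 0;
}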
+
+# Check for --with-computed-gotos
+AC_MSG_CHECKING(for --with-computed-gotos)
+AC_ARG_WITH(computed-gotos,
+            AS_HELP_STRING([--with(out)-computed-gotos],
+                           [Use computed gotos in evaluation loop (enabled by default on supported compilers)]),
+[
+if test "$withval" = yes
+then 
+  AC_DEFINE(USE_COMPUTED_GOTOS, 1,
+  [Define if you want to use computed gotos in ceval.c.]) 
+  AC_MSG_RESULT(yes)
+fi
+if test "$withval" = no
+then 
+  AC_DEFINE(USE_COMPUTED_GOTOS, 0,
+  [Define if you want to use computed gotos in ceval.c.]) 
+  AC_MSG_RESULT(no)
+fi
+],
+[AC_MSG_RESULT(no value specified)])
+
+case $ac_sys_system in
+AIX*)   
+  AC_DEFINE(HAVE_BROKEN_PIPE_BUF, 1, [Define if the system reports an invalid PIPE_BUF value.]) ;;
+esac
+
+
+case $ac_sys_system in
+  OSF*) AC_MSG_ERROR(OSF* systems are deprecated unless somebody volunteers. Check http://bugs.python.org/issue8606) ;;
+esac
+
+AC_CHECK_FUNC(pipe2, AC_DEFINE(HAVE_PIPE2, 1, [Define if the OS supports pipe2()]), )
+
+AC_SUBST(THREADHEADERS)
+
+for h in `(cd $srcdir;echo Python/thread_*.h)`
+do
+  THREADHEADERS="$THREADHEADERS \$(srcdir)/$h"
+done
+
+AC_SUBST(SRCDIRS)
+SRCDIRS="Parser Grammar Objects Python Modules Mac"
+AC_MSG_CHECKING(for build directories)
+for dir in $SRCDIRS; do
+    if test ! -d $dir; then
+        mkdir $dir
+    fi
+done
+AC_MSG_RESULT(done)
+
+# generate output files
+AC_CONFIG_FILES(Makefile.pre Modules/Setup.config Misc/python.pc)
+AC_CONFIG_FILES([Modules/ld_so_aix], [chmod +x Modules/ld_so_aix])
+AC_OUTPUT
+
+echo "creating Modules/Setup"
+if test ! -f Modules/Setup
+then
+	cp $srcdir/Modules/Setup.dist Modules/Setup
+fi
+
+echo "creating Modules/Setup.local"
+if test ! -f Modules/Setup.local
+then
+	echo "# Edit this file for local setup changes" >Modules/Setup.local
+fi
+
+echo "creating Makefile"
+$SHELL $srcdir/Modules/makesetup -c $srcdir/Modules/config.c.in \
+			-s Modules Modules/Setup.config \
+			Modules/Setup.local Modules/Setup
+mv config.c Modules
diff -r 3d0686d90f55 configure.in
--- a/configure.in
+++ /dev/null
@@ -1,4355 +0,0 @@
-dnl ***********************************************
-dnl * Please run autoreconf to test your changes! *
-dnl ***********************************************
-
-# Set VERSION so we only need to edit in one place (i.e., here)
-m4_define(PYTHON_VERSION, 3.2)
-
-dnl Some m4 magic to ensure that the configure script is generated
-dnl by the correct autoconf version.
-m4_define([version_required],
-[m4_if(m4_version_compare(m4_defn([m4_PACKAGE_VERSION]), [$1]), 0,
-       [],
-       [m4_fatal([Autoconf version $1 is required for Python], 63)])
-])
-AC_PREREQ(2.65)
-
-AC_REVISION($Revision$)
-AC_INIT(python, PYTHON_VERSION, http://bugs.python.org/)
-AC_CONFIG_SRCDIR([Include/object.h])
-AC_CONFIG_HEADER(pyconfig.h)
-
-dnl Ensure that if prefix is specified, it does not end in a slash. If
-dnl it does, we get path names containing '//' which is both ugly and
-dnl can cause trouble.
-
-dnl Last slash shouldn't be stripped if prefix=/
-if test "$prefix" != "/"; then
-    prefix=`echo "$prefix" | sed -e 's/\/$//g'`
-fi    
-
-dnl This is for stuff that absolutely must end up in pyconfig.h.
-dnl Please use pyport.h instead, if possible.
-AH_TOP([
-#ifndef Py_PYCONFIG_H
-#define Py_PYCONFIG_H
-])
-AH_BOTTOM([
-/* Define the macros needed if on a UnixWare 7.x system. */
-#if defined(__USLC__) && defined(__SCO_VERSION__)
-#define STRICT_SYSV_CURSES /* Don't use ncurses extensions */
-#endif
-
-#endif /*Py_PYCONFIG_H*/
-])
-
-# We don't use PACKAGE_ variables, and they cause conflicts
-# with other autoconf-based packages that include Python.h
-grep -v 'define PACKAGE_' <confdefs.h >confdefs.h.new
-rm confdefs.h
-mv confdefs.h.new confdefs.h
-
-AC_SUBST(VERSION)
-VERSION=PYTHON_VERSION
-
-# Version number of Python's own shared library file.
-AC_SUBST(SOVERSION)
-SOVERSION=1.0
-
-# The later definition of _XOPEN_SOURCE disables certain features
-# on Linux, so we need _GNU_SOURCE to re-enable them (makedev, tm_zone).
-AC_DEFINE(_GNU_SOURCE, 1, [Define on Linux to activate all library features])
-
-# The later definition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables
-# certain features on NetBSD, so we need _NETBSD_SOURCE to re-enable
-# them.
-AC_DEFINE(_NETBSD_SOURCE, 1, [Define on NetBSD to activate all library features])
-
-# The later definition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables
-# certain features on FreeBSD, so we need __BSD_VISIBLE to re-enable
-# them.
-AC_DEFINE(__BSD_VISIBLE, 1, [Define on FreeBSD to activate all library features])
-
-# The later definition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables
-# u_int on Irix 5.3. Defining _BSD_TYPES brings it back.
-AC_DEFINE(_BSD_TYPES, 1, [Define on Irix to enable u_int])
-
-# The later definition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables
-# certain features on Mac OS X, so we need _DARWIN_C_SOURCE to re-enable
-# them.
-AC_DEFINE(_DARWIN_C_SOURCE, 1, [Define on Darwin to activate all library features])
-
-
-define_xopen_source=yes
-
-# Arguments passed to configure.
-AC_SUBST(CONFIG_ARGS)
-CONFIG_ARGS="$ac_configure_args"
-
-AC_MSG_CHECKING([for --enable-universalsdk])
-AC_ARG_ENABLE(universalsdk,
-	AS_HELP_STRING([--enable-universalsdk@<:@=SDKDIR@:>@], [Build against Mac OS X 10.4u SDK (ppc/i386)]),
-[
-	case $enableval in
-	yes)
-		enableval=/Developer/SDKs/MacOSX10.4u.sdk
-		if test ! -d "${enableval}"
-		then
-			enableval=/
-		fi
-		;;
-	esac
-	case $enableval in
-	no)
-		UNIVERSALSDK=
-		enable_universalsdk=
-		;;
-	*)
-		UNIVERSALSDK=$enableval
-		if test ! -d "${UNIVERSALSDK}"
-		then
-			AC_MSG_ERROR([--enable-universalsdk specifies non-existing SDK: ${UNIVERSALSDK}])
-		fi
-		;;
-	esac
-	
-],[
-   	UNIVERSALSDK=
-	enable_universalsdk=
-])
-if test -n "${UNIVERSALSDK}"
-then
-	AC_MSG_RESULT(${UNIVERSALSDK})
-else
-	AC_MSG_RESULT(no)
-fi
-AC_SUBST(UNIVERSALSDK)
-
-AC_SUBST(ARCH_RUN_32BIT)
-
-UNIVERSAL_ARCHS="32-bit"
-AC_SUBST(LIPO_32BIT_FLAGS)
-AC_MSG_CHECKING(for --with-universal-archs)
-AC_ARG_WITH(universal-archs,
-    AS_HELP_STRING([--with-universal-archs=ARCH], [select architectures for universal build ("32-bit", "64-bit", "3-way", "intel" or "all")]),
-[
-	AC_MSG_RESULT($withval)
-	UNIVERSAL_ARCHS="$withval"
-],
-[
- 	AC_MSG_RESULT(32-bit)
-])
-
-
-
-AC_ARG_WITH(framework-name,
-              AS_HELP_STRING([--with-framework-name=FRAMEWORK],
-                             [specify an alternate name of the framework built with --enable-framework]),
-[
-    PYTHONFRAMEWORK=${withval}
-    PYTHONFRAMEWORKDIR=${withval}.framework
-    PYTHONFRAMEWORKIDENTIFIER=org.python.`echo $withval | tr '[A-Z]' '[a-z]'`
-    ],[
-    PYTHONFRAMEWORK=Python
-    PYTHONFRAMEWORKDIR=Python.framework
-    PYTHONFRAMEWORKIDENTIFIER=org.python.python
-])
-dnl quadrigraphs "@<:@" and "@:>@" produce "[" and "]" in the output
-AC_ARG_ENABLE(framework,
-              AS_HELP_STRING([--enable-framework@<:@=INSTALLDIR@:>@], [Build (MacOSX|Darwin) framework]),
-[
-	case $enableval in
-	yes) 
-		enableval=/Library/Frameworks
-	esac
-	case $enableval in
-	no)
-		PYTHONFRAMEWORK=
-		PYTHONFRAMEWORKDIR=no-framework
-		PYTHONFRAMEWORKPREFIX=
-		PYTHONFRAMEWORKINSTALLDIR=
-		FRAMEWORKINSTALLFIRST=
-		FRAMEWORKINSTALLLAST=
-		FRAMEWORKALTINSTALLFIRST=
-		FRAMEWORKALTINSTALLLAST=
-		if test "x${prefix}" = "xNONE"; then
-			FRAMEWORKUNIXTOOLSPREFIX="${ac_default_prefix}"
-		else
-			FRAMEWORKUNIXTOOLSPREFIX="${prefix}"
-		fi
-		enable_framework=
-		;;
-	*)
-		PYTHONFRAMEWORKPREFIX="${enableval}"
-		PYTHONFRAMEWORKINSTALLDIR=$PYTHONFRAMEWORKPREFIX/$PYTHONFRAMEWORKDIR
-		FRAMEWORKINSTALLFIRST="frameworkinstallstructure"
-		FRAMEWORKALTINSTALLFIRST="frameworkinstallstructure "
-		FRAMEWORKINSTALLLAST="frameworkinstallmaclib frameworkinstallapps frameworkinstallunixtools"
-		FRAMEWORKALTINSTALLLAST="frameworkinstallmaclib frameworkinstallapps frameworkaltinstallunixtools"
-		FRAMEWORKINSTALLAPPSPREFIX="/Applications"
-
-		if test "x${prefix}" = "xNONE" ; then
-			FRAMEWORKUNIXTOOLSPREFIX="${ac_default_prefix}"
-
-		else
-			FRAMEWORKUNIXTOOLSPREFIX="${prefix}"
-		fi
-
-		case "${enableval}" in
-		/System*)
-			FRAMEWORKINSTALLAPPSPREFIX="/Applications"
-			if test "${prefix}" = "NONE" ; then
-				# See below
-				FRAMEWORKUNIXTOOLSPREFIX="/usr"
-			fi
-			;;
-
-		/Library*)
-			FRAMEWORKINSTALLAPPSPREFIX="/Applications"
-			;;
-
-		*/Library/Frameworks)
-			MDIR="`dirname "${enableval}"`"
-			MDIR="`dirname "${MDIR}"`"
-			FRAMEWORKINSTALLAPPSPREFIX="${MDIR}/Applications"
-
-			if test "${prefix}" = "NONE"; then
-				# User hasn't specified the 
-				# --prefix option, but wants to install
-				# the framework in a non-default location,
-				# ensure that the compatibility links get
-				# installed relative to that prefix as well
-				# instead of in /usr/local.
-				FRAMEWORKUNIXTOOLSPREFIX="${MDIR}"
-			fi
-			;;
-
-		*)
-			FRAMEWORKINSTALLAPPSPREFIX="/Applications"
-			;;
-		esac
-
-		prefix=$PYTHONFRAMEWORKINSTALLDIR/Versions/$VERSION
-
-		# Add files for Mac specific code to the list of output
-		# files:
-		AC_CONFIG_FILES(Mac/Makefile)
-		AC_CONFIG_FILES(Mac/PythonLauncher/Makefile)
-		AC_CONFIG_FILES(Mac/Resources/framework/Info.plist)
-		AC_CONFIG_FILES(Mac/Resources/app/Info.plist)
-	esac
-	],[
-	PYTHONFRAMEWORK=
-	PYTHONFRAMEWORKDIR=no-framework
-	PYTHONFRAMEWORKPREFIX=
-	PYTHONFRAMEWORKINSTALLDIR=
-	FRAMEWORKINSTALLFIRST=
-	FRAMEWORKINSTALLLAST=
-	FRAMEWORKALTINSTALLFIRST=
-	FRAMEWORKALTINSTALLLAST=
-	if test "x${prefix}" = "xNONE" ; then
-		FRAMEWORKUNIXTOOLSPREFIX="${ac_default_prefix}"
-	else
-		FRAMEWORKUNIXTOOLSPREFIX="${prefix}"
-	fi
-	enable_framework=
-
-])
-AC_SUBST(PYTHONFRAMEWORK)
-AC_SUBST(PYTHONFRAMEWORKIDENTIFIER)
-AC_SUBST(PYTHONFRAMEWORKDIR)
-AC_SUBST(PYTHONFRAMEWORKPREFIX)
-AC_SUBST(PYTHONFRAMEWORKINSTALLDIR)
-AC_SUBST(FRAMEWORKINSTALLFIRST)
-AC_SUBST(FRAMEWORKINSTALLLAST)
-AC_SUBST(FRAMEWORKALTINSTALLFIRST)
-AC_SUBST(FRAMEWORKALTINSTALLLAST)
-AC_SUBST(FRAMEWORKUNIXTOOLSPREFIX)
-AC_SUBST(FRAMEWORKINSTALLAPPSPREFIX)
-
-##AC_ARG_WITH(dyld,
-##            AS_HELP_STRING([--with-dyld],
-##                           [Use (OpenStep|Rhapsody) dynamic linker]))
-##
-# Set name for machine-dependent library files
-AC_SUBST(MACHDEP)
-AC_MSG_CHECKING(MACHDEP)
-if test -z "$MACHDEP"
-then
-	ac_sys_system=`uname -s`
-	if test "$ac_sys_system" = "AIX" \
-	-o "$ac_sys_system" = "UnixWare" -o "$ac_sys_system" = "OpenUNIX"; then
-		ac_sys_release=`uname -v`
-	else
-		ac_sys_release=`uname -r`
-	fi
-	ac_md_system=`echo $ac_sys_system |
-			   tr -d '[/ ]' | tr '[[A-Z]]' '[[a-z]]'`
-	ac_md_release=`echo $ac_sys_release |
-			   tr -d '[/ ]' | sed 's/^[[A-Z]]\.//' | sed 's/\..*//'`
-	MACHDEP="$ac_md_system$ac_md_release"
-
-	case $MACHDEP in
-	linux*) MACHDEP="linux2";;
-	cygwin*) MACHDEP="cygwin";;
-	darwin*) MACHDEP="darwin";;
-        irix646) MACHDEP="irix6";;
-	'')	MACHDEP="unknown";;
-	esac
-fi
-	
-# Some systems cannot stand _XOPEN_SOURCE being defined at all; they
-# disable features if it is defined, without any means to access these
-# features as extensions. For these systems, we skip the definition of
-# _XOPEN_SOURCE. Before adding a system to the list to gain access to
-# some feature, make sure there is no alternative way to access this
-# feature. Also, when using wildcards, make sure you have verified the
-# need for not defining _XOPEN_SOURCE on all systems matching the
-# wildcard, and that the wildcard does not include future systems
-# (which may remove their limitations).
-dnl quadrigraphs "@<:@" and "@:>@" produce "[" and "]" in the output
-case $ac_sys_system/$ac_sys_release in
-  # On OpenBSD, select(2) is not available if _XOPEN_SOURCE is defined,
-  # even though select is a POSIX function. Reported by J. Ribbens.
-  # Reconfirmed for OpenBSD 3.3 by Zachary Hamm, for 3.4 by Jason Ish.
-  # In addition, Stefan Krah confirms that issue #1244610 exists through
-  # OpenBSD 4.6, but is fixed in 4.7.
-  OpenBSD/2.* | OpenBSD/3.* | OpenBSD/4.@<:@0123456@:>@) 
-    define_xopen_source=no
-    # OpenBSD undoes our definition of __BSD_VISIBLE if _XOPEN_SOURCE is
-    # also defined. This can be overridden by defining _BSD_SOURCE
-    # As this has a different meaning on Linux, only define it on OpenBSD
-    AC_DEFINE(_BSD_SOURCE, 1, [Define on OpenBSD to activate all library features])
-    ;;
-  OpenBSD/*)
-    # OpenBSD undoes our definition of __BSD_VISIBLE if _XOPEN_SOURCE is
-    # also defined. This can be overridden by defining _BSD_SOURCE
-    # As this has a different meaning on Linux, only define it on OpenBSD
-    AC_DEFINE(_BSD_SOURCE, 1, [Define on OpenBSD to activate all library features])
-    ;;
-  # Defining _XOPEN_SOURCE on NetBSD version prior to the introduction of
-  # _NETBSD_SOURCE disables certain features (eg. setgroups). Reported by
-  # Marc Recht
-  NetBSD/1.5 | NetBSD/1.5.* | NetBSD/1.6 | NetBSD/1.6.* | NetBSD/1.6@<:@A-S@:>@)
-    define_xopen_source=no;;
-  # From the perspective of Solaris, _XOPEN_SOURCE is not so much a
-  # request to enable features supported by the standard as a request
-  # to disable features not supported by the standard.  The best way
-  # for Python to use Solaris is simply to leave _XOPEN_SOURCE out
-  # entirely and define __EXTENSIONS__ instead.
-  SunOS/*)
-    define_xopen_source=no;;
-  # On UnixWare 7, u_long is never defined with _XOPEN_SOURCE,
-  # but used in /usr/include/netinet/tcp.h. Reported by Tim Rice.
-  # Reconfirmed for 7.1.4 by Martin v. Loewis.
-  OpenUNIX/8.0.0| UnixWare/7.1.@<:@0-4@:>@)
-    define_xopen_source=no;;
-  # On OpenServer 5, u_short is never defined with _XOPEN_SOURCE,
-  # but used in struct sockaddr.sa_family. Reported by Tim Rice.
-  SCO_SV/3.2)
-    define_xopen_source=no;;
-  # On FreeBSD 4, the math functions C89 does not cover are never defined
-  # with _XOPEN_SOURCE and __BSD_VISIBLE does not re-enable them.
-  FreeBSD/4.*)
-    define_xopen_source=no;;
-  # On MacOS X 10.2, a bug in ncurses.h means that it craps out if 
-  # _XOPEN_EXTENDED_SOURCE is defined. Apparently, this is fixed in 10.3, which
-  # identifies itself as Darwin/7.*
-  # On Mac OS X 10.4, defining _POSIX_C_SOURCE or _XOPEN_SOURCE
-  # disables platform specific features beyond repair.
-  # On Mac OS X 10.3, defining _POSIX_C_SOURCE or _XOPEN_SOURCE 
-  # has no effect, don't bother defining them
-  Darwin/@<:@6789@:>@.*)
-    define_xopen_source=no;;
-  Darwin/1@<:@0-9@:>@.*)
-    define_xopen_source=no;;
-  # On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but
-  # used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined
-  # or has another value. By not (re)defining it, the defaults come in place.
-  AIX/4)
-    define_xopen_source=no;;
-  AIX/5)
-    if test `uname -r` -eq 1; then
-      define_xopen_source=no
-    fi
-    ;;
-  # On QNX 6.3.2, defining _XOPEN_SOURCE prevents netdb.h from
-  # defining NI_NUMERICHOST.
-  QNX/6.3.2)
-    define_xopen_source=no
-    ;;
-
-esac
-
-if test $define_xopen_source = yes
-then
-  AC_DEFINE(_XOPEN_SOURCE, 600, 
-            Define to the level of X/Open that your system supports)
-
-  # On Tru64 Unix 4.0F, defining _XOPEN_SOURCE also requires
-  # definition of _XOPEN_SOURCE_EXTENDED and _POSIX_C_SOURCE, or else
-  # several APIs are not declared. Since this is also needed in some
-  # cases for HP-UX, we define it globally.
-  AC_DEFINE(_XOPEN_SOURCE_EXTENDED, 1,
-   	    Define to activate Unix95-and-earlier features)
-
-  AC_DEFINE(_POSIX_C_SOURCE, 200112L, Define to activate features from IEEE Stds 1003.1-2001)
-  
-fi
-
-#
-# SGI compilers allow the specification of the both the ABI and the
-# ISA on the command line.  Depending on the values of these switches,
-# different and often incompatible code will be generated.
-#
-# The SGI_ABI variable can be used to modify the CC and LDFLAGS and
-# thus supply support for various ABI/ISA combinations.  The MACHDEP
-# variable is also adjusted.
-#
-AC_SUBST(SGI_ABI)
-if test ! -z "$SGI_ABI"
-then
-        CC="cc $SGI_ABI"
-        LDFLAGS="$SGI_ABI $LDFLAGS"
-        MACHDEP=`echo "${MACHDEP}${SGI_ABI}" | sed 's/ *//g'`
-fi
-AC_MSG_RESULT($MACHDEP)
-
-# Record the configure-time value of MACOSX_DEPLOYMENT_TARGET,
-# it may influence the way we can build extensions, so distutils
-# needs to check it
-AC_SUBST(CONFIGURE_MACOSX_DEPLOYMENT_TARGET)
-AC_SUBST(EXPORT_MACOSX_DEPLOYMENT_TARGET)
-CONFIGURE_MACOSX_DEPLOYMENT_TARGET=
-EXPORT_MACOSX_DEPLOYMENT_TARGET='#'
-
-AC_MSG_CHECKING(machine type as reported by uname -m)
-ac_sys_machine=`uname -m`
-AC_MSG_RESULT($ac_sys_machine)
-
-# checks for alternative programs
-
-# compiler flags are generated in two sets, BASECFLAGS and OPT.  OPT is just
-# for debug/optimization stuff.  BASECFLAGS is for flags that are required
-# just to get things to compile and link.  Users are free to override OPT
-# when running configure or make.  The build should not break if they do.
-# BASECFLAGS should generally not be messed with, however.
-
-# XXX shouldn't some/most/all of this code be merged with the stuff later
-# on that fiddles with OPT and BASECFLAGS?
-AC_MSG_CHECKING(for --without-gcc)
-AC_ARG_WITH(gcc,
-            AS_HELP_STRING([--without-gcc], [never use gcc]),
-[
-	case $withval in
-	no)	CC=${CC:-cc}
-		without_gcc=yes;;
-	yes)	CC=gcc
-		without_gcc=no;;
-	*)	CC=$withval
-		without_gcc=$withval;;
-	esac], [
-	case $ac_sys_system in
-	AIX*)   CC=${CC:-xlc_r}
-		without_gcc=;;
-	*)	without_gcc=no;;
-	esac])
-AC_MSG_RESULT($without_gcc)
-
-# If the user switches compilers, we can't believe the cache
-if test ! -z "$ac_cv_prog_CC" -a ! -z "$CC" -a "$CC" != "$ac_cv_prog_CC"
-then
-  AC_MSG_ERROR([cached CC is different -- throw away $cache_file
-(it is also a good idea to do 'make clean' before compiling)])
-fi
-
-# Don't let AC_PROG_CC set the default CFLAGS. It normally sets -g -O2
-# when the compiler supports them, but we don't always want -O2, and
-# we set -g later.
-if test -z "$CFLAGS"; then
-        CFLAGS=
-fi
-AC_PROG_CC
-
-AC_SUBST(CXX)
-AC_SUBST(MAINCC)
-AC_MSG_CHECKING(for --with-cxx-main=<compiler>)
-AC_ARG_WITH(cxx_main,
-            AS_HELP_STRING([--with-cxx-main=<compiler>],
-                           [compile main() and link python executable with C++ compiler]),
-[
-	
-	case $withval in
-	no)	with_cxx_main=no
-		MAINCC='$(CC)';;
-	yes)	with_cxx_main=yes
-		MAINCC='$(CXX)';;
-	*)	with_cxx_main=yes
-		MAINCC=$withval
-		if test -z "$CXX"
-		then
-			CXX=$withval
-		fi;;
-	esac], [
-	with_cxx_main=no
-	MAINCC='$(CC)'
-])
-AC_MSG_RESULT($with_cxx_main)
-
-preset_cxx="$CXX"
-if test -z "$CXX"
-then
-        case "$CC" in
-        gcc)    AC_PATH_PROG(CXX, [g++], [g++], [notfound]) ;;
-        cc)     AC_PATH_PROG(CXX, [c++], [c++], [notfound]) ;;
-        esac
-	if test "$CXX" = "notfound"
-	then
-		CXX=""
-	fi
-fi
-if test -z "$CXX"
-then
-	AC_CHECK_PROGS(CXX, $CCC c++ g++ gcc CC cxx cc++ cl, notfound)
-	if test "$CXX" = "notfound"
-	then
-		CXX=""
-	fi
-fi
-if test "$preset_cxx" != "$CXX"
-then
-        AC_MSG_WARN([
-
-  By default, distutils will build C++ extension modules with "$CXX".
-  If this is not intended, then set CXX on the configure command line.
-  ])
-fi
-
-
-AC_MSG_CHECKING([for -Wl,--no-as-needed])
-save_LDFLAGS="$LDFLAGS"
-LDFLAGS="$LDFLAGS -Wl,--no-as-needed"
-AC_LINK_IFELSE([AC_LANG_PROGRAM([[]], [[]])],
-  [NO_AS_NEEDED="-Wl,--no-as-needed"
-   AC_MSG_RESULT([yes])],
-  [NO_AS_NEEDED=""
-   AC_MSG_RESULT([no])])
-LDFLAGS="$save_LDFLAGS"
-AC_SUBST(NO_AS_NEEDED)
-
-
-# checks for UNIX variants that set C preprocessor variables
-AC_USE_SYSTEM_EXTENSIONS
-
-# Check for unsupported systems
-case $ac_sys_system/$ac_sys_release in
-atheos*|Linux*/1*)
-   echo This system \($ac_sys_system/$ac_sys_release\) is no longer supported.
-   echo See README for details.
-   exit 1;;
-esac
-
-AC_EXEEXT
-AC_MSG_CHECKING(for --with-suffix)
-AC_ARG_WITH(suffix,
-            AS_HELP_STRING([--with-suffix=.exe], [set executable suffix]),
-[
-	case $withval in
-	no)	EXEEXT=;;
-	yes)	EXEEXT=.exe;;
-	*)	EXEEXT=$withval;;
-	esac])
-AC_MSG_RESULT($EXEEXT)
-
-# Test whether we're running on a non-case-sensitive system, in which
-# case we give a warning if no ext is given
-AC_SUBST(BUILDEXEEXT)
-AC_MSG_CHECKING(for case-insensitive build directory)
-if test ! -d CaseSensitiveTestDir; then
-mkdir CaseSensitiveTestDir
-fi
-
-if test -d casesensitivetestdir
-then
-    AC_MSG_RESULT(yes)
-    BUILDEXEEXT=.exe
-else
-	AC_MSG_RESULT(no)
-	BUILDEXEEXT=$EXEEXT
-fi
-rmdir CaseSensitiveTestDir
-
-case $MACHDEP in
-bsdos*)
-    case $CC in
-    gcc) CC="$CC -D_HAVE_BSDI";;
-    esac;;
-esac
-
-case $ac_sys_system in
-hp*|HP*)
-    case $CC in
-    cc|*/cc) CC="$CC -Ae";;
-    esac;;
-esac
-
-
-AC_SUBST(LIBRARY)
-AC_MSG_CHECKING(LIBRARY)
-if test -z "$LIBRARY"
-then
-	LIBRARY='libpython$(VERSION)$(ABIFLAGS).a'
-fi
-AC_MSG_RESULT($LIBRARY)
-
-# LDLIBRARY is the name of the library to link against (as opposed to the
-# name of the library into which to insert object files). BLDLIBRARY is also
-# the library to link against, usually. On Mac OS X frameworks, BLDLIBRARY
-# is blank as the main program is not linked directly against LDLIBRARY.
-# LDLIBRARYDIR is the path to LDLIBRARY, which is made in a subdirectory. On
-# systems without shared libraries, LDLIBRARY is the same as LIBRARY
-# (defined in the Makefiles). On Cygwin LDLIBRARY is the import library,
-# DLLLIBRARY is the shared (i.e., DLL) library.
-# 
-# RUNSHARED is used to run shared python without installed libraries
-#
-# INSTSONAME is the name of the shared library that will be used to install
-# on the system - some systems like version suffix, others don't
-#
-# LDVERSION is the shared library version number, normally the Python version
-# with the ABI build flags appended.
-AC_SUBST(LDLIBRARY)
-AC_SUBST(DLLLIBRARY)
-AC_SUBST(BLDLIBRARY)
-AC_SUBST(PY3LIBRARY)
-AC_SUBST(LDLIBRARYDIR)
-AC_SUBST(INSTSONAME)
-AC_SUBST(RUNSHARED)
-AC_SUBST(LDVERSION)
-LDLIBRARY="$LIBRARY"
-BLDLIBRARY='$(LDLIBRARY)'
-INSTSONAME='$(LDLIBRARY)'
-DLLLIBRARY=''
-LDLIBRARYDIR=''
-RUNSHARED=''
-LDVERSION="$VERSION"
-
-# LINKCC is the command that links the python executable -- default is $(CC).
-# If CXX is set, and if it is needed to link a main function that was
-# compiled with CXX, LINKCC is CXX instead. Always using CXX is undesirable:
-# python might then depend on the C++ runtime
-# This is altered for AIX in order to build the export list before 
-# linking.
-AC_SUBST(LINKCC)
-AC_MSG_CHECKING(LINKCC)
-if test -z "$LINKCC"
-then
-	LINKCC='$(PURIFY) $(MAINCC)'
-	case $ac_sys_system in
-	AIX*)
-	   exp_extra="\"\""
-	   if test $ac_sys_release -ge 5 -o \
-		   $ac_sys_release -eq 4 -a `uname -r` -ge 2 ; then
-	       exp_extra="."
-	   fi
-	   LINKCC="\$(srcdir)/Modules/makexp_aix Modules/python.exp $exp_extra \$(LIBRARY); $LINKCC";;
-	QNX*)
-	   # qcc must be used because the other compilers do not
-	   # support -N.
-	   LINKCC=qcc;;
-	esac
-fi
-AC_MSG_RESULT($LINKCC)
-
-# GNULD is set to "yes" if the GNU linker is used.  If this goes wrong
-# we make sure it defaults to "no": this is used by
-# distutils.unixccompiler to know if it should add --enable-new-dtags
-# to linker command lines, and failing to detect GNU ld simply results
-# in the same behaviour as before.
-AC_SUBST(GNULD)
-AC_MSG_CHECKING(for GNU ld)
-ac_prog=ld
-if test "$GCC" = yes; then
-       ac_prog=`$CC -print-prog-name=ld`
-fi
-case `"$ac_prog" -V 2>&1 < /dev/null` in
-      *GNU*)
-          GNULD=yes;;
-      *)
-          GNULD=no;;
-esac
-AC_MSG_RESULT($GNULD)
-
-AC_C_INLINE
-if test "$ac_cv_c_inline" != no ; then
-        AC_DEFINE(USE_INLINE, 1, [Define to use the C99 inline keyword.])
-        AC_SUBST(USE_INLINE)
-fi
-
-
-AC_MSG_CHECKING(for --enable-shared)
-AC_ARG_ENABLE(shared,
-              AS_HELP_STRING([--enable-shared], [disable/enable building shared python library]))
-
-if test -z "$enable_shared"
-then 
-  case $ac_sys_system in
-  CYGWIN*)
-    enable_shared="yes";;
-  *)
-    enable_shared="no";;
-  esac
-fi
-AC_MSG_RESULT($enable_shared)
-
-AC_MSG_CHECKING(for --enable-profiling)
-AC_ARG_ENABLE(profiling,
-              AS_HELP_STRING([--enable-profiling], [enable C-level code profiling]),
-[ac_save_cc="$CC"
- CC="$CC -pg"
- AC_RUN_IFELSE([AC_LANG_SOURCE([[int main() { return 0; }]])],
-   [ac_enable_profiling="yes"],
-   [ac_enable_profiling="no"],
-   [ac_enable_profiling="no"])
- CC="$ac_save_cc"])
-AC_MSG_RESULT($ac_enable_profiling)
-
-case "$ac_enable_profiling" in
-    "yes")
-	BASECFLAGS="-pg $BASECFLAGS"
-	LDFLAGS="-pg $LDFLAGS"
-    ;;
-esac
-
-AC_MSG_CHECKING(LDLIBRARY)
-
-# MacOSX framework builds need more magic. LDLIBRARY is the dynamic
-# library that we build, but we do not want to link against it (we
-# will find it with a -framework option). For this reason there is an
-# extra variable, BLDLIBRARY, against which Python and the extension
-# modules are linked. This is normally the same as
-# LDLIBRARY, but empty for MacOSX framework builds.
-if test "$enable_framework"
-then
-  LDLIBRARY='$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-  RUNSHARED=DYLD_FRAMEWORK_PATH="`pwd`:$DYLD_FRAMEWORK_PATH"
-  BLDLIBRARY=''
-else
-  BLDLIBRARY='$(LDLIBRARY)'
-fi  
-
-# Other platforms follow
-if test $enable_shared = "yes"; then
-  AC_DEFINE(Py_ENABLE_SHARED, 1, [Defined if Python is built as a shared library.])
-  case $ac_sys_system in
-    CYGWIN*)
-          LDLIBRARY='libpython$(LDVERSION).dll.a'
-          DLLLIBRARY='libpython$(LDVERSION).dll'
-          ;;
-    SunOS*)
-	  LDLIBRARY='libpython$(LDVERSION).so'
-	  BLDLIBRARY='-Wl,-R,$(LIBDIR) -L. -lpython$(LDVERSION)'
-	  RUNSHARED=LD_LIBRARY_PATH=`pwd`:${LD_LIBRARY_PATH}
-	  INSTSONAME="$LDLIBRARY".$SOVERSION
-	  if test "$with_pydebug" != yes
-	  then
-	      PY3LIBRARY=libpython3.so
-	  fi
-          ;;
-    Linux*|GNU*|NetBSD*|FreeBSD*|DragonFly*|OpenBSD*)
-	  LDLIBRARY='libpython$(LDVERSION).so'
-	  BLDLIBRARY='-L. -lpython$(LDVERSION)'
-	  RUNSHARED=LD_LIBRARY_PATH=`pwd`:${LD_LIBRARY_PATH}
-	  case $ac_sys_system in
-	      FreeBSD*)
-		SOVERSION=`echo $SOVERSION|cut -d "." -f 1`
-		;;
-	  esac
-	  INSTSONAME="$LDLIBRARY".$SOVERSION
-	  if test "$with_pydebug" != yes
-          then
-	      PY3LIBRARY=libpython3.so
-	  fi
-	  ;;
-    hp*|HP*)
-	  case `uname -m` in
-		ia64)
-			LDLIBRARY='libpython$(LDVERSION).so'
-			;;
-		*)
-			LDLIBRARY='libpython$(LDVERSION).sl'
-			;;
-	  esac
-	  BLDLIBRARY='-Wl,+b,$(LIBDIR) -L. -lpython$(LDVERSION)'
-	  RUNSHARED=SHLIB_PATH=`pwd`:${SHLIB_PATH}
-	  ;;
-    OSF*)
-	  LDLIBRARY='libpython$(LDVERSION).so'
-	  BLDLIBRARY='-rpath $(LIBDIR) -L. -lpython$(LDVERSION)'
-	  RUNSHARED=LD_LIBRARY_PATH=`pwd`:${LD_LIBRARY_PATH}
-	  ;;
-    Darwin*)
-    	LDLIBRARY='libpython$(LDVERSION).dylib'
-	BLDLIBRARY='-L. -lpython$(LDVERSION)'
-	RUNSHARED='DYLD_LIBRARY_PATH=`pwd`:${DYLD_LIBRARY_PATH}'
-	;;
-    AIX*)
-	LDLIBRARY='libpython$(LDVERSION).so'
-	RUNSHARED=LIBPATH=`pwd`:${LIBPATH}
-	;;
-
-  esac
-else # shared is disabled
-  case $ac_sys_system in
-    CYGWIN*)
-          BLDLIBRARY='$(LIBRARY)'
-          LDLIBRARY='libpython$(LDVERSION).dll.a'
-          ;;
-  esac
-fi
-
-AC_MSG_RESULT($LDLIBRARY)
-
-AC_PROG_RANLIB
-AC_SUBST(AR)
-AC_CHECK_PROGS(AR, ar aal, ar)
-
-# tweak ARFLAGS only if the user didn't set it on the command line
-AC_SUBST(ARFLAGS)
-if test -z "$ARFLAGS"
-then
-        ARFLAGS="rc"
-fi
-
-AC_SUBST(SVNVERSION)
-AC_CHECK_PROG(SVNVERSION, svnversion, found, not-found)
-if test $SVNVERSION = found
-then
-	SVNVERSION="svnversion \$(srcdir)"
-else
-	SVNVERSION="echo Unversioned directory"
-fi
-
-AC_SUBST(HGVERSION)
-AC_SUBST(HGTAG)
-AC_SUBST(HGBRANCH)
-
-if test -e $srcdir/.hg/dirstate
-then
-AC_CHECK_PROG(HAS_HG, hg, found, not-found)
-else
-HAS_HG=no-repository
-fi
-if test $HAS_HG = found
-then
-    HGVERSION="hg id -i \$(srcdir)"
-    HGTAG="hg id -t \$(srcdir)"
-    HGBRANCH="hg id -b \$(srcdir)"
-else
-    HGVERSION=""
-    HGTAG=""
-    HGBRANCH=""
-fi
-
-AC_SUBST(DISABLE_ASDLGEN)
-DISABLE_ASDLGEN=""
-AC_CHECK_PROG(HAS_PYTHON, python, found, not-found)
-if test $HAS_HG != found -o $HAS_PYTHON != found
-then
-    DISABLE_ASDLGEN="@echo hg: $HAS_HG, python: $HAS_PYTHON! cannot run \$(srcdir)/Parser/asdl_c.py #"
-fi
-
-
-case $MACHDEP in
-bsdos*|hp*|HP*)
-	# install -d does not work on BSDI or HP-UX
-	if test -z "$INSTALL"
-	then
-		INSTALL="${srcdir}/install-sh -c"
-	fi
-esac
-AC_PROG_INSTALL
-
-# Not every filesystem supports hard links
-AC_SUBST(LN)
-if test -z "$LN" ; then
-	case $ac_sys_system in
-		CYGWIN*) LN="ln -s";;
-		*) LN=ln;;
-	esac
-fi
-
-# For calculating the .so ABI tag.
-AC_SUBST(ABIFLAGS)
-ABIFLAGS=""
-
-# Check for --with-pydebug
-AC_MSG_CHECKING(for --with-pydebug)
-AC_ARG_WITH(pydebug, 
-            AS_HELP_STRING([--with-pydebug], [build with Py_DEBUG defined]),
-[
-if test "$withval" != no
-then 
-  AC_DEFINE(Py_DEBUG, 1, 
-  [Define if you want to build an interpreter with many run-time checks.]) 
-  AC_MSG_RESULT(yes); 
-  Py_DEBUG='true'
-  ABIFLAGS="${ABIFLAGS}d"
-else AC_MSG_RESULT(no); Py_DEBUG='false'
-fi],
-[AC_MSG_RESULT(no)])
-
-# XXX Shouldn't the code above that fiddles with BASECFLAGS and OPT be
-# merged with this chunk of code?
-
-# Optimizer/debugger flags
-# ------------------------
-# (The following bit of code is complicated enough - please keep things
-# indented properly.  Just pretend you're editing Python code. ;-)
-
-# There are two parallel sets of case statements below, one that checks to
-# see if OPT was set and one that does BASECFLAGS setting based upon
-# compiler and platform.  BASECFLAGS tweaks need to be made even if the
-# user set OPT.
-
-# tweak OPT based on compiler and platform, only if the user didn't set
-# it on the command line
-AC_SUBST(OPT)
-if test "${OPT-unset}" = "unset"
-then
-    case $GCC in
-    yes)
-        if test "$CC" != 'g++' ; then
-	    STRICT_PROTO="-Wstrict-prototypes"
-	fi
-        # For gcc 4.x we need to use -fwrapv so let's check if it's supported
-        if "$CC" -v --help 2>/dev/null |grep -- -fwrapv > /dev/null; then
-           WRAP="-fwrapv"
-        fi
-
-        # Clang also needs -fwrapv
-        case $CC in
-            *clang*) WRAP="-fwrapv"
-            ;;
-        esac
-
-	case $ac_cv_prog_cc_g in
-	yes)
-	    if test "$Py_DEBUG" = 'true' ; then
-		# Optimization messes up debuggers, so turn it off for
-		# debug builds.
-		OPT="-g -O0 -Wall $STRICT_PROTO"
-	    else
-		OPT="-g $WRAP -O3 -Wall $STRICT_PROTO"
-	    fi
-	    ;;
-	*)
-	    OPT="-O3 -Wall $STRICT_PROTO"
-	    ;;
-	esac
-	case $ac_sys_system in
-	    SCO_SV*) OPT="$OPT -m486 -DSCO5"
-	    ;;
-        esac
-	;;
-
-    *)
-	OPT="-O"
-	;;
-    esac
-fi
-
-AC_SUBST(BASECFLAGS)
-
-# The -arch flags for universal builds on OSX
-UNIVERSAL_ARCH_FLAGS=
-AC_SUBST(UNIVERSAL_ARCH_FLAGS)
-
-# tweak BASECFLAGS based on compiler and platform
-case $GCC in
-yes)
-    # Python doesn't violate C99 aliasing rules, but older versions of
-    # GCC produce warnings for legal Python code.  Enable
-    # -fno-strict-aliasing on versions of GCC that support but produce
-    # warnings.  See Issue3326
-    AC_MSG_CHECKING(whether $CC accepts and needs -fno-strict-aliasing)
-     ac_save_cc="$CC"
-     CC="$CC -fno-strict-aliasing"
-     save_CFLAGS="$CFLAGS"
-     AC_CACHE_VAL(ac_cv_no_strict_aliasing,
-       AC_COMPILE_IFELSE(
-         [
-	   AC_LANG_PROGRAM([[]], [[]])
-	 ],[
-	   CC="$ac_save_cc -fstrict-aliasing"
-           CFLAGS="$CFLAGS -Werror -Wstrict-aliasing"
-           AC_COMPILE_IFELSE(
-	     [
-	       AC_LANG_PROGRAM([[void f(int **x) {}]],
-	         [[double *x; f((int **) &x);]])
-	     ],[
-	       ac_cv_no_strict_aliasing=no
-	     ],[
-               ac_cv_no_strict_aliasing=yes
-	     ])
-	 ],[
-	   ac_cv_no_strict_aliasing=no
-	 ]))
-     CFLAGS="$save_CFLAGS"
-     CC="$ac_save_cc"
-    AC_MSG_RESULT($ac_cv_no_strict_aliasing)
-    if test $ac_cv_no_strict_aliasing = yes
-    then
-      BASECFLAGS="$BASECFLAGS -fno-strict-aliasing"
-    fi
-
-    # if using gcc on alpha, use -mieee to get (near) full IEEE 754
-    # support.  Without this, treatment of subnormals doesn't follow
-    # the standard.
-    case $ac_sys_machine in
-         alpha*)
-                BASECFLAGS="$BASECFLAGS -mieee"
-                ;;
-    esac
-
-    case $ac_sys_system in
-	SCO_SV*)
-	    BASECFLAGS="$BASECFLAGS -m486 -DSCO5"
-	    ;;
-	# is there any other compiler on Darwin besides gcc?
-	Darwin*)
-	    # -Wno-long-double, -no-cpp-precomp, and -mno-fused-madd
-	    # used to be here, but non-Apple gcc doesn't accept them.
-            if test "${CC}" = gcc
-	    then
-		AC_MSG_CHECKING(which compiler should be used)
-		case "${UNIVERSALSDK}" in
-		*/MacOSX10.4u.sdk)
-			# Build using 10.4 SDK, force usage of gcc when the 
-			# compiler is gcc, otherwise the user will get very
-			# confusing error messages when building on OSX 10.6
-			CC=gcc-4.0
-			CPP=cpp-4.0
-			;;
-		esac
-		AC_MSG_RESULT($CC)
-	    fi
-
-
-	    if test "${enable_universalsdk}"; then
-		UNIVERSAL_ARCH_FLAGS=""
-	        if test "$UNIVERSAL_ARCHS" = "32-bit" ; then
-		   UNIVERSAL_ARCH_FLAGS="-arch ppc -arch i386"
-		   ARCH_RUN_32BIT=""
-		   LIPO_32BIT_FLAGS=""
-	         elif test "$UNIVERSAL_ARCHS" = "64-bit" ; then
-		   UNIVERSAL_ARCH_FLAGS="-arch ppc64 -arch x86_64"
-		   LIPO_32BIT_FLAGS=""
-		   ARCH_RUN_32BIT="true"
-
-	         elif test "$UNIVERSAL_ARCHS" = "all" ; then
-		   UNIVERSAL_ARCH_FLAGS="-arch i386 -arch ppc -arch ppc64 -arch x86_64"
-		   LIPO_32BIT_FLAGS="-extract ppc7400 -extract i386"
-		   ARCH_RUN_32BIT="/usr/bin/arch -i386 -ppc"
-
-	         elif test "$UNIVERSAL_ARCHS" = "intel" ; then
-		   UNIVERSAL_ARCH_FLAGS="-arch i386 -arch x86_64"
-		   LIPO_32BIT_FLAGS="-extract i386"
-		   ARCH_RUN_32BIT="/usr/bin/arch -i386"
-
-	         elif test "$UNIVERSAL_ARCHS" = "3-way" ; then
-		   UNIVERSAL_ARCH_FLAGS="-arch i386 -arch ppc -arch x86_64"
-		   LIPO_32BIT_FLAGS="-extract ppc7400 -extract i386"
-		   ARCH_RUN_32BIT="/usr/bin/arch -i386 -ppc"
-
-		 else
-	           AC_MSG_ERROR([proper usage is --with-universal-arch=32-bit|64-bit|all|intel|3-way])
-
-		 fi
-
-
-		CFLAGS="${UNIVERSAL_ARCH_FLAGS} -isysroot ${UNIVERSALSDK} ${CFLAGS}"
-		if test "${UNIVERSALSDK}" != "/"
-		then
-			CFLAGS="-isysroot ${UNIVERSALSDK} ${CFLAGS}"
-			LDFLAGS="-isysroot ${UNIVERSALSDK} ${LDFLAGS}"
-			CPPFLAGS="-isysroot ${UNIVERSALSDK} ${CPPFLAGS}"
-		fi
-	    fi
-
-	    # Calculate the right deployment target for this build.
-	    #
-	    cur_target=`sw_vers -productVersion | sed 's/\(10\.[[0-9]]*\).*/\1/'`
-	    if test ${cur_target} '>' 10.2; then
-		    cur_target=10.3
-		    if test ${enable_universalsdk}; then
-			    if test "${UNIVERSAL_ARCHS}" = "all"; then
-				    # Ensure that the default platform for a 
-				    # 4-way universal build is OSX 10.5, 
-				    # that's the first OS release where 
-				    # 4-way builds make sense.
-				    cur_target='10.5'
-
-			    elif test "${UNIVERSAL_ARCHS}" = "3-way"; then
-				    cur_target='10.5'
-
-			    elif test "${UNIVERSAL_ARCHS}" = "intel"; then
-				    cur_target='10.5'
-
-			    elif test "${UNIVERSAL_ARCHS}" = "64-bit"; then
-				    cur_target='10.5'
-			    fi
-		    else
-			    if test `/usr/bin/arch` = "i386"; then
-				    # On Intel macs default to a deployment
-				    # target of 10.4, that's the first OSX
-				    # release with Intel support.
-				    cur_target="10.4"
-			    fi
-		    fi
-	    fi
-	    CONFIGURE_MACOSX_DEPLOYMENT_TARGET=${MACOSX_DEPLOYMENT_TARGET-${cur_target}}
-	    
-	    # Make sure that MACOSX_DEPLOYMENT_TARGET is set in the 
-	    # environment with a value that is the same as what we'll use
-	    # in the Makefile to ensure that we'll get the same compiler
-	    # environment during configure and build time.
-	    MACOSX_DEPLOYMENT_TARGET="$CONFIGURE_MACOSX_DEPLOYMENT_TARGET"
-	    export MACOSX_DEPLOYMENT_TARGET
-	    EXPORT_MACOSX_DEPLOYMENT_TARGET=''
-
-	    ;;
-	OSF*)
-	    BASECFLAGS="$BASECFLAGS -mieee"
-	    ;;
-    esac
-    ;;
-
-*)
-    case $ac_sys_system in
-    OpenUNIX*|UnixWare*)
-	BASECFLAGS="$BASECFLAGS -K pentium,host,inline,loop_unroll,alloca "
-	;;
-    OSF*)
-	BASECFLAGS="$BASECFLAGS -ieee -std"
-    	;;
-    SCO_SV*)
-	BASECFLAGS="$BASECFLAGS -belf -Ki486 -DSCO5"
-	;;
-    esac
-    ;;
-esac
-
-if test "$Py_DEBUG" = 'true'; then
-  :
-else
-  OPT="-DNDEBUG $OPT"
-fi
-
-if test "$ac_arch_flags"
-then
-	BASECFLAGS="$BASECFLAGS $ac_arch_flags"
-fi
-
-# Check whether GCC supports PyArg_ParseTuple format
-if test "$GCC" = "yes"
-then
-  AC_MSG_CHECKING(whether gcc supports ParseTuple __format__)
-  save_CFLAGS=$CFLAGS
-  CFLAGS="$CFLAGS -Werror"
-  AC_COMPILE_IFELSE([
-    AC_LANG_PROGRAM([[void f(char*,...)__attribute((format(PyArg_ParseTuple, 1, 2)));]], [[]])
-  ],[
-    AC_DEFINE(HAVE_ATTRIBUTE_FORMAT_PARSETUPLE, 1,
-      [Define if GCC supports __attribute__((format(PyArg_ParseTuple, 2, 3)))])
-    AC_MSG_RESULT(yes)
-  ],[
-    AC_MSG_RESULT(no)
-  ])
-  CFLAGS=$save_CFLAGS
-fi
-
-# On some compilers, pthreads are available without further options
-# (e.g. MacOS X). On some of these systems, the compiler will not
-# complain if unaccepted options are passed (e.g. gcc on Mac OS X).
-# So we have to see first whether pthreads are available without
-# options before we can check whether -Kpthread improves anything.
-AC_MSG_CHECKING(whether pthreads are available without options)
-AC_CACHE_VAL(ac_cv_pthread_is_default,
-[AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <pthread.h>
-
-void* routine(void* p){return NULL;}
-
-int main(){
-  pthread_t p;
-  if(pthread_create(&p,NULL,routine,NULL)!=0)
-    return 1;
-  (void)pthread_detach(p);
-  return 0;
-}
-]])],[
-  ac_cv_pthread_is_default=yes
-  ac_cv_kthread=no
-  ac_cv_pthread=no
-],[ac_cv_pthread_is_default=no],[ac_cv_pthread_is_default=no])
-])
-AC_MSG_RESULT($ac_cv_pthread_is_default)
-
-
-if test $ac_cv_pthread_is_default = yes 
-then
-  ac_cv_kpthread=no
-else
-# -Kpthread, if available, provides the right #defines
-# and linker options to make pthread_create available
-# Some compilers won't report that they do not support -Kpthread,
-# so we need to run a program to see whether it really made the
-# function available.
-AC_MSG_CHECKING(whether $CC accepts -Kpthread)
-AC_CACHE_VAL(ac_cv_kpthread,
-[ac_save_cc="$CC"
-CC="$CC -Kpthread"
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <pthread.h>
-
-void* routine(void* p){return NULL;}
-
-int main(){
-  pthread_t p;
-  if(pthread_create(&p,NULL,routine,NULL)!=0)
-    return 1;
-  (void)pthread_detach(p);
-  return 0;
-}
-]])],[ac_cv_kpthread=yes],[ac_cv_kpthread=no],[ac_cv_kpthread=no])
-CC="$ac_save_cc"])
-AC_MSG_RESULT($ac_cv_kpthread)
-fi
-
-if test $ac_cv_kpthread = no -a $ac_cv_pthread_is_default = no
-then
-# -Kthread, if available, provides the right #defines
-# and linker options to make pthread_create available
-# Some compilers won't report that they do not support -Kthread,
-# so we need to run a program to see whether it really made the
-# function available.
-AC_MSG_CHECKING(whether $CC accepts -Kthread)
-AC_CACHE_VAL(ac_cv_kthread,
-[ac_save_cc="$CC"
-CC="$CC -Kthread"
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <pthread.h>
-
-void* routine(void* p){return NULL;}
-
-int main(){
-  pthread_t p;
-  if(pthread_create(&p,NULL,routine,NULL)!=0)
-    return 1;
-  (void)pthread_detach(p);
-  return 0;
-}
-]])],[ac_cv_kthread=yes],[ac_cv_kthread=no],[ac_cv_kthread=no])
-CC="$ac_save_cc"])
-AC_MSG_RESULT($ac_cv_kthread)
-fi
-
-if test $ac_cv_kthread = no -a $ac_cv_pthread_is_default = no
-then
-# -pthread, if available, provides the right #defines
-# and linker options to make pthread_create available
-# Some compilers won't report that they do not support -pthread,
-# so we need to run a program to see whether it really made the
-# function available.
-AC_MSG_CHECKING(whether $CC accepts -pthread)
-AC_CACHE_VAL(ac_cv_thread,
-[ac_save_cc="$CC"
-CC="$CC -pthread"
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <pthread.h>
-
-void* routine(void* p){return NULL;}
-
-int main(){
-  pthread_t p;
-  if(pthread_create(&p,NULL,routine,NULL)!=0)
-    return 1;
-  (void)pthread_detach(p);
-  return 0;
-}
-]])],[ac_cv_pthread=yes],[ac_cv_pthread=no],[ac_cv_pthread=no])
-CC="$ac_save_cc"])
-AC_MSG_RESULT($ac_cv_pthread)
-fi
-
-# If we have set a CC compiler flag for thread support then
-# check if it works for CXX, too.
-ac_cv_cxx_thread=no
-if test ! -z "$CXX"
-then
-AC_MSG_CHECKING(whether $CXX also accepts flags for thread support)
-ac_save_cxx="$CXX"
-
-if test "$ac_cv_kpthread" = "yes"
-then
-  CXX="$CXX -Kpthread"  
-  ac_cv_cxx_thread=yes
-elif test "$ac_cv_kthread" = "yes"
-then
-  CXX="$CXX -Kthread"
-  ac_cv_cxx_thread=yes
-elif test "$ac_cv_pthread" = "yes"
-then 
-  CXX="$CXX -pthread"
-  ac_cv_cxx_thread=yes
-fi
-
-if test $ac_cv_cxx_thread = yes
-then
-  echo 'void foo();int main(){foo();}void foo(){}' > conftest.$ac_ext
-  $CXX -c conftest.$ac_ext 2>&5
-  if $CXX -o conftest$ac_exeext conftest.$ac_objext 2>&5 \
-     && test -s conftest$ac_exeext && ./conftest$ac_exeext
-  then
-    ac_cv_cxx_thread=yes
-  else
-    ac_cv_cxx_thread=no
-  fi
-  rm -fr conftest*
-fi
-AC_MSG_RESULT($ac_cv_cxx_thread)
-fi
-CXX="$ac_save_cxx"
-
-dnl # check for ANSI or K&R ("traditional") preprocessor
-dnl AC_MSG_CHECKING(for C preprocessor type)
-dnl AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-dnl #define spam(name, doc) {#name, &name, #name "() -- " doc}
-dnl int foo;
-dnl struct {char *name; int *addr; char *doc;} desc = spam(foo, "something");
-dnl ]], [[;]])],[cpp_type=ansi],[AC_DEFINE(HAVE_OLD_CPP) cpp_type=traditional])
-dnl AC_MSG_RESULT($cpp_type)
-
-# checks for header files
-AC_HEADER_STDC
-AC_CHECK_HEADERS(asm/types.h conio.h curses.h direct.h dlfcn.h errno.h \
-fcntl.h grp.h \
-ieeefp.h io.h langinfo.h libintl.h ncurses.h poll.h process.h pthread.h \
-shadow.h signal.h stdint.h stropts.h termios.h thread.h \
-unistd.h utime.h \
-sys/audioio.h sys/bsdtty.h sys/epoll.h sys/event.h sys/file.h sys/loadavg.h \
-sys/lock.h sys/mkdev.h sys/modem.h \
-sys/param.h sys/poll.h sys/select.h sys/socket.h sys/statvfs.h sys/stat.h \
-sys/syscall.h sys/termio.h sys/time.h \
-sys/times.h sys/types.h sys/un.h sys/utsname.h sys/wait.h pty.h libutil.h \
-sys/resource.h netpacket/packet.h sysexits.h bluetooth.h \
-bluetooth/bluetooth.h linux/tipc.h spawn.h util.h)
-AC_HEADER_DIRENT
-AC_HEADER_MAJOR
-
-# On Solaris, term.h requires curses.h
-AC_CHECK_HEADERS(term.h,,,[
-#ifdef HAVE_CURSES_H
-#include <curses.h>
-#endif
-])
-
-# On Linux, netlink.h requires asm/types.h
-AC_CHECK_HEADERS(linux/netlink.h,,,[
-#ifdef HAVE_ASM_TYPES_H
-#include <asm/types.h>
-#endif
-#ifdef HAVE_SYS_SOCKET_H
-#include <sys/socket.h>
-#endif
-])
-
-# checks for typedefs
-was_it_defined=no
-AC_MSG_CHECKING(for clock_t in time.h)
-AC_EGREP_HEADER(clock_t, time.h, was_it_defined=yes, [
-    AC_DEFINE(clock_t, long, [Define to 'long' if <time.h> doesn't define.])
-])
-AC_MSG_RESULT($was_it_defined)
-
-# Check whether using makedev requires defining _OSF_SOURCE
-AC_MSG_CHECKING(for makedev)
-AC_LINK_IFELSE([AC_LANG_PROGRAM([[
-#if defined(MAJOR_IN_MKDEV)
-#include <sys/mkdev.h>
-#elif defined(MAJOR_IN_SYSMACROS)
-#include <sys/sysmacros.h>
-#else
-#include <sys/types.h>
-#endif
-]], [[
-  makedev(0, 0) ]])
-],[ac_cv_has_makedev=yes],[ac_cv_has_makedev=no])
-if test "$ac_cv_has_makedev" = "no"; then
-    # we didn't link, try if _OSF_SOURCE will allow us to link
-    AC_LINK_IFELSE([AC_LANG_PROGRAM([[
-#define _OSF_SOURCE 1
-#include <sys/types.h>
-    ]],
-    [[ makedev(0, 0) ]])],
-    [ac_cv_has_makedev=yes],
-    [ac_cv_has_makedev=no])
-    if test "$ac_cv_has_makedev" = "yes"; then
-        AC_DEFINE(_OSF_SOURCE, 1, [Define _OSF_SOURCE to get the makedev macro.])
-    fi
-fi
-AC_MSG_RESULT($ac_cv_has_makedev)
-if test "$ac_cv_has_makedev" = "yes"; then
-    AC_DEFINE(HAVE_MAKEDEV, 1, [Define this if you have the makedev macro.])
-fi
-
-# Enabling LFS on Solaris (2.6 to 9) with gcc 2.95 triggers a bug in
-# the system headers: If _XOPEN_SOURCE and _LARGEFILE_SOURCE are
-# defined, but the compiler does not support pragma redefine_extname,
-# and _LARGEFILE64_SOURCE is not defined, the headers refer to 64-bit
-# structures (such as rlimit64) without declaring them. As a
-# work-around, disable LFS on such configurations
-
-use_lfs=yes
-AC_MSG_CHECKING(Solaris LFS bug)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#define _LARGEFILE_SOURCE 1
-#define _FILE_OFFSET_BITS 64
-#include <sys/resource.h>
-]], [[struct rlimit foo;]])],[sol_lfs_bug=no],[sol_lfs_bug=yes])
-AC_MSG_RESULT($sol_lfs_bug)
-if test "$sol_lfs_bug" = "yes"; then
-  use_lfs=no
-fi
-
-if test "$use_lfs" = "yes"; then
-# Two defines needed to enable largefile support on various platforms
-# These may affect some typedefs
-case $ac_sys_system/$ac_sys_release in
-AIX*)
-    AC_DEFINE(_LARGE_FILES, 1, 
-    [This must be defined on AIX systems to enable large file support.])
-    ;;
-esac
-AC_DEFINE(_LARGEFILE_SOURCE, 1, 
-[This must be defined on some systems to enable large file support.])
-AC_DEFINE(_FILE_OFFSET_BITS, 64,
-[This must be set to 64 on some systems to enable large file support.])
-fi
-
-# Add some code to confdefs.h so that the test for off_t works on SCO
-cat >> confdefs.h <<\EOF
-#if defined(SCO_DS)
-#undef _OFF_T
-#endif
-EOF
-
-# Type availability checks
-AC_TYPE_MODE_T
-AC_TYPE_OFF_T
-AC_TYPE_PID_T
-AC_DEFINE_UNQUOTED([RETSIGTYPE],[void],[assume C89 semantics that RETSIGTYPE is always void])
-AC_TYPE_SIZE_T
-AC_TYPE_UID_T
-AC_TYPE_UINT32_T
-AC_TYPE_UINT64_T
-AC_TYPE_INT32_T
-AC_TYPE_INT64_T
-AC_CHECK_TYPE(ssize_t,
-  AC_DEFINE(HAVE_SSIZE_T, 1, [Define if your compiler provides ssize_t]),,)
-
-# Sizes of various common basic types
-# ANSI C requires sizeof(char) == 1, so no need to check it
-AC_CHECK_SIZEOF(int, 4)
-AC_CHECK_SIZEOF(long, 4)
-AC_CHECK_SIZEOF(void *, 4)
-AC_CHECK_SIZEOF(short, 2)
-AC_CHECK_SIZEOF(float, 4)
-AC_CHECK_SIZEOF(double, 8)
-AC_CHECK_SIZEOF(fpos_t, 4)
-AC_CHECK_SIZEOF(size_t, 4)
-AC_CHECK_SIZEOF(pid_t, 4)
-
-AC_MSG_CHECKING(for long long support)
-have_long_long=no
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[long long x; x = (long long)0;]])],[
-  AC_DEFINE(HAVE_LONG_LONG, 1, [Define this if you have the type long long.]) 
-  have_long_long=yes
-],[])
-AC_MSG_RESULT($have_long_long)
-if test "$have_long_long" = yes ; then
-AC_CHECK_SIZEOF(long long, 8)
-fi
-
-AC_MSG_CHECKING(for long double support)
-have_long_double=no
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[long double x; x = (long double)0;]])],[
-  AC_DEFINE(HAVE_LONG_DOUBLE, 1, [Define this if you have the type long double.]) 
-  have_long_double=yes
-],[])
-AC_MSG_RESULT($have_long_double)
-if test "$have_long_double" = yes ; then
-AC_CHECK_SIZEOF(long double, 16)
-fi
-
-
-AC_MSG_CHECKING(for _Bool support)
-have_c99_bool=no
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[_Bool x; x = (_Bool)0;]])],[
-  AC_DEFINE(HAVE_C99_BOOL, 1, [Define this if you have the type _Bool.]) 
-  have_c99_bool=yes
-],[])
-AC_MSG_RESULT($have_c99_bool)
-if test "$have_c99_bool" = yes ; then
-AC_CHECK_SIZEOF(_Bool, 1)
-fi
-
-AC_CHECK_TYPES(uintptr_t, 
-   [AC_CHECK_SIZEOF(uintptr_t, 4)], 
-   [], [#ifdef HAVE_STDINT_H
-        #include <stdint.h>
-        #endif
-        #ifdef HAVE_INTTYPES_H
-        #include <inttypes.h>
-        #endif])
-
-AC_CHECK_SIZEOF(off_t, [], [
-#ifdef HAVE_SYS_TYPES_H
-#include <sys/types.h>
-#endif
-])
-
-AC_MSG_CHECKING(whether to enable large file support)
-if test "$have_long_long" = yes
-then
-if test "$ac_cv_sizeof_off_t" -gt "$ac_cv_sizeof_long" -a \
-	"$ac_cv_sizeof_long_long" -ge "$ac_cv_sizeof_off_t"; then
-  AC_DEFINE(HAVE_LARGEFILE_SUPPORT, 1, 
-  [Defined to enable large file support when an off_t is bigger than a long
-   and long long is available and at least as big as an off_t. You may need
-   to add some flags for configuration and compilation to enable this mode.
-   (For Solaris and Linux, the necessary defines are already defined.)])
-  AC_MSG_RESULT(yes)
-else
-  AC_MSG_RESULT(no)
-fi
-else
-  AC_MSG_RESULT(no)
-fi
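HAVE_LARGEFILE_SUPPORT is only defined when off_t is wider than long while long long is at least as wide as off_t. A quick hypothetical check, not part of the patch, that shows the effect of the two largefile defines on a 32-bit platform:

#define _LARGEFILE_SOURCE 1
#define _FILE_OFFSET_BITS 64
#include <sys/types.h>
#include <stdio.h>

int main(void)
{
    printf("sizeof(long)=%zu sizeof(off_t)=%zu sizeof(long long)=%zu\n",
           sizeof(long), sizeof(off_t), sizeof(long long));
    return 0;
}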
-
-AC_CHECK_SIZEOF(time_t, [], [
-#ifdef HAVE_SYS_TYPES_H
-#include <sys/types.h>
-#endif
-#ifdef HAVE_TIME_H
-#include <time.h>
-#endif
-])
-
-# if have pthread_t then define SIZEOF_PTHREAD_T
-ac_save_cc="$CC"
-if test "$ac_cv_kpthread" = "yes"
-then CC="$CC -Kpthread"
-elif test "$ac_cv_kthread" = "yes"
-then CC="$CC -Kthread"
-elif test "$ac_cv_pthread" = "yes"
-then CC="$CC -pthread"
-fi
-
-AC_MSG_CHECKING(for pthread_t)
-have_pthread_t=no
-AC_COMPILE_IFELSE([
-  AC_LANG_PROGRAM([[#include <pthread.h>]], [[pthread_t x; x = *(pthread_t*)0;]])
-],[have_pthread_t=yes],[])
-AC_MSG_RESULT($have_pthread_t)
-if test "$have_pthread_t" = yes ; then
-  AC_CHECK_SIZEOF(pthread_t, [], [
-#ifdef HAVE_PTHREAD_H
-#include <pthread.h>
-#endif
-  ])
-fi
-CC="$ac_save_cc"
-
-AC_SUBST(OTHER_LIBTOOL_OPT)
-case $ac_sys_system/$ac_sys_release in
-  Darwin/@<:@01567@:>@\..*) 
-    OTHER_LIBTOOL_OPT="-prebind -seg1addr 0x10000000"
-    ;;
-  Darwin/*)
-    OTHER_LIBTOOL_OPT=""
-    ;;
-esac
-
-
-ARCH_RUN_32BIT=""
-AC_SUBST(LIBTOOL_CRUFT)
-case $ac_sys_system/$ac_sys_release in
-  Darwin/@<:@01567@:>@\..*) 
-    LIBTOOL_CRUFT="-framework System -lcc_dynamic"
-    if test "${enable_universalsdk}"; then
-	    :
-    else
-        LIBTOOL_CRUFT="${LIBTOOL_CRUFT} -arch_only `/usr/bin/arch`"
-    fi
-    LIBTOOL_CRUFT=$LIBTOOL_CRUFT' -install_name $(PYTHONFRAMEWORKINSTALLDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-    LIBTOOL_CRUFT=$LIBTOOL_CRUFT' -compatibility_version $(VERSION) -current_version $(VERSION)';;
-  Darwin/*)
-    gcc_version=`gcc -dumpversion`
-    if test ${gcc_version} '<' 4.0
-        then
-            LIBTOOL_CRUFT="-lcc_dynamic"
-        else 
-            LIBTOOL_CRUFT=""
-    fi
-    AC_RUN_IFELSE([AC_LANG_SOURCE([[
-    #include <unistd.h>
-    int main(int argc, char*argv[])
-    {
-      if (sizeof(long) == 4) {
-    	  return 0;
-      } else {
-      	  return 1;
-      }
-    }
-    ]])],[ac_osx_32bit=yes],[ac_osx_32bit=no],[ac_osx_32bit=yes])
-    
-    if test "${ac_osx_32bit}" = "yes"; then
-    	case `/usr/bin/arch` in
-    	i386) 
-    		MACOSX_DEFAULT_ARCH="i386" 
-    		;;
-    	ppc) 
-    		MACOSX_DEFAULT_ARCH="ppc" 
-    		;;
-    	*)
-    		AC_MSG_ERROR([Unexpected output of 'arch' on OSX])
-    		;;
-    	esac
-    else
-    	case `/usr/bin/arch` in
-    	i386) 
-    		MACOSX_DEFAULT_ARCH="x86_64" 
-    		;;
-    	ppc) 
-    		MACOSX_DEFAULT_ARCH="ppc64" 
-    		;;
-    	*)
-    		AC_MSG_ERROR([Unexpected output of 'arch' on OSX])
-    		;;
-    	esac
-
-	#ARCH_RUN_32BIT="true"
-    fi
-
-    LIBTOOL_CRUFT=$LIBTOOL_CRUFT" -lSystem -lSystemStubs -arch_only ${MACOSX_DEFAULT_ARCH}"
-    LIBTOOL_CRUFT=$LIBTOOL_CRUFT' -install_name $(PYTHONFRAMEWORKINSTALLDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-    LIBTOOL_CRUFT=$LIBTOOL_CRUFT' -compatibility_version $(VERSION) -current_version $(VERSION)';;
-esac
-
-AC_MSG_CHECKING(for --enable-framework)
-if test "$enable_framework"
-then
-	BASECFLAGS="$BASECFLAGS -fno-common -dynamic"
-	# -F. is needed to allow linking to the framework while 
-	# in the build location.
-	AC_DEFINE(WITH_NEXT_FRAMEWORK, 1, 
-         [Define if you want to produce an OpenStep/Rhapsody framework
-         (shared library plus accessory files).])
-	AC_MSG_RESULT(yes)
-	if test $enable_shared = "yes"
-	then
-		AC_MSG_ERROR([Specifying both --enable-shared and --enable-framework is not supported, use only --enable-framework instead])
-	fi
-else
-	AC_MSG_RESULT(no)
-fi
-
-AC_MSG_CHECKING(for dyld)
-case $ac_sys_system/$ac_sys_release in
-  Darwin/*)
-  	AC_DEFINE(WITH_DYLD, 1, 
-        [Define if you want to use the new-style (Openstep, Rhapsody, MacOS)
-         dynamic linker (dyld) instead of the old-style (NextStep) dynamic
-         linker (rld). Dyld is necessary to support frameworks.])
-  	AC_MSG_RESULT(always on for Darwin)
-  	;;
-  *)
-	AC_MSG_RESULT(no)
-	;;
-esac
-
-# Set info about shared libraries.
-AC_SUBST(SO)
-AC_SUBST(LDSHARED)
-AC_SUBST(LDCXXSHARED)
-AC_SUBST(BLDSHARED)
-AC_SUBST(CCSHARED)
-AC_SUBST(LINKFORSHARED)
-
-AC_DEFINE_UNQUOTED(SHLIB_EXT, "$SO", [Define this to be extension of shared libraries (including the dot!).])
-# LDSHARED is the ld *command* used to create shared library
-# -- "cc -G" on SunOS 5.x, "ld -shared" on IRIX 5
-# (Shared libraries in this instance are shared modules to be loaded into
-# Python, as opposed to building Python itself as a shared library.)
-AC_MSG_CHECKING(LDSHARED)
-if test -z "$LDSHARED"
-then
-	case $ac_sys_system/$ac_sys_release in
-	AIX*)
-		BLDSHARED="\$(srcdir)/Modules/ld_so_aix \$(CC) -bI:\$(srcdir)/Modules/python.exp"
-		LDSHARED="\$(BINLIBDEST)/config/ld_so_aix \$(CC) -bI:\$(BINLIBDEST)/config/python.exp"
-		;;
-	IRIX/5*) LDSHARED="ld -shared";;
-	IRIX*/6*) LDSHARED="ld ${SGI_ABI} -shared -all";;
-	SunOS/5*) 
-		if test "$GCC" = "yes" ; then
-			LDSHARED='$(CC) -shared'
-			LDCXXSHARED='$(CXX) -shared'
-		else
-			LDSHARED='$(CC) -G'
-			LDCXXSHARED='$(CXX) -G'
-		fi ;;
-	hp*|HP*)
-		if test "$GCC" = "yes" ; then
-			LDSHARED='$(CC) -shared'
-			LDCXXSHARED='$(CXX) -shared'
-		else
-			LDSHARED='ld -b'
-		fi ;;
-	OSF*) LDSHARED="ld -shared -expect_unresolved \"*\"";;
-	Darwin/1.3*)
-		LDSHARED='$(CC) -bundle'
-		LDCXXSHARED='$(CXX) -bundle'
-		if test "$enable_framework" ; then
-			# Link against the framework. All externals should be defined.
-			BLDSHARED="$LDSHARED "'$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-			LDSHARED="$LDSHARED "'$(PYTHONFRAMEWORKPREFIX)/$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-			LDCXXSHARED="$LDCXXSHARED "'$(PYTHONFRAMEWORKPREFIX)/$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-		else
-			# No framework. Ignore undefined symbols, assuming they come from Python
-			LDSHARED="$LDSHARED -undefined suppress"
-			LDCXXSHARED="$LDCXXSHARED -undefined suppress"
-		fi ;;
-	Darwin/1.4*|Darwin/5.*|Darwin/6.*)
-		LDSHARED='$(CC) -bundle'
-		LDCXXSHARED='$(CXX) -bundle'
-		if test "$enable_framework" ; then
-			# Link against the framework. All externals should be defined.
-			BLDSHARED="$LDSHARED "'$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-			LDSHARED="$LDSHARED "'$(PYTHONFRAMEWORKPREFIX)/$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-			LDCXXSHARED="$LDCXXSHARED "'$(PYTHONFRAMEWORKPREFIX)/$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-		else
-			# No framework, use the Python app as bundle-loader
-			BLDSHARED="$LDSHARED "'-bundle_loader $(BUILDPYTHON)'
-			LDSHARED="$LDSHARED "'-bundle_loader $(BINDIR)/python$(VERSION)$(EXE)'
-			LDCXXSHARED="$LDCXXSHARED "'-bundle_loader $(BINDIR)/python$(VERSION)$(EXE)'
-		fi ;;
-	Darwin/*)
-		# Use -undefined dynamic_lookup whenever possible (10.3 and later).
-		# This allows an extension to be used in any Python
-
-		if test ${MACOSX_DEPLOYMENT_TARGET} '>' 10.2
-		then
-			if test "${enable_universalsdk}"; then
-				LDFLAGS="${UNIVERSAL_ARCH_FLAGS} -isysroot ${UNIVERSALSDK} ${LDFLAGS}"
-			fi
-			LDSHARED='$(CC) -bundle -undefined dynamic_lookup'
-			LDCXXSHARED='$(CXX) -bundle -undefined dynamic_lookup'
-			BLDSHARED="$LDSHARED"
-		else
-			LDSHARED='$(CC) -bundle'
-			LDCXXSHARED='$(CXX) -bundle'
-			if test "$enable_framework" ; then
-				# Link against the framework. All externals should be defined.
-				BLDSHARED="$LDSHARED "'$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-				LDSHARED="$LDSHARED "'$(PYTHONFRAMEWORKPREFIX)/$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-				LDCXXSHARED="$LDCXXSHARED "'$(PYTHONFRAMEWORKPREFIX)/$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-			else
-				# No framework, use the Python app as bundle-loader
-				BLDSHARED="$LDSHARED "'-bundle_loader $(BUILDPYTHON)'
-				LDSHARED="$LDSHARED "'-bundle_loader $(BINDIR)/python$(VERSION)$(EXE)'
-				LDCXXSHARED="$LDCXXSHARED "'-bundle_loader $(BINDIR)/python$(VERSION)$(EXE)'
-			fi
-		fi
-		;;
-	Linux*|GNU*|QNX*)
-		LDSHARED='$(CC) -shared'
-		LDCXXSHARED='$(CXX) -shared';;
-	BSD/OS*/4*)
-		LDSHARED="gcc -shared"
-		LDCXXSHARED="g++ -shared";;
-	FreeBSD*)
-		if [[ "`$CC -dM -E - </dev/null | grep __ELF__`" != "" ]]
-		then
-			LDSHARED='$(CC) -shared'
-			LDCXXSHARED='$(CXX) -shared'
-		else
-			LDSHARED="ld -Bshareable"
-		fi;;
-	OpenBSD*)
-		if [[ "`$CC -dM -E - </dev/null | grep __ELF__`" != "" ]]
-		then
-				LDSHARED='$(CC) -shared $(CCSHARED)'
-				LDCXXSHARED='$(CXX) -shared $(CCSHARED)'
-		else
-				case `uname -r` in
-				[[01]].* | 2.[[0-7]] | 2.[[0-7]].*)
-				   LDSHARED="ld -Bshareable ${LDFLAGS}"
-				   ;;
-				*)
-				   LDSHARED='$(CC) -shared $(CCSHARED)'
-				   LDCXXSHARED='$(CXX) -shared $(CCSHARED)'
-				   ;;
-				esac
-		fi;;
-	NetBSD*|DragonFly*)
-		LDSHARED='$(CC) -shared'
-		LDCXXSHARED='$(CXX) -shared';;
-	OpenUNIX*|UnixWare*)
-		if test "$GCC" = "yes" ; then
-			LDSHARED='$(CC) -shared'
-			LDCXXSHARED='$(CXX) -shared'
-		else
-			LDSHARED='$(CC) -G'
-			LDCXXSHARED='$(CXX) -G'
-		fi;;
-	SCO_SV*)
-		LDSHARED='$(CC) -Wl,-G,-Bexport'
-		LDCXXSHARED='$(CXX) -Wl,-G,-Bexport';;
-	CYGWIN*)
-		LDSHARED="gcc -shared -Wl,--enable-auto-image-base"
-		LDCXXSHARED="g++ -shared -Wl,--enable-auto-image-base";;
-	*)	LDSHARED="ld";;
-	esac
-fi
-AC_MSG_RESULT($LDSHARED)
-LDCXXSHARED=${LDCXXSHARED-$LDSHARED}
-BLDSHARED=${BLDSHARED-$LDSHARED}
-# CCSHARED are the C *flags* used to create objects to go into a shared
-# library (module) -- this is only needed for a few systems
-AC_MSG_CHECKING(CCSHARED)
-if test -z "$CCSHARED"
-then
-	case $ac_sys_system/$ac_sys_release in
-	SunOS*) if test "$GCC" = yes;
-		then CCSHARED="-fPIC";
-		elif test `uname -p` = sparc;
-		then CCSHARED="-xcode=pic32";
-		else CCSHARED="-Kpic";
-		fi;;
-	hp*|HP*) if test "$GCC" = yes;
-		 then CCSHARED="-fPIC";
-		 else CCSHARED="+z";
-		 fi;;
-	Linux*|GNU*) CCSHARED="-fPIC";;
-	BSD/OS*/4*) CCSHARED="-fpic";;
-	FreeBSD*|NetBSD*|OpenBSD*|DragonFly*) CCSHARED="-fPIC";;
-	OpenUNIX*|UnixWare*)
-		if test "$GCC" = "yes"
-		then CCSHARED="-fPIC"
-		else CCSHARED="-KPIC"
-		fi;;
-	SCO_SV*)
-		if test "$GCC" = "yes"
-		then CCSHARED="-fPIC"
-		else CCSHARED="-Kpic -belf"
-		fi;;
-	IRIX*/6*)  case $CC in
-		   *gcc*) CCSHARED="-shared";;
-		   *) CCSHARED="";;
-		   esac;;
-	esac
-fi
-AC_MSG_RESULT($CCSHARED)
-# LINKFORSHARED are the flags passed to the $(CC) command that links
-# the python executable -- this is only needed for a few systems
-AC_MSG_CHECKING(LINKFORSHARED)
-if test -z "$LINKFORSHARED"
-then
-	case $ac_sys_system/$ac_sys_release in
-	AIX*)	LINKFORSHARED='-Wl,-bE:Modules/python.exp -lld';;
-	hp*|HP*)
-	    LINKFORSHARED="-Wl,-E -Wl,+s";;
-#	    LINKFORSHARED="-Wl,-E -Wl,+s -Wl,+b\$(BINLIBDEST)/lib-dynload";;
-	BSD/OS/4*) LINKFORSHARED="-Xlinker -export-dynamic";;
-	Linux*|GNU*) LINKFORSHARED="-Xlinker -export-dynamic";;
-	# -u libsys_s pulls in all symbols in libsys
-	Darwin/*) 
-		LINKFORSHARED="$extra_undefs -framework CoreFoundation"
-		if test "$enable_framework"
-		then
-			LINKFORSHARED="$LINKFORSHARED "'$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
-		fi
-		LINKFORSHARED="$LINKFORSHARED";;
-	OpenUNIX*|UnixWare*) LINKFORSHARED="-Wl,-Bexport";;
-	SCO_SV*) LINKFORSHARED="-Wl,-Bexport";;
-	ReliantUNIX*) LINKFORSHARED="-W1 -Blargedynsym";;
-	FreeBSD*|NetBSD*|OpenBSD*|DragonFly*) 
-		if [[ "`$CC -dM -E - </dev/null | grep __ELF__`" != "" ]]
-		then
-			LINKFORSHARED="-Wl,--export-dynamic"
-		fi;;
-	SunOS/5*) case $CC in
-		  *gcc*)
-		    if $CC -Xlinker --help 2>&1 | grep export-dynamic >/dev/null
-		    then
-			LINKFORSHARED="-Xlinker --export-dynamic"
-		    fi;;
-		  esac;;
-	CYGWIN*)
-		if test $enable_shared = "no"
-		then
-			LINKFORSHARED='-Wl,--out-implib=$(LDLIBRARY)'
-		fi;;
-	QNX*)
-		# -Wl,-E causes the symbols to be added to the dynamic
-		# symbol table so that they can be found when a module
-		# is loaded.  -N 2048K causes the stack size to be set
-		# to 2048 kilobytes so that the stack doesn't overflow
-		# when running test_compile.py.
-		LINKFORSHARED='-Wl,-E -N 2048K';;
-	esac
-fi
-AC_MSG_RESULT($LINKFORSHARED)
-
-
-AC_SUBST(CFLAGSFORSHARED)
-AC_MSG_CHECKING(CFLAGSFORSHARED)
-if test ! "$LIBRARY" = "$LDLIBRARY"
-then
-	case $ac_sys_system in
-	CYGWIN*)
-		# Cygwin needs CCSHARED when building extension DLLs
-		# but not when building the interpreter DLL.
-		CFLAGSFORSHARED='';;
-	*)
-		CFLAGSFORSHARED='$(CCSHARED)'
-	esac
-fi
-AC_MSG_RESULT($CFLAGSFORSHARED)
-
-# SHLIBS are libraries (except -lc and -lm) to link to the python shared
-# library (with --enable-shared).
-# For platforms on which shared libraries are not allowed to have unresolved
-# symbols, this must be set to $(LIBS) (expanded by make). We do this even
-# if it is not required, since it creates a dependency of the shared library
-# on LIBS. This, in turn, means that applications linking the shared libpython
-# don't need to link LIBS explicitly. The default should only be changed
-# on systems where this approach causes problems.
-AC_SUBST(SHLIBS)
-AC_MSG_CHECKING(SHLIBS)
-case "$ac_sys_system" in
-	*)
-		SHLIBS='$(LIBS)';;
-esac
-AC_MSG_RESULT($SHLIBS)
-
-
-# checks for libraries
-AC_CHECK_LIB(dl, dlopen)	# Dynamic linking for SunOS/Solaris and SYSV
-AC_CHECK_LIB(dld, shl_load)	# Dynamic linking for HP-UX
-
-# only check for sem_init if thread support is requested
-if test "$with_threads" = "yes" -o -z "$with_threads"; then
-    AC_SEARCH_LIBS(sem_init, pthread rt posix4) # 'Real Time' functions on Solaris
-						# posix4 on Solaris 2.6
-						# pthread (first!) on Linux
-fi
-
-# check if we need libintl for locale functions
-AC_CHECK_LIB(intl, textdomain,
-	[AC_DEFINE(WITH_LIBINTL, 1,
-	[Define to 1 if libintl is needed for locale functions.])
-        LIBS="-lintl $LIBS"])
-
-# checks for system dependent C++ extensions support
-case "$ac_sys_system" in
-	AIX*)	AC_MSG_CHECKING(for genuine AIX C++ extensions support)
-		AC_LINK_IFELSE([
-		  AC_LANG_PROGRAM([[#include <load.h>]],
-				  [[loadAndInit("", 0, "")]])
-		],[
-		  AC_DEFINE(AIX_GENUINE_CPLUSPLUS, 1,
-                      [Define for AIX if your compiler is a genuine IBM xlC/xlC_r
-                       and you want support for AIX C++ shared extension modules.])
-		  AC_MSG_RESULT(yes)
-		],[
-		  AC_MSG_RESULT(no)
-		]);;
-	*) ;;
-esac
-
-# Most SVR4 platforms (e.g. Solaris) need -lsocket and -lnsl.
-AC_CHECK_LIB(nsl, t_open, [LIBS="-lnsl $LIBS"]) # SVR4
-AC_CHECK_LIB(socket, socket, [LIBS="-lsocket $LIBS"], [], $LIBS) # SVR4 sockets
-
-AC_MSG_CHECKING(for --with-libs)
-AC_ARG_WITH(libs,
-            AS_HELP_STRING([--with-libs='lib1 ...'], [link against additional libs]),
-[
-AC_MSG_RESULT($withval)
-LIBS="$withval $LIBS"
-],
-[AC_MSG_RESULT(no)])
-
-AC_PATH_TOOL([PKG_CONFIG], [pkg-config])
-
-# Check for use of the system expat library
-AC_MSG_CHECKING(for --with-system-expat)
-AC_ARG_WITH(system_expat,
-            AS_HELP_STRING([--with-system-expat], [build pyexpat module using an installed expat library]),
-            [],
-            [with_system_expat="no"])
-
-AC_MSG_RESULT($with_system_expat)
-
-# Check for use of the system libffi library
-AC_MSG_CHECKING(for --with-system-ffi)
-AC_ARG_WITH(system_ffi,
-            AS_HELP_STRING([--with-system-ffi], [build _ctypes module using an installed ffi library]),
-            [],
-            [with_system_ffi="no"])
-
-if test "$with_system_ffi" = "yes" && test -n "$PKG_CONFIG"; then
-    LIBFFI_INCLUDEDIR="`"$PKG_CONFIG" libffi --cflags-only-I 2>/dev/null | sed -e 's/^-I//;s/ *$//'`"
-else
-    LIBFFI_INCLUDEDIR=""
-fi
-AC_SUBST(LIBFFI_INCLUDEDIR)
-
-AC_MSG_RESULT($with_system_ffi)
-
-# Check for support for loadable sqlite extensions
-AC_MSG_CHECKING(for --enable-loadable-sqlite-extensions)
-AC_ARG_ENABLE(loadable-sqlite-extensions,
-              AS_HELP_STRING([--enable-loadable-sqlite-extensions], [support loadable extensions in _sqlite module]),
-              [],
-              [enable_loadable_sqlite_extensions="no"])
-
-AC_MSG_RESULT($enable_loadable_sqlite_extensions)
-
-# Check for --with-dbmliborder
-AC_MSG_CHECKING(for --with-dbmliborder)
-AC_ARG_WITH(dbmliborder,
-            AS_HELP_STRING([--with-dbmliborder=db1:db2:...], [order to check db backends for dbm. Valid value is a colon separated string with the backend names `ndbm', `gdbm' and `bdb'.]),
-[
-if test x$with_dbmliborder = xyes
-then
-AC_MSG_ERROR([proper usage is --with-dbmliborder=db1:db2:...])
-else
-  for db in `echo $with_dbmliborder | sed 's/:/ /g'`; do
-    if test x$db != xndbm && test x$db != xgdbm && test x$db != xbdb
-    then
-      AC_MSG_ERROR([proper usage is --with-dbmliborder=db1:db2:...])
-    fi
-  done
-fi])
-AC_MSG_RESULT($with_dbmliborder)
-
-# Determine if signalmodule should be used.
-AC_SUBST(USE_SIGNAL_MODULE)
-AC_SUBST(SIGNAL_OBJS)
-AC_MSG_CHECKING(for --with-signal-module)
-AC_ARG_WITH(signal-module,
-            AS_HELP_STRING([--with-signal-module], [disable/enable signal module]))
-
-if test -z "$with_signal_module"
-then with_signal_module="yes"
-fi
-AC_MSG_RESULT($with_signal_module)
-
-if test "${with_signal_module}" = "yes"; then
-	USE_SIGNAL_MODULE=""
-	SIGNAL_OBJS=""
-else
-	USE_SIGNAL_MODULE="#"
-	SIGNAL_OBJS="Parser/intrcheck.o Python/sigcheck.o"
-fi
-
-# This is used to generate Setup.config
-AC_SUBST(USE_THREAD_MODULE)
-USE_THREAD_MODULE=""
-
-AC_MSG_CHECKING(for --with-dec-threads)
-AC_SUBST(LDLAST)
-AC_ARG_WITH(dec-threads,
-            AS_HELP_STRING([--with-dec-threads], [use DEC Alpha/OSF1 thread-safe libraries]),
-[
-AC_MSG_RESULT($withval)
-LDLAST=-threads
-if test "${with_thread+set}" != set; then
-   with_thread="$withval";
-fi],
-[AC_MSG_RESULT(no)])
-
-# Templates for things AC_DEFINEd more than once.
-# For a single AC_DEFINE, no template is needed.
-AH_TEMPLATE(C_THREADS,[Define if you have the Mach cthreads package])
-AH_TEMPLATE(_REENTRANT,
-  [Define to force use of thread-safe errno, h_errno, and other functions])
-AH_TEMPLATE(WITH_THREAD,
-  [Define if you want to compile in rudimentary thread support])
-
-AC_MSG_CHECKING(for --with-threads)
-dnl quadrigraphs "@<:@" and "@:>@" produce "[" and "]" in the output
-AC_ARG_WITH(threads,
-            AS_HELP_STRING([--with(out)-threads@<:@=DIRECTORY@:>@], [disable/enable thread support]))
-
-# --with-thread is deprecated, but check for it anyway
-dnl quadrigraphs "@<:@" and "@:>@" produce "[" and "]" in the output
-AC_ARG_WITH(thread,
-            AS_HELP_STRING([--with(out)-thread@<:@=DIRECTORY@:>@], [deprecated; use --with(out)-threads]),
-            [with_threads=$with_thread])
-
-if test -z "$with_threads"
-then with_threads="yes"
-fi
-AC_MSG_RESULT($with_threads)
-
-AC_SUBST(THREADOBJ)
-if test "$with_threads" = "no"
-then
-    USE_THREAD_MODULE="#"
-elif test "$ac_cv_pthread_is_default" = yes
-then
-    AC_DEFINE(WITH_THREAD)
-    # Defining _REENTRANT on systems with POSIX threads should not hurt.
-    AC_DEFINE(_REENTRANT)
-    posix_threads=yes
-    THREADOBJ="Python/thread.o"    
-elif test "$ac_cv_kpthread" = "yes"
-then
-    CC="$CC -Kpthread"
-    if test "$ac_cv_cxx_thread" = "yes"; then
-        CXX="$CXX -Kpthread"
-    fi
-    AC_DEFINE(WITH_THREAD)
-    posix_threads=yes
-    THREADOBJ="Python/thread.o"
-elif test "$ac_cv_kthread" = "yes"
-then
-    CC="$CC -Kthread"
-    if test "$ac_cv_cxx_thread" = "yes"; then
-        CXX="$CXX -Kthread"
-    fi
-    AC_DEFINE(WITH_THREAD)
-    posix_threads=yes
-    THREADOBJ="Python/thread.o"
-elif test "$ac_cv_pthread" = "yes"
-then
-    CC="$CC -pthread"
-    if test "$ac_cv_cxx_thread" = "yes"; then
-        CXX="$CXX -pthread"
-    fi
-    AC_DEFINE(WITH_THREAD)
-    posix_threads=yes
-    THREADOBJ="Python/thread.o"
-else
-    if test ! -z "$with_threads" -a -d "$with_threads"
-    then LDFLAGS="$LDFLAGS -L$with_threads"
-    fi
-    if test ! -z "$withval" -a -d "$withval"
-    then LDFLAGS="$LDFLAGS -L$withval"
-    fi
-
-    # According to the POSIX spec, a pthreads implementation must
-    # define _POSIX_THREADS in unistd.h. Some apparently don't
-    # (e.g. gnu pth with pthread emulation)
-    AC_MSG_CHECKING(for _POSIX_THREADS in unistd.h)
-    AC_EGREP_CPP(yes,
-    [
-#include <unistd.h>
-#ifdef _POSIX_THREADS
-yes
-#endif
-    ], unistd_defines_pthreads=yes, unistd_defines_pthreads=no)
-    AC_MSG_RESULT($unistd_defines_pthreads)
-
-    AC_DEFINE(_REENTRANT)
-    AC_CHECK_HEADER(cthreads.h, [AC_DEFINE(WITH_THREAD)
-    AC_DEFINE(C_THREADS)
-    AC_DEFINE(HURD_C_THREADS, 1,
-    [Define if you are using Mach cthreads directly under /include])
-    LIBS="$LIBS -lthreads"
-    THREADOBJ="Python/thread.o"],[
-    AC_CHECK_HEADER(mach/cthreads.h, [AC_DEFINE(WITH_THREAD)
-    AC_DEFINE(C_THREADS)
-    AC_DEFINE(MACH_C_THREADS, 1,
-    [Define if you are using Mach cthreads under mach /])
-    THREADOBJ="Python/thread.o"],[
-    # Just looking for pthread_create in libpthread is not enough:
-    # on HP/UX, pthread.h renames pthread_create to a different symbol name.
-    # So we really have to include pthread.h, and then link.
-    _libs=$LIBS
-    LIBS="$LIBS -lpthread"
-    AC_MSG_CHECKING([for pthread_create in -lpthread])
-    AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include <pthread.h>
-
-void * start_routine (void *arg) { exit (0); }]], [[
-pthread_create (NULL, NULL, start_routine, NULL)]])],[
-    AC_MSG_RESULT(yes)
-    AC_DEFINE(WITH_THREAD)
-    posix_threads=yes
-    THREADOBJ="Python/thread.o"],[
-    LIBS=$_libs
-    AC_CHECK_FUNC(pthread_detach, [AC_DEFINE(WITH_THREAD)
-    posix_threads=yes
-    THREADOBJ="Python/thread.o"],[
-    AC_CHECK_LIB(pthreads, pthread_create, [AC_DEFINE(WITH_THREAD)
-    posix_threads=yes
-    LIBS="$LIBS -lpthreads"
-    THREADOBJ="Python/thread.o"], [
-    AC_CHECK_LIB(c_r, pthread_create, [AC_DEFINE(WITH_THREAD)
-    posix_threads=yes
-    LIBS="$LIBS -lc_r"
-    THREADOBJ="Python/thread.o"], [
-    AC_CHECK_LIB(pthread, __pthread_create_system, [AC_DEFINE(WITH_THREAD)
-    posix_threads=yes
-    LIBS="$LIBS -lpthread"
-    THREADOBJ="Python/thread.o"], [
-    AC_CHECK_LIB(cma, pthread_create, [AC_DEFINE(WITH_THREAD)
-    posix_threads=yes
-    LIBS="$LIBS -lcma"
-    THREADOBJ="Python/thread.o"],[
-    USE_THREAD_MODULE="#"])
-    ])])])])])])])
-
-    AC_CHECK_LIB(mpc, usconfig, [AC_DEFINE(WITH_THREAD)
-    LIBS="$LIBS -lmpc"
-    THREADOBJ="Python/thread.o"
-    USE_THREAD_MODULE=""])
-
-    if test "$posix_threads" != "yes"; then     
-      AC_CHECK_LIB(thread, thr_create, [AC_DEFINE(WITH_THREAD)
-      LIBS="$LIBS -lthread"
-      THREADOBJ="Python/thread.o"
-      USE_THREAD_MODULE=""])
-    fi
-
-    if test "$USE_THREAD_MODULE" != "#"
-    then
-        # If the above checks didn't disable threads, (at least) OSF1
-        # needs this '-threads' argument during linking.
-        case $ac_sys_system in
-        OSF1) LDLAST=-threads;;
-        esac
-    fi
-fi
-
-if test "$posix_threads" = "yes"; then
-      if test "$unistd_defines_pthreads" = "no"; then
-         AC_DEFINE(_POSIX_THREADS, 1,
-         [Define if you have POSIX threads, 
-          and your system does not define that.])
-      fi
-
-      # Bug 662787: Using semaphores causes inexplicable hangs on Solaris 8.
-      case  $ac_sys_system/$ac_sys_release in
-      SunOS/5.6) AC_DEFINE(HAVE_PTHREAD_DESTRUCTOR, 1,
-                       [Defined for Solaris 2.6 bug in pthread header.])
-		       ;;
-      SunOS/5.8) AC_DEFINE(HAVE_BROKEN_POSIX_SEMAPHORES, 1,
-		       [Define if the Posix semaphores do not work on your system])
-		       ;;
-      AIX/*) AC_DEFINE(HAVE_BROKEN_POSIX_SEMAPHORES, 1,
-		       [Define if the Posix semaphores do not work on your system])
-		       ;;
-      esac
-
-      AC_MSG_CHECKING(if PTHREAD_SCOPE_SYSTEM is supported)
-      AC_CACHE_VAL(ac_cv_pthread_system_supported,
-      [AC_RUN_IFELSE([AC_LANG_SOURCE([[#include <pthread.h>
-      void *foo(void *parm) {
-        return NULL;
-      }
-      main() {
-        pthread_attr_t attr;
-        pthread_t id;
-        if (pthread_attr_init(&attr)) exit(-1);
-        if (pthread_attr_setscope(&attr, PTHREAD_SCOPE_SYSTEM)) exit(-1);
-        if (pthread_create(&id, &attr, foo, NULL)) exit(-1);
-        exit(0);
-      }]])],
-      [ac_cv_pthread_system_supported=yes],
-      [ac_cv_pthread_system_supported=no],
-      [ac_cv_pthread_system_supported=no])
-      ])
-      AC_MSG_RESULT($ac_cv_pthread_system_supported)
-      if test "$ac_cv_pthread_system_supported" = "yes"; then
-        AC_DEFINE(PTHREAD_SYSTEM_SCHED_SUPPORTED, 1, [Defined if PTHREAD_SCOPE_SYSTEM supported.])
-      fi
-      AC_CHECK_FUNCS(pthread_sigmask,
-        [case $ac_sys_system in
-        CYGWIN*)
-          AC_DEFINE(HAVE_BROKEN_PTHREAD_SIGMASK, 1,
-            [Define if pthread_sigmask() does not work on your system.])
-            ;;
-        esac])
-fi
-
-
-# Check for enable-ipv6
-AH_TEMPLATE(ENABLE_IPV6, [Define if --enable-ipv6 is specified])
-AC_MSG_CHECKING([if --enable-ipv6 is specified])
-AC_ARG_ENABLE(ipv6,
-[  --enable-ipv6           Enable ipv6 (with ipv4) support
-  --disable-ipv6          Disable ipv6 support],
-[ case "$enableval" in
-  no)
-       AC_MSG_RESULT(no)
-       ipv6=no
-       ;;
-  *)   AC_MSG_RESULT(yes)
-       AC_DEFINE(ENABLE_IPV6)
-       ipv6=yes
-       ;;
-  esac ],
-
-[
-dnl the check does not work in the cross-compilation case...
-  AC_RUN_IFELSE([AC_LANG_SOURCE([[ /* AF_INET6 available check */
-#include <sys/types.h>
-#include <sys/socket.h>
-main()
-{
- if (socket(AF_INET6, SOCK_STREAM, 0) < 0)
-   exit(1);
- else
-   exit(0);
-}
-]])],[
-  AC_MSG_RESULT(yes)
-  ipv6=yes
-],[
-  AC_MSG_RESULT(no)
-  ipv6=no
-],[
-  AC_MSG_RESULT(no)
-  ipv6=no
-])
-
-if test "$ipv6" = "yes"; then
-	AC_MSG_CHECKING(if RFC2553 API is available)
-	AC_COMPILE_IFELSE([
-	  AC_LANG_PROGRAM([[#include <sys/types.h>
-#include <netinet/in.h>]],
-			  [[struct sockaddr_in6 x;
-			    x.sin6_scope_id;]])
-	],[
-	  AC_MSG_RESULT(yes)
-	  ipv6=yes
-	],[
-	  AC_MSG_RESULT(no, IPv6 disabled)
-	  ipv6=no
-	])
-fi
-
-if test "$ipv6" = "yes"; then
-	AC_DEFINE(ENABLE_IPV6)
-fi
-])
-
-ipv6type=unknown
-ipv6lib=none
-ipv6trylibc=no
-
-if test "$ipv6" = "yes"; then
-	AC_MSG_CHECKING([ipv6 stack type])
-	for i in inria kame linux-glibc linux-inet6 solaris toshiba v6d zeta;
-	do
-		case $i in
-		inria)
-			dnl http://www.kame.net/
-			AC_EGREP_CPP(yes, [
-#include <netinet/in.h>
-#ifdef IPV6_INRIA_VERSION
-yes
-#endif],
-				[ipv6type=$i])
-			;;
-		kame)
-			dnl http://www.kame.net/
-			AC_EGREP_CPP(yes, [
-#include <netinet/in.h>
-#ifdef __KAME__
-yes
-#endif],
-				[ipv6type=$i;
-				ipv6lib=inet6
-				ipv6libdir=/usr/local/v6/lib
-				ipv6trylibc=yes])
-			;;
-		linux-glibc)
-			dnl http://www.v6.linux.or.jp/
-			AC_EGREP_CPP(yes, [
-#include <features.h>
-#if defined(__GLIBC__) && ((__GLIBC__ == 2 && __GLIBC_MINOR__ >= 1) || (__GLIBC__ > 2))
-yes
-#endif],
-				[ipv6type=$i;
-				ipv6trylibc=yes])
-			;;
-		linux-inet6)
-			dnl http://www.v6.linux.or.jp/
-			if test -d /usr/inet6; then
-				ipv6type=$i
-				ipv6lib=inet6
-				ipv6libdir=/usr/inet6/lib
-				BASECFLAGS="-I/usr/inet6/include $BASECFLAGS"
-			fi
-			;;
-		solaris)
-			if test -f /etc/netconfig; then
-                          if $GREP -q tcp6 /etc/netconfig; then
-				ipv6type=$i
-				ipv6trylibc=yes
-                          fi
-                        fi
-			;;
-		toshiba)
-			AC_EGREP_CPP(yes, [
-#include <sys/param.h>
-#ifdef _TOSHIBA_INET6
-yes
-#endif],
-				[ipv6type=$i;
-				ipv6lib=inet6;
-				ipv6libdir=/usr/local/v6/lib])
-			;;
-		v6d)
-			AC_EGREP_CPP(yes, [
-#include </usr/local/v6/include/sys/v6config.h>
-#ifdef __V6D__
-yes
-#endif],
-				[ipv6type=$i;
-				ipv6lib=v6;
-				ipv6libdir=/usr/local/v6/lib;
-				BASECFLAGS="-I/usr/local/v6/include $BASECFLAGS"])
-			;;
-		zeta)
-			AC_EGREP_CPP(yes, [
-#include <sys/param.h>
-#ifdef _ZETA_MINAMI_INET6
-yes
-#endif],
-				[ipv6type=$i;
-				ipv6lib=inet6;
-				ipv6libdir=/usr/local/v6/lib])
-			;;
-		esac
-		if test "$ipv6type" != "unknown"; then
-			break
-		fi
-	done
-	AC_MSG_RESULT($ipv6type)
-fi
-
-if test "$ipv6" = "yes" -a "$ipv6lib" != "none"; then
-	if test -d $ipv6libdir -a -f $ipv6libdir/lib$ipv6lib.a; then
-		LIBS="-L$ipv6libdir -l$ipv6lib $LIBS"
-		echo "using lib$ipv6lib"
-	else
-		if test $ipv6trylibc = "yes"; then
-			echo "using libc"
-		else
-			echo "Fatal: no $ipv6lib library found.  Cannot continue."
-			echo "You need to fetch lib$ipv6lib.a from the appropriate"
-			echo "IPv6 kit and compile it beforehand."
-			exit 1
-		fi
-	fi
-fi
-
-AC_MSG_CHECKING(for OSX 10.5 SDK or later)
-AC_COMPILE_IFELSE([
-  AC_LANG_PROGRAM([[#include <Carbon/Carbon.h>]], [[FSIORefNum fRef = 0]])
-],[
-  AC_DEFINE(HAVE_OSX105_SDK, 1, [Define if compiling using MacOS X 10.5 SDK or later.])
-  AC_MSG_RESULT(yes)
-],[
-  AC_MSG_RESULT(no)
-])
-
-# Check for --with-doc-strings
-AC_MSG_CHECKING(for --with-doc-strings)
-AC_ARG_WITH(doc-strings,
-            AS_HELP_STRING([--with(out)-doc-strings], [disable/enable documentation strings]))
-
-if test -z "$with_doc_strings"
-then with_doc_strings="yes"
-fi
-if test "$with_doc_strings" != "no"
-then
-    AC_DEFINE(WITH_DOC_STRINGS, 1,
-      [Define if you want documentation strings in extension modules])
-fi
-AC_MSG_RESULT($with_doc_strings)
-
-# Check if eval loop should use timestamp counter profiling
-AC_MSG_CHECKING(for --with-tsc)
-AC_ARG_WITH(tsc,
-	    AS_HELP_STRING([--with(out)-tsc],[enable/disable timestamp counter profile]),[
-if test "$withval" != no
-then 
-  AC_DEFINE(WITH_TSC, 1, 
-    [Define to profile with the Pentium timestamp counter]) 
-    AC_MSG_RESULT(yes)
-else AC_MSG_RESULT(no)
-fi],
-[AC_MSG_RESULT(no)])
-
-# Check for Python-specific malloc support
-AC_MSG_CHECKING(for --with-pymalloc)
-AC_ARG_WITH(pymalloc,
-            AS_HELP_STRING([--with(out)-pymalloc], [disable/enable specialized mallocs]))
-
-if test -z "$with_pymalloc"
-then
-    with_pymalloc="yes"
-    ABIFLAGS="${ABIFLAGS}m"
-fi
-if test "$with_pymalloc" != "no"
-then
-    AC_DEFINE(WITH_PYMALLOC, 1, 
-     [Define if you want to compile in Python-specific mallocs])
-fi
-AC_MSG_RESULT($with_pymalloc)
-
-# Check for Valgrind support
-AC_MSG_CHECKING([for --with-valgrind])
-AC_ARG_WITH([valgrind],
-  AS_HELP_STRING([--with-valgrind], [Enable Valgrind support]),,
-  with_valgrind=no)
-AC_MSG_RESULT([$with_valgrind])
-if test "$with_valgrind" != no; then
-    AC_CHECK_HEADER([valgrind/valgrind.h],
-      [AC_DEFINE([WITH_VALGRIND], 1, [Define if you want pymalloc to be disabled when running under valgrind])],
-      [AC_MSG_ERROR([Valgrind support requested but headers not available])]
-    )
-    OPT="-DDYNAMIC_ANNOTATIONS_ENABLED=1 $OPT"
-fi
-
-# -I${DLINCLDIR} is added to the compile rule for importdl.o
-AC_SUBST(DLINCLDIR)
-DLINCLDIR=.
-
-# the dlopen() function means we might want to use dynload_shlib.o. some
-# platforms, such as AIX, have dlopen(), but don't want to use it.
-AC_CHECK_FUNCS(dlopen)
-
-# DYNLOADFILE specifies which dynload_*.o file we will use for dynamic
-# loading of modules.
-AC_SUBST(DYNLOADFILE)
-AC_MSG_CHECKING(DYNLOADFILE)
-if test -z "$DYNLOADFILE"
-then
-	case $ac_sys_system/$ac_sys_release in
-	AIX*) # Use dynload_shlib.c and dlopen() if we have it; otherwise dynload_aix.c
-	if test "$ac_cv_func_dlopen" = yes
-	then DYNLOADFILE="dynload_shlib.o"
-	else DYNLOADFILE="dynload_aix.o"
-	fi
-	;;
-	hp*|HP*) DYNLOADFILE="dynload_hpux.o";;
-	# Use dynload_next.c only on 10.2 and below, which don't have native dlopen()
-	Darwin/@<:@0156@:>@\..*) DYNLOADFILE="dynload_next.o";;
-	*)
-	# use dynload_shlib.c and dlopen() if we have it; otherwise stub
-	# out any dynamic loading
-	if test "$ac_cv_func_dlopen" = yes
-	then DYNLOADFILE="dynload_shlib.o"
-	else DYNLOADFILE="dynload_stub.o"
-	fi
-	;;
-	esac
-fi
-AC_MSG_RESULT($DYNLOADFILE)
-if test "$DYNLOADFILE" != "dynload_stub.o"
-then
-	AC_DEFINE(HAVE_DYNAMIC_LOADING, 1,
-        [Defined when any dynamic module loading is enabled.])
-fi
-
-# MACHDEP_OBJS can be set to platform-specific object files needed by Python
-
-AC_SUBST(MACHDEP_OBJS)
-AC_MSG_CHECKING(MACHDEP_OBJS)
-if test -z "$MACHDEP_OBJS"
-then
-	MACHDEP_OBJS=$extra_machdep_objs
-else
-	MACHDEP_OBJS="$MACHDEP_OBJS $extra_machdep_objs"
-fi
-AC_MSG_RESULT(MACHDEP_OBJS)
-
-# checks for library functions
-AC_CHECK_FUNCS(alarm accept4 setitimer getitimer bind_textdomain_codeset chown \
- clock confstr ctermid execv fchmod fchown fork fpathconf ftime ftruncate \
- gai_strerror getgroups getlogin getloadavg getpeername getpgid getpid \
- getpriority getresuid getresgid getpwent getspnam getspent getsid getwd \
- initgroups kill killpg lchmod lchown lstat mbrtowc mkfifo mknod mktime \
- mremap nice pathconf pause plock poll pthread_init \
- putenv readlink realpath \
- select sem_open sem_timedwait sem_getvalue sem_unlink setegid seteuid \
- setgid \
- setlocale setregid setreuid setresuid setresgid setsid setpgid setpgrp setuid setvbuf \
- sigaction siginterrupt sigrelse snprintf strftime strlcpy \
- sysconf tcgetpgrp tcsetpgrp tempnam timegm times tmpfile tmpnam tmpnam_r \
- truncate uname unsetenv utimes waitpid wait3 wait4 \
- wcscoll wcsftime wcsxfrm _getpty)
-
-AC_CHECK_DECL(dirfd,
-    AC_DEFINE(HAVE_DIRFD, 1,
-              Define if you have the 'dirfd' function or macro.), ,
-      [#include <sys/types.h>
-       #include <dirent.h>])
-
-# For some functions, having a definition is not sufficient, since
-# we want to take their address.
-AC_MSG_CHECKING(for chroot)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[void *x=chroot]])],
-  [AC_DEFINE(HAVE_CHROOT, 1, Define if you have the 'chroot' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-AC_MSG_CHECKING(for link)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[void *x=link]])],
-  [AC_DEFINE(HAVE_LINK, 1, Define if you have the 'link' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-AC_MSG_CHECKING(for symlink)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[void *x=symlink]])],
-  [AC_DEFINE(HAVE_SYMLINK, 1, Define if you have the 'symlink' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-AC_MSG_CHECKING(for fchdir)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[void *x=fchdir]])],
-  [AC_DEFINE(HAVE_FCHDIR, 1, Define if you have the 'fchdir' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-AC_MSG_CHECKING(for fsync)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[void *x=fsync]])],
-  [AC_DEFINE(HAVE_FSYNC, 1, Define if you have the 'fsync' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-AC_MSG_CHECKING(for fdatasync)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[void *x=fdatasync]])],
-  [AC_DEFINE(HAVE_FDATASYNC, 1, Define if you have the 'fdatasync' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-AC_MSG_CHECKING(for epoll)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/epoll.h>]], [[void *x=epoll_create]])],
-  [AC_DEFINE(HAVE_EPOLL, 1, Define if you have the 'epoll' functions.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-AC_MSG_CHECKING(for kqueue)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#include <sys/types.h>
-#include <sys/event.h>
-    ]], [[int x=kqueue()]])],
-  [AC_DEFINE(HAVE_KQUEUE, 1, Define if you have the 'kqueue' functions.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-# On some systems (eg. FreeBSD 5), we would find a definition of the
-# functions ctermid_r, setgroups in the library, but no prototype
-# (e.g. because we use _XOPEN_SOURCE). See whether we can take their
-# address to avoid compiler warnings and potential miscompilations
-# because of the missing prototypes.
-
-AC_MSG_CHECKING(for ctermid_r)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#include <stdio.h>
-]], [[void* p = ctermid_r]])],
-  [AC_DEFINE(HAVE_CTERMID_R, 1, Define if you have the 'ctermid_r' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-
-AC_CACHE_CHECK([for flock declaration], [ac_cv_flock_decl],
-  [AC_COMPILE_IFELSE(
-    [AC_LANG_PROGRAM(
-      [#include <sys/file.h>],
-      [void* p = flock]
-    )],
-    [ac_cv_flock_decl=yes],
-    [ac_cv_flock_decl=no]
-  )
-])
-if test "x${ac_cv_flock_decl}" = xyes; then
-  AC_CHECK_FUNCS(flock,,
-    AC_CHECK_LIB(bsd,flock,
-      [AC_DEFINE(HAVE_FLOCK)
-       AC_DEFINE(FLOCK_NEEDS_LIBBSD, 1, Define if flock needs to be linked with bsd library.)
-    ])
-  )
-fi
-
-AC_MSG_CHECKING(for getpagesize)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#include <unistd.h>
-]], [[void* p = getpagesize]])],
-  [AC_DEFINE(HAVE_GETPAGESIZE, 1, Define if you have the 'getpagesize' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-
-AC_MSG_CHECKING(for broken unsetenv)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#include <stdlib.h>
-]], [[int res = unsetenv("DUMMY")]])],
-  [AC_MSG_RESULT(no)],
-  [AC_DEFINE(HAVE_BROKEN_UNSETENV, 1, Define if `unsetenv` does not return an int.)
-   AC_MSG_RESULT(yes)
-])
-
-dnl check for true
-AC_CHECK_PROGS(TRUE, true, /bin/true)
-
-dnl On some systems (e.g. Solaris 9), hstrerror and inet_aton are in -lresolv
-dnl On others, they are in the C library, so we don't need to take any action
-AC_CHECK_LIB(c, inet_aton, [$ac_cv_prog_TRUE],
-  AC_CHECK_LIB(resolv, inet_aton)
-)
-
-# On Tru64, chflags seems to be present, but calling it will
-# exit Python
-AC_CACHE_CHECK([for chflags], [ac_cv_have_chflags], [dnl
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <sys/stat.h>
-#include <unistd.h>
-int main(int argc, char*argv[])
-{
-  if(chflags(argv[0], 0) != 0)
-    return 1;
-  return 0;
-}
-]])],
-[ac_cv_have_chflags=yes],
-[ac_cv_have_chflags=no],
-[ac_cv_have_chflags=cross])
-])
-if test "$ac_cv_have_chflags" = cross ; then
-  AC_CHECK_FUNC([chflags], [ac_cv_have_chflags="yes"], [ac_cv_have_chflags="no"])
-fi
-if test "$ac_cv_have_chflags" = yes ; then
-  AC_DEFINE(HAVE_CHFLAGS, 1, [Define to 1 if you have the 'chflags' function.])
-fi
-
-AC_CACHE_CHECK([for lchflags], [ac_cv_have_lchflags], [dnl
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <sys/stat.h>
-#include <unistd.h>
-int main(int argc, char*argv[])
-{
-  if(lchflags(argv[0], 0) != 0)
-    return 1;
-  return 0;
-}
-]])],[ac_cv_have_lchflags=yes],[ac_cv_have_lchflags=no],[ac_cv_have_lchflags=cross])
-])
-if test "$ac_cv_have_lchflags" = cross ; then
-  AC_CHECK_FUNC([lchflags], [ac_cv_have_lchflags="yes"], [ac_cv_have_lchflags="no"])
-fi
-if test "$ac_cv_have_lchflags" = yes ; then
-  AC_DEFINE(HAVE_LCHFLAGS, 1, [Define to 1 if you have the 'lchflags' function.])
-fi
-
-dnl Check if system zlib has *Copy() functions
-dnl
-dnl On MacOSX the linker will search for dylibs on the entire linker path
-dnl before searching for static libraries. setup.py adds -Wl,-search_paths_first
-dnl to revert to a more traditional unix behaviour and make it possible to
-dnl override the system libz with a local static library of libz. Temporarily
-dnl add that flag to our CFLAGS as well to ensure that we check the version
-dnl of libz that will be used by setup.py. 
-dnl The -L/usr/local/lib is needed as well to get the same compilation
-dnl environment as setup.py (and leaving it out can cause configure to use the
-dnl wrong version of the library)
-case $ac_sys_system/$ac_sys_release in
-Darwin/*) 
-	_CUR_CFLAGS="${CFLAGS}"
-	_CUR_LDFLAGS="${LDFLAGS}"
-	CFLAGS="${CFLAGS} -Wl,-search_paths_first"
-	LDFLAGS="${LDFLAGS} -Wl,-search_paths_first -L/usr/local/lib"
-	;;
-esac
-
-AC_CHECK_LIB(z, inflateCopy, AC_DEFINE(HAVE_ZLIB_COPY, 1, [Define if the zlib library has inflateCopy]))
-
-case $ac_sys_system/$ac_sys_release in
-Darwin/*) 
-	CFLAGS="${_CUR_CFLAGS}"
-	LDFLAGS="${_CUR_LDFLAGS}"
-	;;
-esac
-
-AC_MSG_CHECKING(for hstrerror)
-AC_LINK_IFELSE([AC_LANG_PROGRAM([[
-#include <netdb.h>
-]], [[void* p = hstrerror; hstrerror(0)]])],
-  [AC_DEFINE(HAVE_HSTRERROR, 1, Define if you have the 'hstrerror' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-
-AC_MSG_CHECKING(for inet_aton)
-AC_LINK_IFELSE([AC_LANG_PROGRAM([[
-#include <sys/types.h>
-#include <sys/socket.h>
-#include <netinet/in.h>
-#include <arpa/inet.h>
-]], [[void* p = inet_aton;inet_aton(0,0)]])],
-  [AC_DEFINE(HAVE_INET_ATON, 1, Define if you have the 'inet_aton' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-
-AC_MSG_CHECKING(for inet_pton)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#include <sys/types.h>
-#include <sys/socket.h>
-#include <netinet/in.h>
-#include <arpa/inet.h>
-]], [[void* p = inet_pton]])],
-  [AC_DEFINE(HAVE_INET_PTON, 1, Define if you have the 'inet_pton' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-
-# On some systems, setgroups is in unistd.h, on others, in grp.h
-AC_MSG_CHECKING(for setgroups)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#include <unistd.h>
-#ifdef HAVE_GRP_H
-#include <grp.h>
-#endif
-]], [[void* p = setgroups]])],
-  [AC_DEFINE(HAVE_SETGROUPS, 1, Define if you have the 'setgroups' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)
-])
-
-# check for openpty and forkpty
-
-AC_CHECK_FUNCS(openpty,, 
-   AC_CHECK_LIB(util,openpty,
-     [AC_DEFINE(HAVE_OPENPTY) LIBS="$LIBS -lutil"],
-     AC_CHECK_LIB(bsd,openpty, [AC_DEFINE(HAVE_OPENPTY) LIBS="$LIBS -lbsd"])
-   )
-)
-AC_CHECK_FUNCS(forkpty,, 
-   AC_CHECK_LIB(util,forkpty, 
-     [AC_DEFINE(HAVE_FORKPTY) LIBS="$LIBS -lutil"],
-     AC_CHECK_LIB(bsd,forkpty, [AC_DEFINE(HAVE_FORKPTY) LIBS="$LIBS -lbsd"])
-   )
-)
-
-# Stuff for expat.
-AC_CHECK_FUNCS(memmove)
-
-# check for long file support functions
-AC_CHECK_FUNCS(fseek64 fseeko fstatvfs ftell64 ftello statvfs)
-
-AC_REPLACE_FUNCS(dup2 getcwd strdup)
-AC_CHECK_FUNCS(getpgrp, 
-  AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[getpgrp(0);]])],
-    [AC_DEFINE(GETPGRP_HAVE_ARG, 1, [Define if getpgrp() must be called as getpgrp(0).])],
-    [])
-)
-AC_CHECK_FUNCS(setpgrp,
-  AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <unistd.h>]], [[setpgrp(0,0);]])],
-    [AC_DEFINE(SETPGRP_HAVE_ARG, 1, [Define if setpgrp() must be called as setpgrp(0, 0).])],
-    [])
-)
-AC_CHECK_FUNCS(gettimeofday, 
-  AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/time.h>]],
-  				     [[gettimeofday((struct timeval*)0,(struct timezone*)0);]])],
-    [],
-    [AC_DEFINE(GETTIMEOFDAY_NO_TZ, 1,
-      [Define if gettimeofday() does not have a second (timezone) argument.
-       This is the case on Motorola V4 (R40V4.2)])
-    ])
-)
-
-AC_MSG_CHECKING(for major, minor, and makedev)
-AC_LINK_IFELSE([AC_LANG_PROGRAM([[
-#if defined(MAJOR_IN_MKDEV)
-#include <sys/mkdev.h>
-#elif defined(MAJOR_IN_SYSMACROS)
-#include <sys/sysmacros.h>
-#else
-#include <sys/types.h>
-#endif
-]], [[
-  makedev(major(0),minor(0));
-]])],[
-  AC_DEFINE(HAVE_DEVICE_MACROS, 1,
-	    [Define to 1 if you have the device macros.])
-  AC_MSG_RESULT(yes)
-],[
-  AC_MSG_RESULT(no)
-])
-
-# On OSF/1 V5.1, getaddrinfo is available, but there is a define
-# for [no]getaddrinfo in netdb.h.
-AC_MSG_CHECKING(for getaddrinfo)
-AC_LINK_IFELSE([AC_LANG_PROGRAM([[
-#include <sys/types.h>
-#include <sys/socket.h>
-#include <netdb.h>
-#include <stdio.h>
-]], [[getaddrinfo(NULL, NULL, NULL, NULL);]])],
-[have_getaddrinfo=yes],
-[have_getaddrinfo=no])
-AC_MSG_RESULT($have_getaddrinfo)
-if test $have_getaddrinfo = yes
-then
-  AC_MSG_CHECKING(getaddrinfo bug)
-  AC_CACHE_VAL(ac_cv_buggy_getaddrinfo,
-  AC_RUN_IFELSE([AC_LANG_SOURCE([[[
-#include <sys/types.h>
-#include <netdb.h>
-#include <string.h>
-#include <sys/socket.h>
-#include <netinet/in.h>
-
-int main()
-{
-  int passive, gaierr, inet4 = 0, inet6 = 0;
-  struct addrinfo hints, *ai, *aitop;
-  char straddr[INET6_ADDRSTRLEN], strport[16];
-
-  for (passive = 0; passive <= 1; passive++) {
-    memset(&hints, 0, sizeof(hints));
-    hints.ai_family = AF_UNSPEC;
-    hints.ai_flags = passive ? AI_PASSIVE : 0;
-    hints.ai_socktype = SOCK_STREAM;
-    hints.ai_protocol = IPPROTO_TCP;
-    if ((gaierr = getaddrinfo(NULL, "54321", &hints, &aitop)) != 0) {
-      (void)gai_strerror(gaierr);
-      goto bad;
-    }
-    for (ai = aitop; ai; ai = ai->ai_next) {
-      if (ai->ai_addr == NULL ||
-          ai->ai_addrlen == 0 ||
-          getnameinfo(ai->ai_addr, ai->ai_addrlen,
-                      straddr, sizeof(straddr), strport, sizeof(strport),
-                      NI_NUMERICHOST|NI_NUMERICSERV) != 0) {
-        goto bad;
-      }
-      switch (ai->ai_family) {
-      case AF_INET:
-        if (strcmp(strport, "54321") != 0) {
-          goto bad;
-        }
-        if (passive) {
-          if (strcmp(straddr, "0.0.0.0") != 0) {
-            goto bad;
-          }
-        } else {
-          if (strcmp(straddr, "127.0.0.1") != 0) {
-            goto bad;
-          }
-        }
-        inet4++;
-        break;
-      case AF_INET6:
-        if (strcmp(strport, "54321") != 0) {
-          goto bad;
-        }
-        if (passive) {
-          if (strcmp(straddr, "::") != 0) {
-            goto bad;
-          }
-        } else {
-          if (strcmp(straddr, "::1") != 0) {
-            goto bad;
-          }
-        }
-        inet6++;
-        break;
-      case AF_UNSPEC:
-        goto bad;
-        break;
-      default:
-        /* another family support? */
-        break;
-      }
-    }
-  }
-
-  if (!(inet4 == 0 || inet4 == 2))
-    goto bad;
-  if (!(inet6 == 0 || inet6 == 2))
-    goto bad;
-
-  if (aitop)
-    freeaddrinfo(aitop);
-  return 0;
-
- bad:
-  if (aitop)
-    freeaddrinfo(aitop);
-  return 1;
-}
-]]])],
-[ac_cv_buggy_getaddrinfo=no],
-[ac_cv_buggy_getaddrinfo=yes],
-[ac_cv_buggy_getaddrinfo=yes]))
-fi
-
-AC_MSG_RESULT($ac_cv_buggy_getaddrinfo)
-
-if test $have_getaddrinfo = no -o "$ac_cv_buggy_getaddrinfo" = yes
-then
-	if test $ipv6 = yes
-	then
-		echo 'Fatal: You must get a working getaddrinfo() function,'
-		echo '       or you can specify "--disable-ipv6".'
-		exit 1
-	fi
-else
-	AC_DEFINE(HAVE_GETADDRINFO, 1, [Define if you have the getaddrinfo function.])
-fi
-
-AC_CHECK_FUNCS(getnameinfo)
-
-# checks for structures
-AC_HEADER_TIME
-AC_STRUCT_TM
-AC_STRUCT_TIMEZONE
-AC_CHECK_MEMBERS([struct stat.st_rdev])
-AC_CHECK_MEMBERS([struct stat.st_blksize])
-AC_CHECK_MEMBERS([struct stat.st_flags])
-AC_CHECK_MEMBERS([struct stat.st_gen])
-AC_CHECK_MEMBERS([struct stat.st_birthtime])
-AC_STRUCT_ST_BLOCKS
-
-AC_MSG_CHECKING(for time.h that defines altzone)
-AC_CACHE_VAL(ac_cv_header_time_altzone,[
-  AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <time.h>]], [[return altzone;]])],
-    [ac_cv_header_time_altzone=yes],
-    [ac_cv_header_time_altzone=no])
-  ])
-AC_MSG_RESULT($ac_cv_header_time_altzone)
-if test $ac_cv_header_time_altzone = yes; then
-  AC_DEFINE(HAVE_ALTZONE, 1, [Define this if your time.h defines altzone.])
-fi
-
-was_it_defined=no
-AC_MSG_CHECKING(whether sys/select.h and sys/time.h may both be included)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#include <sys/types.h>
-#include <sys/select.h>
-#include <sys/time.h>
-]], [[;]])],[
-  AC_DEFINE(SYS_SELECT_WITH_SYS_TIME, 1,
-  [Define if  you can safely include both <sys/select.h> and <sys/time.h>
-   (which you can't on SCO ODT 3.0).]) 
-  was_it_defined=yes
-],[])
-AC_MSG_RESULT($was_it_defined)
-
-AC_MSG_CHECKING(for addrinfo)
-AC_CACHE_VAL(ac_cv_struct_addrinfo,
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <netdb.h>]], [[struct addrinfo a]])],
-  [ac_cv_struct_addrinfo=yes],
-  [ac_cv_struct_addrinfo=no]))
-AC_MSG_RESULT($ac_cv_struct_addrinfo)
-if test $ac_cv_struct_addrinfo = yes; then
-	AC_DEFINE(HAVE_ADDRINFO, 1, [struct addrinfo (netdb.h)])
-fi
-
-AC_MSG_CHECKING(for sockaddr_storage)
-AC_CACHE_VAL(ac_cv_struct_sockaddr_storage,
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#		include <sys/types.h>
-#		include <sys/socket.h>]], [[struct sockaddr_storage s]])],
-  [ac_cv_struct_sockaddr_storage=yes],
-  [ac_cv_struct_sockaddr_storage=no]))
-AC_MSG_RESULT($ac_cv_struct_sockaddr_storage)
-if test $ac_cv_struct_sockaddr_storage = yes; then
-	AC_DEFINE(HAVE_SOCKADDR_STORAGE, 1, [struct sockaddr_storage (sys/socket.h)])
-fi
-
-# checks for compiler characteristics
-
-AC_C_CHAR_UNSIGNED
-AC_C_CONST
-
-works=no
-AC_MSG_CHECKING(for working volatile)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[volatile int x; x = 0;]])],
-  [works=yes],
-  [AC_DEFINE(volatile, , [Define to empty if the keyword does not work.])]
-)
-AC_MSG_RESULT($works)
-
-works=no
-AC_MSG_CHECKING(for working signed char)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[signed char c;]])],
-  [works=yes],
-  [AC_DEFINE(signed, , [Define to empty if the keyword does not work.])]
-)
-AC_MSG_RESULT($works)
-
-have_prototypes=no
-AC_MSG_CHECKING(for prototypes)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[int foo(int x) { return 0; }]], [[return foo(10);]])],
-  [AC_DEFINE(HAVE_PROTOTYPES, 1, 
-     [Define if your compiler supports function prototype]) 
-   have_prototypes=yes],
-  []
-)
-AC_MSG_RESULT($have_prototypes)
-
-works=no
-AC_MSG_CHECKING(for variable length prototypes and stdarg.h)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#include <stdarg.h>
-int foo(int x, ...) {
-	va_list va;
-	va_start(va, x);
-	va_arg(va, int);
-	va_arg(va, char *);
-	va_arg(va, double);
-	return 0;
-}
-]], [[return foo(10, "", 3.14);]])],[
-  AC_DEFINE(HAVE_STDARG_PROTOTYPES, 1,
-   [Define if your compiler supports variable length function prototypes
-   (e.g. void fprintf(FILE *, char *, ...);) *and* <stdarg.h>]) 
-  works=yes
-],[])
-AC_MSG_RESULT($works)
-
-# check for socketpair
-AC_MSG_CHECKING(for socketpair)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#include <sys/types.h>
-#include <sys/socket.h>
-]], [[void *x=socketpair]])],
-  [AC_DEFINE(HAVE_SOCKETPAIR, 1, [Define if you have the 'socketpair' function.])
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)]
-)
-
-# check if sockaddr has sa_len member
-AC_MSG_CHECKING(if sockaddr has sa_len member)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/types.h>
-#include <sys/socket.h>]], [[struct sockaddr x;
-x.sa_len = 0;]])],
-  [AC_MSG_RESULT(yes)
-   AC_DEFINE(HAVE_SOCKADDR_SA_LEN, 1, [Define if sockaddr has sa_len member])],
-  [AC_MSG_RESULT(no)]
-)
-
-va_list_is_array=no
-AC_MSG_CHECKING(whether va_list is an array)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#ifdef HAVE_STDARG_PROTOTYPES
-#include <stdarg.h>
-#else
-#include <varargs.h>
-#endif
-]], [[va_list list1, list2; list1 = list2;]])],[],[
- AC_DEFINE(VA_LIST_IS_ARRAY, 1, [Define if a va_list is an array of some kind]) 
- va_list_is_array=yes
-])
-AC_MSG_RESULT($va_list_is_array)
-
-# sigh -- gethostbyname_r is a mess; it can have 3, 5 or 6 arguments :-(
-AH_TEMPLATE(HAVE_GETHOSTBYNAME_R,
-  [Define this if you have some version of gethostbyname_r()])
-
-AC_CHECK_FUNC(gethostbyname_r, [
-  AC_DEFINE(HAVE_GETHOSTBYNAME_R)
-  AC_MSG_CHECKING([gethostbyname_r with 6 args])
-  OLD_CFLAGS=$CFLAGS
-  CFLAGS="$CFLAGS $MY_CPPFLAGS $MY_THREAD_CPPFLAGS $MY_CFLAGS"
-  AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#   include <netdb.h>
-  ]], [[
-    char *name;
-    struct hostent *he, *res;
-    char buffer[2048];
-    int buflen = 2048;
-    int h_errnop;
-
-    (void) gethostbyname_r(name, he, buffer, buflen, &res, &h_errnop)
-  ]])],[
-    AC_DEFINE(HAVE_GETHOSTBYNAME_R)
-    AC_DEFINE(HAVE_GETHOSTBYNAME_R_6_ARG, 1,
-    [Define this if you have the 6-arg version of gethostbyname_r().])
-    AC_MSG_RESULT(yes)
-  ],[
-    AC_MSG_RESULT(no)
-    AC_MSG_CHECKING([gethostbyname_r with 5 args])
-    AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#       include <netdb.h>
-      ]], [[
-        char *name;
-        struct hostent *he;
-        char buffer[2048];
-        int buflen = 2048;
-        int h_errnop;
-
-        (void) gethostbyname_r(name, he, buffer, buflen, &h_errnop)
-      ]])],
-      [
-        AC_DEFINE(HAVE_GETHOSTBYNAME_R)
-        AC_DEFINE(HAVE_GETHOSTBYNAME_R_5_ARG, 1,
-          [Define this if you have the 5-arg version of gethostbyname_r().])
-        AC_MSG_RESULT(yes)
-      ], [
-        AC_MSG_RESULT(no)
-        AC_MSG_CHECKING([gethostbyname_r with 3 args])
-        AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#           include <netdb.h>
-          ]], [[
-            char *name;
-            struct hostent *he;
-            struct hostent_data data;
-
-            (void) gethostbyname_r(name, he, &data);
-          ]])],
-          [
-            AC_DEFINE(HAVE_GETHOSTBYNAME_R)
-            AC_DEFINE(HAVE_GETHOSTBYNAME_R_3_ARG, 1,
-              [Define this if you have the 3-arg version of gethostbyname_r().])
-            AC_MSG_RESULT(yes)
-          ], [
-           AC_MSG_RESULT(no)
-        ])
-    ])
-  ])
-  CFLAGS=$OLD_CFLAGS
-], [
-  AC_CHECK_FUNCS(gethostbyname)
-])
-AC_SUBST(HAVE_GETHOSTBYNAME_R_6_ARG)
-AC_SUBST(HAVE_GETHOSTBYNAME_R_5_ARG)
-AC_SUBST(HAVE_GETHOSTBYNAME_R_3_ARG)
-AC_SUBST(HAVE_GETHOSTBYNAME_R)
-AC_SUBST(HAVE_GETHOSTBYNAME)
-
-# checks for system services
-# (none yet)
-
-# Linux requires this for correct f.p. operations
-AC_CHECK_FUNC(__fpu_control,
-  [],
-  [AC_CHECK_LIB(ieee, __fpu_control)
-])
-
-# Check for --with-fpectl
-AC_MSG_CHECKING(for --with-fpectl)
-AC_ARG_WITH(fpectl,
-            AS_HELP_STRING([--with-fpectl], [enable SIGFPE catching]),
-[
-if test "$withval" != no
-then 
-  AC_DEFINE(WANT_SIGFPE_HANDLER, 1,
-  [Define if you want SIGFPE handled (see Include/pyfpe.h).]) 
-  AC_MSG_RESULT(yes)
-else AC_MSG_RESULT(no)
-fi],
-[AC_MSG_RESULT(no)])
-
-# check for --with-libm=...
-AC_SUBST(LIBM)
-case $ac_sys_system in
-Darwin) ;;
-*) LIBM=-lm
-esac
-AC_MSG_CHECKING(for --with-libm=STRING)
-AC_ARG_WITH(libm,
-            AS_HELP_STRING([--with-libm=STRING], [math library]),
-[
-if test "$withval" = no
-then LIBM=
-     AC_MSG_RESULT(force LIBM empty)
-elif test "$withval" != yes
-then LIBM=$withval
-     AC_MSG_RESULT(set LIBM="$withval")
-else AC_MSG_ERROR([proper usage is --with-libm=STRING])
-fi],
-[AC_MSG_RESULT(default LIBM="$LIBM")])
-
-# check for --with-libc=...
-AC_SUBST(LIBC)
-AC_MSG_CHECKING(for --with-libc=STRING)
-AC_ARG_WITH(libc,
-            AS_HELP_STRING([--with-libc=STRING], [C library]),
-[
-if test "$withval" = no
-then LIBC=
-     AC_MSG_RESULT(force LIBC empty)
-elif test "$withval" != yes
-then LIBC=$withval
-     AC_MSG_RESULT(set LIBC="$withval")
-else AC_MSG_ERROR([proper usage is --with-libc=STRING])
-fi],
-[AC_MSG_RESULT(default LIBC="$LIBC")])
-
-# **************************************************
-# * Check for various properties of floating point *
-# **************************************************
-
-AC_MSG_CHECKING(whether C doubles are little-endian IEEE 754 binary64)
-AC_CACHE_VAL(ac_cv_little_endian_double, [
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <string.h>
-int main() {
-    double x = 9006104071832581.0;
-    if (memcmp(&x, "\x05\x04\x03\x02\x01\xff\x3f\x43", 8) == 0)
-        return 0;
-    else
-        return 1;
-}
-]])],
-[ac_cv_little_endian_double=yes],
-[ac_cv_little_endian_double=no],
-[ac_cv_little_endian_double=no])])
-AC_MSG_RESULT($ac_cv_little_endian_double)
-if test "$ac_cv_little_endian_double" = yes
-then
-  AC_DEFINE(DOUBLE_IS_LITTLE_ENDIAN_IEEE754, 1,
-  [Define if C doubles are 64-bit IEEE 754 binary format, stored
-   with the least significant byte first])
-fi
-
-AC_MSG_CHECKING(whether C doubles are big-endian IEEE 754 binary64)
-AC_CACHE_VAL(ac_cv_big_endian_double, [
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <string.h>
-int main() {
-    double x = 9006104071832581.0;
-    if (memcmp(&x, "\x43\x3f\xff\x01\x02\x03\x04\x05", 8) == 0)
-        return 0;
-    else
-        return 1;
-}
-]])],
-[ac_cv_big_endian_double=yes],
-[ac_cv_big_endian_double=no],
-[ac_cv_big_endian_double=no])])
-AC_MSG_RESULT($ac_cv_big_endian_double)
-if test "$ac_cv_big_endian_double" = yes
-then
-  AC_DEFINE(DOUBLE_IS_BIG_ENDIAN_IEEE754, 1,
-  [Define if C doubles are 64-bit IEEE 754 binary format, stored
-   with the most significant byte first])
-fi
-
-# Some ARM platforms use a mixed-endian representation for doubles.
-# While Python doesn't currently have full support for these platforms
-# (see e.g., issue 1762561), we can at least make sure that float <-> string
-# conversions work.
-AC_MSG_CHECKING(whether C doubles are ARM mixed-endian IEEE 754 binary64)
-AC_CACHE_VAL(ac_cv_mixed_endian_double, [
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <string.h>
-int main() {
-    double x = 9006104071832581.0;
-    if (memcmp(&x, "\x01\xff\x3f\x43\x05\x04\x03\x02", 8) == 0)
-        return 0;
-    else
-        return 1;
-}
-]])],
-[ac_cv_mixed_endian_double=yes],
-[ac_cv_mixed_endian_double=no],
-[ac_cv_mixed_endian_double=no])])
-AC_MSG_RESULT($ac_cv_mixed_endian_double)
-if test "$ac_cv_mixed_endian_double" = yes
-then
-  AC_DEFINE(DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754, 1,
-  [Define if C doubles are 64-bit IEEE 754 binary format, stored
-   in ARM mixed-endian order (byte order 45670123)])
-fi
-
-# The short float repr introduced in Python 3.1 requires the
-# correctly-rounded string <-> double conversion functions from
-# Python/dtoa.c, which in turn require that the FPU uses 53-bit
-# rounding; this is a problem on x86, where the x87 FPU has a default
-# rounding precision of 64 bits.  For gcc/x86, we can fix this by
-# using inline assembler to get and set the x87 FPU control word.
-
-# This inline assembler syntax may also work for suncc and icc,
-# so we try it on all platforms.
-
-AC_MSG_CHECKING(whether we can use gcc inline assembler to get and set x87 control word)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[
-  unsigned short cw;
-  __asm__ __volatile__ ("fnstcw %0" : "=m" (cw));
-  __asm__ __volatile__ ("fldcw %0" : : "m" (cw));
-]])],[have_gcc_asm_for_x87=yes],[have_gcc_asm_for_x87=no])
-AC_MSG_RESULT($have_gcc_asm_for_x87)
-if test "$have_gcc_asm_for_x87" = yes
-then
-    AC_DEFINE(HAVE_GCC_ASM_FOR_X87, 1,
-    [Define if we can use gcc inline assembler to get and set x87 control word])
-fi
-
-# Detect whether system arithmetic is subject to x87-style double
-# rounding issues.  The result of this test has little meaning on non
-# IEEE 754 platforms.  On IEEE 754, the test should return 1 if the rounding
-# mode is round-to-nearest and double rounding issues are present, and
-# 0 otherwise.  See http://bugs.python.org/issue2937 for more info.
-AC_MSG_CHECKING(for x87-style double rounding)
-# $BASECFLAGS may affect the result
-ac_save_cc="$CC"
-CC="$CC $BASECFLAGS"
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <stdlib.h>
-#include <math.h>
-int main() {
-    volatile double x, y, z;
-    /* 1./(1-2**-53) -> 1+2**-52 (correct), 1.0 (double rounding) */
-    x = 0.99999999999999989; /* 1-2**-53 */
-    y = 1./x;
-    if (y != 1.)
-        exit(0);
-    /* 1e16+2.99999 -> 1e16+2. (correct), 1e16+4. (double rounding) */
-    x = 1e16;
-    y = 2.99999;
-    z = x + y;
-    if (z != 1e16+4.)
-        exit(0);
-    /* both tests show evidence of double rounding */
-    exit(1);
-}
-]])],
-[ac_cv_x87_double_rounding=no],
-[ac_cv_x87_double_rounding=yes],
-[ac_cv_x87_double_rounding=no])
-CC="$ac_save_cc"
-AC_MSG_RESULT($ac_cv_x87_double_rounding)
-if test "$ac_cv_x87_double_rounding" = yes
-then
-  AC_DEFINE(X87_DOUBLE_ROUNDING, 1,
-  [Define if arithmetic is subject to x87-style double rounding issue])
-fi
-
-# ************************************
-# * Check for mathematical functions *
-# ************************************
-
-LIBS_SAVE=$LIBS
-LIBS="$LIBS $LIBM"
-
-AC_CHECK_FUNCS([acosh asinh atanh copysign erf erfc expm1 finite gamma])
-AC_CHECK_FUNCS([hypot lgamma log1p round tgamma])
-AC_CHECK_DECLS([isinf, isnan, isfinite], [], [], [[#include <math.h>]])
-
-# On FreeBSD 6.2, it appears that tanh(-0.) returns 0. instead of
-# -0. on some architectures.
-AC_MSG_CHECKING(whether tanh preserves the sign of zero)
-AC_CACHE_VAL(ac_cv_tanh_preserves_zero_sign, [
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <math.h>
-#include <stdlib.h>
-int main() {
-    /* return 0 if either negative zeros don't exist
-       on this platform or if negative zeros exist
-       and tanh(-0.) == -0. */
-  if (atan2(0., -1.) == atan2(-0., -1.) ||
-      atan2(tanh(-0.), -1.) == atan2(-0., -1.)) exit(0);
-  else exit(1);
-}
-]])],
-[ac_cv_tanh_preserves_zero_sign=yes],
-[ac_cv_tanh_preserves_zero_sign=no],
-[ac_cv_tanh_preserves_zero_sign=no])])
-AC_MSG_RESULT($ac_cv_tanh_preserves_zero_sign)
-if test "$ac_cv_tanh_preserves_zero_sign" = yes
-then
-  AC_DEFINE(TANH_PRESERVES_ZERO_SIGN, 1,
-  [Define if tanh(-0.) is -0., or if platform doesn't have signed zeros])
-fi
-
-if test "$ac_cv_func_log1p" = yes
-then
-    # On some versions of AIX, log1p(-0.) returns 0. instead of
-    # -0.  See issue #9920.
-    AC_MSG_CHECKING(whether log1p drops the sign of negative zero)
-    AC_CACHE_VAL(ac_cv_log1p_drops_zero_sign, [
-    AC_RUN_IFELSE([AC_LANG_SOURCE([[
-    #include <math.h>
-    #include <stdlib.h>
-    int main() {
-        /* Fail if the signs of log1p(-0.) and -0. can be
-	   distinguished. */
-        if (atan2(log1p(-0.), -1.) == atan2(-0., -1.))
-            return 0;
-        else
-            return 1;
-    }
-    ]])],
-    [ac_cv_log1p_drops_zero_sign=no],
-    [ac_cv_log1p_drops_zero_sign=yes],
-    [ac_cv_log1p_drops_zero_sign=no])])
-    AC_MSG_RESULT($ac_cv_log1p_drops_zero_sign)
-fi
-if test "$ac_cv_log1p_drops_zero_sign" = yes
-then
-  AC_DEFINE(LOG1P_DROPS_ZERO_SIGN, 1,
-  [Define if log1p(-0.) is 0. rather than -0.])
-fi
-
-LIBS=$LIBS_SAVE
-
-# For multiprocessing module, check that sem_open
-# actually works.  For FreeBSD versions <= 7.2,
-# the kernel module that provides POSIX semaphores
-# isn't loaded by default, so an attempt to call
-# sem_open results in a 'Signal 12' error.
-AC_MSG_CHECKING(whether POSIX semaphores are enabled)
-AC_CACHE_VAL(ac_cv_posix_semaphores_enabled,
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <unistd.h>
-#include <fcntl.h>
-#include <stdio.h>
-#include <semaphore.h>
-#include <sys/stat.h>
-
-int main(void) {
-  sem_t *a = sem_open("/autoconf", O_CREAT, S_IRUSR|S_IWUSR, 0);
-  if (a == SEM_FAILED) {
-    perror("sem_open");
-    return 1;
-  }
-  sem_close(a);
-  sem_unlink("/autoconf");
-  return 0;
-}
-]])],
-[ac_cv_posix_semaphores_enabled=yes],
-[ac_cv_posix_semaphores_enabled=no],
-[ac_cv_posix_semaphores_enabled=yes])
-)
-AC_MSG_RESULT($ac_cv_posix_semaphores_enabled)
-if test $ac_cv_posix_semaphores_enabled = no
-then
-  AC_DEFINE(POSIX_SEMAPHORES_NOT_ENABLED, 1,
-            [Define if POSIX semaphores aren't enabled on your system])
-fi
-
-# Multiprocessing check for broken sem_getvalue
-AC_MSG_CHECKING(for broken sem_getvalue)
-AC_CACHE_VAL(ac_cv_broken_sem_getvalue,
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <unistd.h>
-#include <fcntl.h>
-#include <stdio.h>
-#include <semaphore.h>
-#include <sys/stat.h>
-
-int main(void){
-  sem_t *a = sem_open("/autocftw", O_CREAT, S_IRUSR|S_IWUSR, 0);
-  int count;
-  int res;
-  if(a==SEM_FAILED){
-    perror("sem_open");
-    return 1;
-
-  }
-  res = sem_getvalue(a, &count);
-  sem_close(a);
-  sem_unlink("/autocftw");
-  return res==-1 ? 1 : 0;
-}
-]])],
-[ac_cv_broken_sem_getvalue=no],
-[ac_cv_broken_sem_getvalue=yes],
-[ac_cv_broken_sem_getvalue=yes])
-)
-AC_MSG_RESULT($ac_cv_broken_sem_getvalue)
-if test $ac_cv_broken_sem_getvalue = yes
-then
-  AC_DEFINE(HAVE_BROKEN_SEM_GETVALUE, 1,
-  [define to 1 if your sem_getvalue is broken.])
-fi
-
-# determine what size digit to use for Python's longs
-AC_MSG_CHECKING([digit size for Python's longs])
-AC_ARG_ENABLE(big-digits,
-AS_HELP_STRING([--enable-big-digits@<:@=BITS@:>@],[use big digits for Python longs [[BITS=30]]]),
-[case $enable_big_digits in
-yes)
-  enable_big_digits=30 ;;
-no)
-  enable_big_digits=15 ;;
-[15|30])
-  ;;
-*)
-  AC_MSG_ERROR([bad value $enable_big_digits for --enable-big-digits; value should be 15 or 30]) ;;
-esac
-AC_MSG_RESULT($enable_big_digits)
-AC_DEFINE_UNQUOTED(PYLONG_BITS_IN_DIGIT, $enable_big_digits, [Define as the preferred size in bits of long digits])
-],
-[AC_MSG_RESULT(no value specified)])
-
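PYLONG_BITS_IN_DIGIT picks the base used for CPython's arbitrary-precision integers. A simplified sketch of how the choice typically maps onto storage types (assumption: modelled on Include/longintrepr.h, not copied from it):

    #include <stdint.h>

    #ifndef PYLONG_BITS_IN_DIGIT
    #define PYLONG_BITS_IN_DIGIT 30      /* value selected by the option above */
    #endif

    #if PYLONG_BITS_IN_DIGIT == 30
    typedef uint32_t digit;              /* each digit holds values < 2**30 */
    typedef uint64_t twodigits;          /* wide enough for digit * digit */
    #else                                /* 15-bit digits */
    typedef unsigned short digit;        /* each digit holds values < 2**15 */
    typedef unsigned long twodigits;
    #endif

    #define PyLong_SHIFT  PYLONG_BITS_IN_DIGIT
    #define PyLong_BASE   ((twodigits)1 << PyLong_SHIFT)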
-# check for wchar.h
-AC_CHECK_HEADER(wchar.h, [
-  AC_DEFINE(HAVE_WCHAR_H, 1, 
-  [Define if the compiler provides a wchar.h header file.]) 
-  wchar_h="yes"
-],
-wchar_h="no"
-)
-
-# determine wchar_t size
-if test "$wchar_h" = yes
-then
-  AC_CHECK_SIZEOF(wchar_t, 4, [#include <wchar.h>])
-fi
-
-AC_MSG_CHECKING(for UCS-4 tcl)
-have_ucs4_tcl=no
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
-#include <tcl.h>
-#if TCL_UTF_MAX != 6
-# error "NOT UCS4_TCL"
-#endif]], [[]])],[
-  AC_DEFINE(HAVE_UCS4_TCL, 1, [Define this if you have tcl and TCL_UTF_MAX==6])
-  have_ucs4_tcl=yes
-],[])
-AC_MSG_RESULT($have_ucs4_tcl)
-
-# check whether wchar_t is signed or not
-if test "$wchar_h" = yes
-then
-  # check whether wchar_t is signed or not
-  AC_MSG_CHECKING(whether wchar_t is signed)
-  AC_CACHE_VAL(ac_cv_wchar_t_signed, [
-  AC_RUN_IFELSE([AC_LANG_SOURCE([[
-  #include <wchar.h>
-  int main()
-  {
-	/* Success: exit code 0 */
-        exit((((wchar_t) -1) < ((wchar_t) 0)) ? 0 : 1);
-  }
-  ]])],
-  [ac_cv_wchar_t_signed=yes],
-  [ac_cv_wchar_t_signed=no],
-  [ac_cv_wchar_t_signed=yes])])
-  AC_MSG_RESULT($ac_cv_wchar_t_signed)
-fi
-
-AC_MSG_CHECKING(what type to use for str)
-AC_ARG_WITH(wide-unicode, 
-            AS_HELP_STRING([--with-wide-unicode], [Use 4-byte Unicode characters (default is 2 bytes)]),
-[
-if test "$withval" != no
-then unicode_size="4"
-else unicode_size="2"
-fi
-],
-[
-case "$have_ucs4_tcl" in
-  yes) unicode_size="4";;
-  *)   unicode_size="2" ;;
-esac
-])
-
-AH_TEMPLATE(Py_UNICODE_SIZE,
-  [Define as the size of the unicode type.])
-case "$unicode_size" in
-  4)
-     AC_DEFINE(Py_UNICODE_SIZE, 4)
-     ABIFLAGS="${ABIFLAGS}u"
-     ;;
-  *) AC_DEFINE(Py_UNICODE_SIZE, 2) ;;
-esac
-
-AH_TEMPLATE(PY_UNICODE_TYPE,
-  [Define as the integral type used for Unicode representation.])
-
-# wchar_t is only usable if it maps to an unsigned type
-if test "$unicode_size" = "$ac_cv_sizeof_wchar_t" \
-          -a "$ac_cv_wchar_t_signed" = "no"
-then
-  PY_UNICODE_TYPE="wchar_t"
-  AC_DEFINE(HAVE_USABLE_WCHAR_T, 1,
-  [Define if you have a usable wchar_t type defined in wchar.h; usable
-   means wchar_t must be an unsigned type with at least 16 bits. (see
-   Include/unicodeobject.h).])
-  AC_DEFINE(PY_UNICODE_TYPE,wchar_t)
-elif test "$ac_cv_sizeof_short" = "$unicode_size"
-then
-     PY_UNICODE_TYPE="unsigned short"
-     AC_DEFINE(PY_UNICODE_TYPE,unsigned short)
-elif test "$ac_cv_sizeof_long" = "$unicode_size"
-then
-     PY_UNICODE_TYPE="unsigned long"
-     AC_DEFINE(PY_UNICODE_TYPE,unsigned long)
-else
-     PY_UNICODE_TYPE="no type found"
-fi
-AC_MSG_RESULT($PY_UNICODE_TYPE)
-
-# check for endianness
-AC_C_BIGENDIAN
-
-# ABI version string for Python extension modules.  This appears between the
-# periods in shared library file names, e.g. foo.<SOABI>.so.  It is calculated
-# from the following attributes which affect the ABI of this Python build (in
-# this order):
-#
-# * The Python implementation (always 'cpython-' for us)
-# * The major and minor version numbers
-# * --with-pydebug (adds a 'd')
-# * --with-pymalloc (adds a 'm')
-# * --with-wide-unicode (adds a 'u')
-#
-# Thus for example, Python 3.2 built with wide unicode, pydebug, and pymalloc,
-# would get a shared library ABI version tag of 'cpython-32dmu' and shared
-# libraries would be named 'foo.cpython-32dmu.so'.
-AC_SUBST(SOABI)
-AC_MSG_CHECKING(ABIFLAGS)
-AC_MSG_RESULT($ABIFLAGS)
-AC_MSG_CHECKING(SOABI)
-SOABI='cpython-'`echo $VERSION | tr -d .`${ABIFLAGS}
-AC_MSG_RESULT($SOABI)
-
-AC_MSG_CHECKING(LDVERSION)
-LDVERSION='$(VERSION)$(ABIFLAGS)'
-AC_MSG_RESULT($LDVERSION)
-
-# SO is the extension of shared libraries (including the dot!)
-# -- usually .so, .sl on HP-UX, .dll on Cygwin
-AC_MSG_CHECKING(SO)
-if test -z "$SO"
-then
-	case $ac_sys_system in
-	hp*|HP*)
-		case `uname -m` in
-			ia64) SO=.so;;
-	  		*)    SO=.sl;;
-		esac
-		;;
-	CYGWIN*)   SO=.dll;;
-	Linux*|GNU*)
-		   SO=.${SOABI}.so;;
-	*)	   SO=.so;;
-	esac
-else
-	# this might also be a termcap variable, see #610332
-	echo
-	echo '====================================================================='
-	echo '+                                                                   +'
-	echo '+ WARNING: You have set SO in your environment.                     +'
-	echo '+ Do you really mean to change the extension for shared libraries?  +'
-	echo '+ Continuing in 10 seconds to let you ponder.                       +'
-	echo '+                                                                   +'
-	echo '====================================================================='
-	sleep 10
-fi
-AC_MSG_RESULT($SO)
-
-# Check whether right shifting a negative integer extends the sign bit
-# or fills with zeros (like the Cray J90, according to Tim Peters).
-AC_MSG_CHECKING(whether right shift extends the sign bit)
-AC_CACHE_VAL(ac_cv_rshift_extends_sign, [
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-int main()
-{
-	exit(((-1)>>3 == -1) ? 0 : 1);
-}
-]])],
-[ac_cv_rshift_extends_sign=yes],
-[ac_cv_rshift_extends_sign=no],
-[ac_cv_rshift_extends_sign=yes])])
-AC_MSG_RESULT($ac_cv_rshift_extends_sign)
-if test "$ac_cv_rshift_extends_sign" = no
-then
-  AC_DEFINE(SIGNED_RIGHT_SHIFT_ZERO_FILLS, 1,
-  [Define if i>>j for signed int i does not extend the sign bit
-   when i < 0])
-fi
-
-# check for getc_unlocked and related locking functions
-AC_MSG_CHECKING(for getc_unlocked() and friends)
-AC_CACHE_VAL(ac_cv_have_getc_unlocked, [
-AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include <stdio.h>]], [[
-	FILE *f = fopen("/dev/null", "r");
-	flockfile(f);
-	getc_unlocked(f);
-	funlockfile(f);
-]])],[ac_cv_have_getc_unlocked=yes],[ac_cv_have_getc_unlocked=no])])
-AC_MSG_RESULT($ac_cv_have_getc_unlocked)
-if test "$ac_cv_have_getc_unlocked" = yes
-then
-  AC_DEFINE(HAVE_GETC_UNLOCKED, 1,
-  [Define this if you have flockfile(), getc_unlocked(), and funlockfile()])
-fi
-
-# check where readline lives
-# save the value of LIBS so we don't actually link Python with readline
-LIBS_no_readline=$LIBS
-
-# On some systems we need to link readline to a termcap compatible
-# library.  NOTE: Keep the precedence of listed libraries synchronised
-# with setup.py.
-py_cv_lib_readline=no
-AC_MSG_CHECKING([how to link readline libs])
-for py_libtermcap in "" ncursesw ncurses curses termcap; do
-  if test -z "$py_libtermcap"; then
-    READLINE_LIBS="-lreadline"
-  else
-    READLINE_LIBS="-lreadline -l$py_libtermcap"
-  fi
-  LIBS="$READLINE_LIBS $LIBS_no_readline"
-  AC_LINK_IFELSE(
-    [AC_LANG_CALL([],[readline])],
-    [py_cv_lib_readline=yes])
-  if test $py_cv_lib_readline = yes; then
-    break
-  fi
-done
-# Uncomment this line if you want to use READLINE_LIBS in Makefile or scripts
-#AC_SUBST([READLINE_LIBS])
-if test $py_cv_lib_readline = no; then
-  AC_MSG_RESULT([none])
-else
-  AC_MSG_RESULT([$READLINE_LIBS])
-  AC_DEFINE(HAVE_LIBREADLINE, 1,
-    [Define if you have the readline library (-lreadline).])
-fi
-
-# check for readline 2.1
-AC_CHECK_LIB(readline, rl_callback_handler_install,
-	AC_DEFINE(HAVE_RL_CALLBACK, 1,
-        [Define if you have readline 2.1]), ,$READLINE_LIBS)
-
-# check for readline 2.2
-AC_PREPROC_IFELSE([AC_LANG_SOURCE([[#include <readline/readline.h>]])],
-  [have_readline=yes],
-  [have_readline=no]
-)
-if test $have_readline = yes
-then
-  AC_EGREP_HEADER([extern int rl_completion_append_character;],
-  [readline/readline.h],
-  AC_DEFINE(HAVE_RL_COMPLETION_APPEND_CHARACTER, 1,
-  [Define if you have readline 2.2]), )
-  AC_EGREP_HEADER([extern int rl_completion_suppress_append;],
-  [readline/readline.h],
-  AC_DEFINE(HAVE_RL_COMPLETION_SUPPRESS_APPEND, 1,
-  [Define if you have rl_completion_suppress_append]), )
-fi
-
-# check for readline 4.0
-AC_CHECK_LIB(readline, rl_pre_input_hook,
-	AC_DEFINE(HAVE_RL_PRE_INPUT_HOOK, 1,
-        [Define if you have readline 4.0]), ,$READLINE_LIBS)
-
-# also in 4.0
-AC_CHECK_LIB(readline, rl_completion_display_matches_hook,
-	AC_DEFINE(HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK, 1,
-        [Define if you have readline 4.0]), ,$READLINE_LIBS)
-
-# check for readline 4.2
-AC_CHECK_LIB(readline, rl_completion_matches,
-	AC_DEFINE(HAVE_RL_COMPLETION_MATCHES, 1,
-        [Define if you have readline 4.2]), ,$READLINE_LIBS)
-
-# also in readline 4.2
-AC_PREPROC_IFELSE([AC_LANG_SOURCE([[#include <readline/readline.h>]])],
-  [have_readline=yes],
-  [have_readline=no]
-)
-if test $have_readline = yes
-then
-  AC_EGREP_HEADER([extern int rl_catch_signals;],
-  [readline/readline.h],
-  AC_DEFINE(HAVE_RL_CATCH_SIGNAL, 1,
-  [Define if you can turn off readline's signal handling.]), )
-fi
-
-# End of readline checks: restore LIBS
-LIBS=$LIBS_no_readline
-
-AC_MSG_CHECKING(for broken nice())
-AC_CACHE_VAL(ac_cv_broken_nice, [
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-int main()
-{
-	int val1 = nice(1);
-	if (val1 != -1 && val1 == nice(2))
-		exit(0);
-	exit(1);
-}
-]])],
-[ac_cv_broken_nice=yes],
-[ac_cv_broken_nice=no],
-[ac_cv_broken_nice=no])])
-AC_MSG_RESULT($ac_cv_broken_nice)
-if test "$ac_cv_broken_nice" = yes
-then
-  AC_DEFINE(HAVE_BROKEN_NICE, 1,
-  [Define if nice() returns success/failure instead of the new priority.])
-fi
-
-AC_MSG_CHECKING(for broken poll())
-AC_CACHE_VAL(ac_cv_broken_poll,
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <poll.h>
-
-int main()
-{
-    struct pollfd poll_struct = { 42, POLLIN|POLLPRI|POLLOUT, 0 };
-    int poll_test;
-
-    close (42);
-
-    poll_test = poll(&poll_struct, 1, 0);
-    if (poll_test < 0)
-        return 0;
-    else if (poll_test == 0 && poll_struct.revents != POLLNVAL)
-        return 0;
-    else
-        return 1;
-}
-]])],
-[ac_cv_broken_poll=yes],
-[ac_cv_broken_poll=no],
-[ac_cv_broken_poll=no]))
-AC_MSG_RESULT($ac_cv_broken_poll)
-if test "$ac_cv_broken_poll" = yes
-then
-  AC_DEFINE(HAVE_BROKEN_POLL, 1,
-      [Define if poll() sets errno on invalid file descriptors.])
-fi
-
-# Before we can test tzset, we need to check if struct tm has a tm_zone 
-# (which is not required by ISO C or UNIX spec) and/or if we support
-# tzname[]
-AC_STRUCT_TIMEZONE
-
-# check tzset(3) exists and works like we expect it to
-AC_MSG_CHECKING(for working tzset())
-AC_CACHE_VAL(ac_cv_working_tzset, [
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <stdlib.h>
-#include <time.h>
-#include <string.h>
-
-#if HAVE_TZNAME
-extern char *tzname[];
-#endif
-
-int main()
-{
-	/* Note that we need to ensure that not only does tzset(3)
-	   do 'something' with localtime, but it works as documented
-	   in the library reference and as expected by the test suite.
-	   This includes making sure that tzname is set properly if
-	   tm->tm_zone does not exist since it is the alternative way
-	   of getting timezone info.
-
-	   Red Hat 6.2 doesn't understand the southern hemisphere 
-	   after New Year's Day.
-	*/
-
-	time_t groundhogday = 1044144000; /* GMT-based */
-	time_t midyear = groundhogday + (365 * 24 * 3600 / 2);
-
-	putenv("TZ=UTC+0");
-	tzset();
-	if (localtime(&groundhogday)->tm_hour != 0)
-	    exit(1);
-#if HAVE_TZNAME
-	/* For UTC, tzname[1] is sometimes "", sometimes "   " */
-	if (strcmp(tzname[0], "UTC") || 
-		(tzname[1][0] != 0 && tzname[1][0] != ' '))
-	    exit(1);
-#endif
-
-	putenv("TZ=EST+5EDT,M4.1.0,M10.5.0");
-	tzset();
-	if (localtime(&groundhogday)->tm_hour != 19)
-	    exit(1);
-#if HAVE_TZNAME
-	if (strcmp(tzname[0], "EST") || strcmp(tzname[1], "EDT"))
-	    exit(1);
-#endif
-
-	putenv("TZ=AEST-10AEDT-11,M10.5.0,M3.5.0");
-	tzset();
-	if (localtime(&groundhogday)->tm_hour != 11)
-	    exit(1);
-#if HAVE_TZNAME
-	if (strcmp(tzname[0], "AEST") || strcmp(tzname[1], "AEDT"))
-	    exit(1);
-#endif
-
-#if HAVE_STRUCT_TM_TM_ZONE
-	if (strcmp(localtime(&groundhogday)->tm_zone, "AEDT"))
-	    exit(1);
-	if (strcmp(localtime(&midyear)->tm_zone, "AEST"))
-	    exit(1);
-#endif
-
-	exit(0);
-}
-]])],
-[ac_cv_working_tzset=yes],
-[ac_cv_working_tzset=no],
-[ac_cv_working_tzset=no])])
-AC_MSG_RESULT($ac_cv_working_tzset)
-if test "$ac_cv_working_tzset" = yes
-then
-  AC_DEFINE(HAVE_WORKING_TZSET, 1,
-  [Define if tzset() actually switches the local timezone in a meaningful way.])
-fi
-
-# Look for subsecond timestamps in struct stat
-AC_MSG_CHECKING(for tv_nsec in struct stat)
-AC_CACHE_VAL(ac_cv_stat_tv_nsec,
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/stat.h>]], [[
-struct stat st;
-st.st_mtim.tv_nsec = 1;
-]])],
-[ac_cv_stat_tv_nsec=yes],
-[ac_cv_stat_tv_nsec=no]))
-AC_MSG_RESULT($ac_cv_stat_tv_nsec)
-if test "$ac_cv_stat_tv_nsec" = yes
-then
-  AC_DEFINE(HAVE_STAT_TV_NSEC, 1,
-  [Define if you have struct stat.st_mtim.tv_nsec])
-fi
-
-# Look for BSD style subsecond timestamps in struct stat
-AC_MSG_CHECKING(for tv_nsec2 in struct stat)
-AC_CACHE_VAL(ac_cv_stat_tv_nsec2,
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <sys/stat.h>]], [[
-struct stat st;
-st.st_mtimespec.tv_nsec = 1;
-]])],
-[ac_cv_stat_tv_nsec2=yes],
-[ac_cv_stat_tv_nsec2=no]))
-AC_MSG_RESULT($ac_cv_stat_tv_nsec2)
-if test "$ac_cv_stat_tv_nsec2" = yes
-then
-  AC_DEFINE(HAVE_STAT_TV_NSEC2, 1,
-  [Define if you have struct stat.st_mtimensec])
-fi
-
-# On HP/UX 11.0, mvwdelch is a block with a return statement
-AC_MSG_CHECKING(whether mvwdelch is an expression)
-AC_CACHE_VAL(ac_cv_mvwdelch_is_expression,
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <curses.h>]], [[
-  int rtn;
-  rtn = mvwdelch(0,0,0);
-]])],
-[ac_cv_mvwdelch_is_expression=yes],
-[ac_cv_mvwdelch_is_expression=no]))
-AC_MSG_RESULT($ac_cv_mvwdelch_is_expression)
-
-if test "$ac_cv_mvwdelch_is_expression" = yes
-then
-  AC_DEFINE(MVWDELCH_IS_EXPRESSION, 1,
-  [Define if mvwdelch in curses.h is an expression.])
-fi
-
-AC_MSG_CHECKING(whether WINDOW has _flags)
-AC_CACHE_VAL(ac_cv_window_has_flags,
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <curses.h>]], [[
-  WINDOW *w;
-  w->_flags = 0;
-]])],
-[ac_cv_window_has_flags=yes],
-[ac_cv_window_has_flags=no]))
-AC_MSG_RESULT($ac_cv_window_has_flags)
-
-
-if test "$ac_cv_window_has_flags" = yes
-then
-  AC_DEFINE(WINDOW_HAS_FLAGS, 1, 
-  [Define if WINDOW in curses.h offers a field _flags.])
-fi
-
-AC_MSG_CHECKING(for is_term_resized)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <curses.h>]], [[void *x=is_term_resized]])],
-  [AC_DEFINE(HAVE_CURSES_IS_TERM_RESIZED, 1, Define if you have the 'is_term_resized' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)]
-)
-
-AC_MSG_CHECKING(for resize_term)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <curses.h>]], [[void *x=resize_term]])],
-  [AC_DEFINE(HAVE_CURSES_RESIZE_TERM, 1, Define if you have the 'resize_term' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)]
-)
-
-AC_MSG_CHECKING(for resizeterm)
-AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <curses.h>]], [[void *x=resizeterm]])],
-  [AC_DEFINE(HAVE_CURSES_RESIZETERM, 1, Define if you have the 'resizeterm' function.)
-   AC_MSG_RESULT(yes)],
-  [AC_MSG_RESULT(no)]
-)
-
-AC_MSG_CHECKING(for /dev/ptmx)
-
-if test -r /dev/ptmx
-then
-  AC_MSG_RESULT(yes)
-  AC_DEFINE(HAVE_DEV_PTMX, 1,
-  [Define if we have /dev/ptmx.])
-else
-  AC_MSG_RESULT(no)
-fi
-
-AC_MSG_CHECKING(for /dev/ptc)
-
-if test -r /dev/ptc
-then
-  AC_MSG_RESULT(yes)
-  AC_DEFINE(HAVE_DEV_PTC, 1,
-  [Define if we have /dev/ptc.])
-else
-  AC_MSG_RESULT(no)
-fi
-
-if test "$have_long_long" = yes
-then
-  AC_MSG_CHECKING(for %lld and %llu printf() format support)
-  AC_CACHE_VAL(ac_cv_have_long_long_format,
-  AC_RUN_IFELSE([AC_LANG_SOURCE([[[
-  #include <stdio.h>
-  #include <stddef.h>
-  #include <string.h>
-
-  #ifdef HAVE_SYS_TYPES_H
-  #include <sys/types.h>
-  #endif
-
-  int main()
-  {
-      char buffer[256];
-
-      if (sprintf(buffer, "%lld", (long long)123) < 0)
-          return 1;
-      if (strcmp(buffer, "123"))
-          return 1;
-
-      if (sprintf(buffer, "%lld", (long long)-123) < 0)
-          return 1;
-      if (strcmp(buffer, "-123"))
-          return 1;
-
-      if (sprintf(buffer, "%llu", (unsigned long long)123) < 0)
-          return 1;
-      if (strcmp(buffer, "123"))
-          return 1;
-
-      return 0;
-  }
-  ]]])],
-  [ac_cv_have_long_long_format=yes],
-  [ac_cv_have_long_long_format=no],
-  [ac_cv_have_long_long_format=no])
-  )
-  AC_MSG_RESULT($ac_cv_have_long_long_format)
-fi
-
-if test "$ac_cv_have_long_long_format" = yes
-then
-  AC_DEFINE(PY_FORMAT_LONG_LONG, "ll",
-  [Define to printf format modifier for long long type])
-fi
-
-if test $ac_sys_system = Darwin
-then
-	LIBS="$LIBS -framework CoreFoundation"
-fi
-
-AC_CACHE_CHECK([for %zd printf() format support], ac_cv_have_size_t_format, [dnl
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include <stdio.h>
-#include <stddef.h>
-#include <string.h>
-
-#ifdef HAVE_SYS_TYPES_H
-#include <sys/types.h>
-#endif
-
-#ifdef HAVE_SSIZE_T
-typedef ssize_t Py_ssize_t;
-#elif SIZEOF_VOID_P == SIZEOF_LONG
-typedef long Py_ssize_t;
-#else
-typedef int Py_ssize_t;
-#endif
-
-int main()
-{
-    char buffer[256];
-
-    if(sprintf(buffer, "%zd", (size_t)123) < 0)
-       	return 1;
-
-    if (strcmp(buffer, "123"))
-	return 1;
-
-    if (sprintf(buffer, "%zd", (Py_ssize_t)-123) < 0)
-       	return 1;
-
-    if (strcmp(buffer, "-123"))
-	return 1;
-
-    return 0;
-}
-]])],
-  [ac_cv_have_size_t_format=yes],
-  [ac_cv_have_size_t_format=no],
-  [ac_cv_have_size_t_format="cross -- assuming yes"
-])])
-if test "$ac_cv_have_size_t_format" != no ; then
-  AC_DEFINE(PY_FORMAT_SIZE_T, "z",
-  [Define to printf format modifier for Py_ssize_t])
-fi
-
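PY_FORMAT_SIZE_T is meant to be spliced between "%" and the conversion letter when printing Py_ssize_t-sized values. A hedged usage sketch (ssize_t stands in for Py_ssize_t; the "z" fallback below is an assumption for platforms where the test above passes):

    #include <stdio.h>
    #include <sys/types.h>               /* ssize_t */

    #ifndef PY_FORMAT_SIZE_T
    #define PY_FORMAT_SIZE_T "z"
    #endif

    int main(void)
    {
        ssize_t n = -123;
        printf("length: %" PY_FORMAT_SIZE_T "d\n", n);
        return 0;
    }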
-AC_CHECK_TYPE(socklen_t,,
-  AC_DEFINE(socklen_t,int,
-            [Define to `int' if <sys/socket.h> does not define.]),[
-#ifdef HAVE_SYS_TYPES_H
-#include <sys/types.h>
-#endif
-#ifdef HAVE_SYS_SOCKET_H
-#include <sys/socket.h>
-#endif
-])
-
-AC_MSG_CHECKING(for broken mbstowcs)
-AC_CACHE_VAL(ac_cv_broken_mbstowcs,
-AC_RUN_IFELSE([AC_LANG_SOURCE([[
-#include<stdlib.h>
-int main() {
-    size_t len = -1;
-    const char *str = "text";
-    len = mbstowcs(NULL, str, 0);
-    return (len != 4);
-}
-]])],
-[ac_cv_broken_mbstowcs=no],
-[ac_cv_broken_mbstowcs=yes],
-[ac_cv_broken_mbstowcs=no]))
-AC_MSG_RESULT($ac_cv_broken_mbstowcs)
-if test "$ac_cv_broken_mbstowcs" = yes
-then
-  AC_DEFINE(HAVE_BROKEN_MBSTOWCS, 1,
-  [Define if mbstowcs(NULL, "text", 0) does not return the number of 
-   wide chars that would be converted.])
-fi
-
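The call pattern being tested is mbstowcs(NULL, s, 0), which is specified to return the number of wide characters s would convert to without writing any output; libcs where that query form fails get HAVE_BROKEN_MBSTOWCS instead. A small sketch of the query-then-convert idiom:

    #include <stdio.h>
    #include <stdlib.h>
    #include <wchar.h>

    int main(void)
    {
        const char *s = "text";
        size_t needed = mbstowcs(NULL, s, 0);        /* query length only */
        if (needed == (size_t)-1)
            return 1;                                /* invalid multibyte input */
        wchar_t *buf = malloc((needed + 1) * sizeof(wchar_t));
        if (buf == NULL)
            return 1;
        mbstowcs(buf, s, needed + 1);
        printf("converted %zu wide chars\n", needed);
        free(buf);
        return 0;
    }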
-AC_MSG_CHECKING(whether $CC supports computed gotos)
-AC_CACHE_VAL(ac_cv_computed_gotos,
-AC_RUN_IFELSE([AC_LANG_SOURCE([[[
-int main(int argc, char **argv)
-{
-    static void *targets[1] = { &&LABEL1 };
-    goto LABEL2;
-LABEL1:
-    return 0;
-LABEL2:
-    goto *targets[0];
-    return 1;
-}
-]]])],
-[ac_cv_computed_gotos=yes],
-[ac_cv_computed_gotos=no],
-[ac_cv_computed_gotos=no]))
-AC_MSG_RESULT($ac_cv_computed_gotos)
-if test "$ac_cv_computed_gotos" = yes
-then
-  AC_DEFINE(HAVE_COMPUTED_GOTOS, 1,
-  [Define if the C compiler supports computed gotos.])
-fi
-
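Computed gotos are the GCC extension exercised above: && takes the address of a label and goto * jumps through it, which lets an interpreter dispatch each opcode with a single indirect jump. A sketch of that dispatch style (opcode names are illustrative, not taken from ceval.c):

    #include <stdio.h>

    enum { OP_INCR, OP_PRINT, OP_HALT };

    int main(void)
    {
        static const unsigned char program[] = { OP_INCR, OP_INCR, OP_PRINT, OP_HALT };
        static void *dispatch[] = { &&do_incr, &&do_print, &&do_halt };
        const unsigned char *pc = program;
        int acc = 0;

    #define DISPATCH() goto *dispatch[*pc++]

        DISPATCH();
    do_incr:
        acc++;
        DISPATCH();
    do_print:
        printf("acc = %d\n", acc);
        DISPATCH();
    do_halt:
        return 0;
    }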
-# Check for --with-computed-gotos
-AC_MSG_CHECKING(for --with-computed-gotos)
-AC_ARG_WITH(computed-gotos,
-            AS_HELP_STRING([--with(out)-computed-gotos],
-                           [Use computed gotos in evaluation loop (enabled by default on supported compilers)]),
-[
-if test "$withval" = yes
-then 
-  AC_DEFINE(USE_COMPUTED_GOTOS, 1,
-  [Define if you want to use computed gotos in ceval.c.]) 
-  AC_MSG_RESULT(yes)
-fi
-if test "$withval" = no
-then 
-  AC_DEFINE(USE_COMPUTED_GOTOS, 0,
-  [Define if you want to use computed gotos in ceval.c.]) 
-  AC_MSG_RESULT(no)
-fi
-],
-[AC_MSG_RESULT(no value specified)])
-
-case $ac_sys_system in
-AIX*)   
-  AC_DEFINE(HAVE_BROKEN_PIPE_BUF, 1, [Define if the system reports an invalid PIPE_BUF value.]) ;;
-esac
-
-
-case $ac_sys_system in
-  OSF*) AC_MSG_ERROR(OSF* systems are deprecated unless somebody volunteers. Check http://bugs.python.org/issue8606) ;;
-esac
-
-AC_CHECK_FUNC(pipe2, AC_DEFINE(HAVE_PIPE2, 1, [Define if the OS supports pipe2()]), )
-
-AC_SUBST(THREADHEADERS)
-
-for h in `(cd $srcdir;echo Python/thread_*.h)`
-do
-  THREADHEADERS="$THREADHEADERS \$(srcdir)/$h"
-done
-
-AC_SUBST(SRCDIRS)
-SRCDIRS="Parser Grammar Objects Python Modules Mac"
-AC_MSG_CHECKING(for build directories)
-for dir in $SRCDIRS; do
-    if test ! -d $dir; then
-        mkdir $dir
-    fi
-done
-AC_MSG_RESULT(done)
-
-# generate output files
-AC_CONFIG_FILES(Makefile.pre Modules/Setup.config Misc/python.pc)
-AC_CONFIG_FILES([Modules/ld_so_aix], [chmod +x Modules/ld_so_aix])
-AC_OUTPUT
-
-echo "creating Modules/Setup"
-if test ! -f Modules/Setup
-then
-	cp $srcdir/Modules/Setup.dist Modules/Setup
-fi
-
-echo "creating Modules/Setup.local"
-if test ! -f Modules/Setup.local
-then
-	echo "# Edit this file for local setup changes" >Modules/Setup.local
-fi
-
-echo "creating Makefile"
-$SHELL $srcdir/Modules/makesetup -c $srcdir/Modules/config.c.in \
-			-s Modules Modules/Setup.config \
-			Modules/Setup.local Modules/Setup
-mv config.c Modules
diff -r 3d0686d90f55 pyconfig.h.in
--- a/pyconfig.h.in
+++ b/pyconfig.h.in
@@ -1,4 +1,4 @@
-/* pyconfig.h.in.  Generated from configure.in by autoheader.  */
+/* pyconfig.h.in.  Generated from configure.ac by autoheader.  */
 
 
 #ifndef Py_PYCONFIG_H
diff -r 3d0686d90f55 setup.py
--- a/setup.py
+++ b/setup.py
@@ -60,7 +60,9 @@
     Returns True if 'path' can be located in an OSX SDK
     """
-    return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/')
+    return ( (path.startswith('/usr/') and not path.startswith('/usr/local'))
+                or path.startswith('/System/')
+                or path.startswith('/Library/') )
 
 def find_file(filename, std_dirs, paths):
     """Searches for the directory where a given file is located,
@@ -197,7 +200,7 @@
 
         # Python header files
         headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h"))
+        headers += glob(os.path.join(sysconfig.get_path('include'), "*.h"))
 
         for ext in self.extensions[:]:
             ext.sources = [ find_module_file(filename, moddirlist)
@@ -467,6 +470,10 @@
         if platform in ['osf1', 'unixware7', 'openunix8']:
             lib_dirs += ['/usr/ccs/lib']
 
+        # HP-UX11iv3 keeps files in lib/hpux folders.
+        if platform == 'hp-ux11':
+            lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32']
+
         if platform == 'darwin':
             # This should work on any unixy platform ;-)
             # If the user has bothered specifying additional -I and -L flags
@@ -974,12 +981,12 @@
         if sys.platform == 'darwin':
             sysroot = macosx_sdk_root()
 
-        for d in inc_dirs + sqlite_inc_paths:
+        for d_ in inc_dirs + sqlite_inc_paths:
+            d = d_
+            if sys.platform == 'darwin' and is_macosx_sdk_path(d):
+                d = os.path.join(sysroot, d[1:])
+
             f = os.path.join(d, "sqlite3.h")
-
-            if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                f = os.path.join(sysroot, d[1:], "sqlite3.h")
-
             if os.path.exists(f):
                 if sqlite_setup_debug: print("sqlite: found %s"%f)
                 with open(f) as file:
@@ -1073,10 +1080,14 @@
             for cand in dbm_order:
                 if cand == "ndbm":
                     if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others don't
+                        # Some systems have -lndbm, others have -lgdbm_compat,
+                        # others don't have either
                         if self.compiler.find_library_file(lib_dirs,
                                                                'ndbm'):
                             ndbm_libs = ['ndbm']
+                        elif self.compiler.find_library_file(lib_dirs,
+                                                             'gdbm_compat'):
+                            ndbm_libs = ['gdbm_compat']
                         else:
                             ndbm_libs = []
                         print("building dbm using ndbm")
