An interpreted, interactive, object-oriented programming language
CentOS Sources
2015-11-19 f63228654ecef84a78c552dac832f4cd939cf584
import python-2.7.5-34.el7
34 files added
1 file modified
17985 lines changed

Changed files:
SOURCES/00196-avoid-double-close-of-pipes.patch (288 lines)
SOURCES/00197-add-missing-import-in-bdist_rpm.patch (20 lines)
SOURCES/00198-fix-readline-erroneous-output.patch (166 lines)
SOURCES/00199-CVE-2013-1753.patch (88 lines)
SOURCES/00200-CVE-2014-4616.patch (52 lines)
SOURCES/00201-CVE-2014-4650.patch (35 lines)
SOURCES/00202-CVE-2014-7185.patch (51 lines)
SOURCES/00203-CVE-2013-1752-nntplib.patch (108 lines)
SOURCES/00204-CVE-2013-1752-ftplib.patch (149 lines)
SOURCES/00205-CVE-2013-1752-httplib-headers.patch (51 lines)
SOURCES/00206-CVE-2013-1752-poplib.patch (60 lines)
SOURCES/00207-CVE-2013-1752-smtplib.patch (100 lines)
SOURCES/00208-CVE-2013-1752-imaplib.patch (59 lines)
SOURCES/00209-pep466-backport-hmac.compare_digest.patch (353 lines)
SOURCES/00210-pep466-backport-hashlib.pbkdf2_hmac.patch (489 lines)
SOURCES/00211-pep466-UTF-7-decoder-fix-illegal-unicode.patch (67 lines)
SOURCES/00212-pep466-pyunicode_fromformat-raise-overflow.patch (40 lines)
SOURCES/00213-pep466-pyunicode_fromformat-fix-formats.patch (176 lines)
SOURCES/00214-pep466-backport-py3-ssl-changes.patch (13082 lines)
SOURCES/00215-pep466-reflect-openssl-settings-ssltests.patch (26 lines)
SOURCES/00216-pep466-fix-load-verify-locs-unicode.patch (72 lines)
SOURCES/00217-pep466-backport-hashlib-algorithm-consts.patch (189 lines)
SOURCES/00218-pep466-backport-urandom-pers-fd.patch (165 lines)
SOURCES/00219-pep466-fix-referenced-sslwrap.patch (91 lines)
SOURCES/00220-pep466-allow-passing-ssl-urrlib-httplib.patch (673 lines)
SOURCES/00221-pep466-backport-sslwrap-c-ssl.patch (384 lines)
SOURCES/00222-add-2014-bit-dh-key.patch (49 lines)
SOURCES/00223-pep476-verify-certs-by-default.patch (153 lines)
SOURCES/00224-pep476-add-toggle-for-cert-verify.patch (103 lines)
SOURCES/00225-cprofile-sort-option.patch (25 lines)
SOURCES/00227-accept-none-keyfile-loadcertchain.patch (105 lines)
SOURCES/00228-backport-ssl-version.patch (260 lines)
SOURCES/cert-verification.cfg (5 lines)
SOURCES/python.conf (1 line)
SPECS/python.spec (250 lines)
SOURCES/00196-avoid-double-close-of-pipes.patch
New file
@@ -0,0 +1,288 @@
# HG changeset patch
# User Antoine Pitrou <solipsis@pitrou.net>
# Date 1377898693 -7200
# Node ID 43749cb6bdbd0fdab70f76cd171c3c02a3f600dd
# Parent  ba54011aa295004ad87438211fe3bb1568dd69ab
Issue #18851: Avoid a double close of subprocess pipes when the child process fails starting.
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -698,12 +698,12 @@ class Popen(object):
         (p2cread, p2cwrite,
          c2pread, c2pwrite,
-         errread, errwrite) = self._get_handles(stdin, stdout, stderr)
+         errread, errwrite), to_close = self._get_handles(stdin, stdout, stderr)
         try:
             self._execute_child(args, executable, preexec_fn, close_fds,
                                 cwd, env, universal_newlines,
-                                startupinfo, creationflags, shell,
+                                startupinfo, creationflags, shell, to_close,
                                 p2cread, p2cwrite,
                                 c2pread, c2pwrite,
                                 errread, errwrite)
@@ -711,18 +711,12 @@ class Popen(object):
             # Preserve original exception in case os.close raises.
             exc_type, exc_value, exc_trace = sys.exc_info()
-            to_close = []
-            # Only close the pipes we created.
-            if stdin == PIPE:
-                to_close.extend((p2cread, p2cwrite))
-            if stdout == PIPE:
-                to_close.extend((c2pread, c2pwrite))
-            if stderr == PIPE:
-                to_close.extend((errread, errwrite))
-
             for fd in to_close:
                 try:
-                    os.close(fd)
+                    if mswindows:
+                        fd.Close()
+                    else:
+                        os.close(fd)
                 except EnvironmentError:
                     pass
@@ -816,8 +810,9 @@ class Popen(object):
             """Construct and return tuple with IO objects:
             p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
             """
+            to_close = set()
             if stdin is None and stdout is None and stderr is None:
-                return (None, None, None, None, None, None)
+                return (None, None, None, None, None, None), to_close
             p2cread, p2cwrite = None, None
             c2pread, c2pwrite = None, None
@@ -835,6 +830,10 @@ class Popen(object):
                 # Assuming file-like object
                 p2cread = msvcrt.get_osfhandle(stdin.fileno())
             p2cread = self._make_inheritable(p2cread)
+            # We just duplicated the handle, it has to be closed at the end
+            to_close.add(p2cread)
+            if stdin == PIPE:
+                to_close.add(p2cwrite)
             if stdout is None:
                 c2pwrite = _subprocess.GetStdHandle(_subprocess.STD_OUTPUT_HANDLE)
@@ -848,6 +847,10 @@ class Popen(object):
                 # Assuming file-like object
                 c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
             c2pwrite = self._make_inheritable(c2pwrite)
+            # We just duplicated the handle, it has to be closed at the end
+            to_close.add(c2pwrite)
+            if stdout == PIPE:
+                to_close.add(c2pread)
             if stderr is None:
                 errwrite = _subprocess.GetStdHandle(_subprocess.STD_ERROR_HANDLE)
@@ -863,10 +866,14 @@ class Popen(object):
                 # Assuming file-like object
                 errwrite = msvcrt.get_osfhandle(stderr.fileno())
             errwrite = self._make_inheritable(errwrite)
+            # We just duplicated the handle, it has to be closed at the end
+            to_close.add(errwrite)
+            if stderr == PIPE:
+                to_close.add(errread)
             return (p2cread, p2cwrite,
                     c2pread, c2pwrite,
-                    errread, errwrite)
+                    errread, errwrite), to_close
         def _make_inheritable(self, handle):
@@ -895,7 +902,7 @@ class Popen(object):
         def _execute_child(self, args, executable, preexec_fn, close_fds,
                            cwd, env, universal_newlines,
-                           startupinfo, creationflags, shell,
+                           startupinfo, creationflags, shell, to_close,
                            p2cread, p2cwrite,
                            c2pread, c2pwrite,
                            errread, errwrite):
@@ -934,6 +941,10 @@ class Popen(object):
                     # kill children.
                     creationflags |= _subprocess.CREATE_NEW_CONSOLE
+            def _close_in_parent(fd):
+                fd.Close()
+                to_close.remove(fd)
+
             # Start the process
             try:
                 hp, ht, pid, tid = _subprocess.CreateProcess(executable, args,
@@ -958,11 +969,11 @@ class Popen(object):
                 # pipe will not close when the child process exits and the
                 # ReadFile will hang.
                 if p2cread is not None:
-                    p2cread.Close()
+                    _close_in_parent(p2cread)
                 if c2pwrite is not None:
-                    c2pwrite.Close()
+                    _close_in_parent(c2pwrite)
                 if errwrite is not None:
-                    errwrite.Close()
+                    _close_in_parent(errwrite)
             # Retain the process handle, but close the thread handle
             self._child_created = True
@@ -1088,6 +1099,7 @@ class Popen(object):
             """Construct and return tuple with IO objects:
             p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
             """
+            to_close = set()
             p2cread, p2cwrite = None, None
             c2pread, c2pwrite = None, None
             errread, errwrite = None, None
@@ -1096,6 +1108,7 @@ class Popen(object):
                 pass
             elif stdin == PIPE:
                 p2cread, p2cwrite = self.pipe_cloexec()
+                to_close.update((p2cread, p2cwrite))
             elif isinstance(stdin, int):
                 p2cread = stdin
             else:
@@ -1106,6 +1119,7 @@ class Popen(object):
                 pass
             elif stdout == PIPE:
                 c2pread, c2pwrite = self.pipe_cloexec()
+                to_close.update((c2pread, c2pwrite))
             elif isinstance(stdout, int):
                 c2pwrite = stdout
             else:
@@ -1116,6 +1130,7 @@ class Popen(object):
                 pass
             elif stderr == PIPE:
                 errread, errwrite = self.pipe_cloexec()
+                to_close.update((errread, errwrite))
             elif stderr == STDOUT:
                 errwrite = c2pwrite
             elif isinstance(stderr, int):
@@ -1126,7 +1141,7 @@ class Popen(object):
             return (p2cread, p2cwrite,
                     c2pread, c2pwrite,
-                    errread, errwrite)
+                    errread, errwrite), to_close
         def _set_cloexec_flag(self, fd, cloexec=True):
@@ -1170,7 +1185,7 @@ class Popen(object):
         def _execute_child(self, args, executable, preexec_fn, close_fds,
                            cwd, env, universal_newlines,
-                           startupinfo, creationflags, shell,
+                           startupinfo, creationflags, shell, to_close,
                            p2cread, p2cwrite,
                            c2pread, c2pwrite,
                            errread, errwrite):
@@ -1189,6 +1204,10 @@ class Popen(object):
             if executable is None:
                 executable = args[0]
+            def _close_in_parent(fd):
+                os.close(fd)
+                to_close.remove(fd)
+
             # For transferring possible exec failure from child to parent
             # The first char specifies the exception type: 0 means
             # OSError, 1 means some other error.
@@ -1283,17 +1302,17 @@ class Popen(object):
                     # be sure the FD is closed no matter what
                     os.close(errpipe_write)
-                if p2cread is not None and p2cwrite is not None:
-                    os.close(p2cread)
-                if c2pwrite is not None and c2pread is not None:
-                    os.close(c2pwrite)
-                if errwrite is not None and errread is not None:
-                    os.close(errwrite)
-
                 # Wait for exec to fail or succeed; possibly raising exception
                 # Exception limited to 1M
                 data = _eintr_retry_call(os.read, errpipe_read, 1048576)
             finally:
+                if p2cread is not None and p2cwrite is not None:
+                    _close_in_parent(p2cread)
+                if c2pwrite is not None and c2pread is not None:
+                    _close_in_parent(c2pwrite)
+                if errwrite is not None and errread is not None:
+                    _close_in_parent(errwrite)
+
                 # be sure the FD is closed no matter what
                 os.close(errpipe_read)
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -14,6 +14,10 @@ try:
     import resource
 except ImportError:
     resource = None
+try:
+    import threading
+except ImportError:
+    threading = None
 mswindows = (sys.platform == "win32")
@@ -629,6 +633,36 @@ class ProcessTestCase(BaseTestCase):
             if c.exception.errno not in (errno.ENOENT, errno.EACCES):
                 raise c.exception
+    @unittest.skipIf(threading is None, "threading required")
+    def test_double_close_on_error(self):
+        # Issue #18851
+        fds = []
+        def open_fds():
+            for i in range(20):
+                fds.extend(os.pipe())
+                time.sleep(0.001)
+        t = threading.Thread(target=open_fds)
+        t.start()
+        try:
+            with self.assertRaises(EnvironmentError):
+                subprocess.Popen(['nonexisting_i_hope'],
+                                 stdin=subprocess.PIPE,
+                                 stdout=subprocess.PIPE,
+                                 stderr=subprocess.PIPE)
+        finally:
+            t.join()
+            exc = None
+            for fd in fds:
+                # If a double close occurred, some of those fds will
+                # already have been closed by mistake, and os.close()
+                # here will raise.
+                try:
+                    os.close(fd)
+                except OSError as e:
+                    exc = e
+            if exc is not None:
+                raise exc
+
     def test_handles_closed_on_exception(self):
         # If CreateProcess exits with an error, ensure the
         # duplicate output handles are released
@@ -783,7 +817,7 @@ class POSIXProcessTestCase(BaseTestCase)
         def _execute_child(
                 self, args, executable, preexec_fn, close_fds, cwd, env,
-                universal_newlines, startupinfo, creationflags, shell,
+                universal_newlines, startupinfo, creationflags, shell, to_close,
                 p2cread, p2cwrite,
                 c2pread, c2pwrite,
                 errread, errwrite):
@@ -791,7 +825,7 @@ class POSIXProcessTestCase(BaseTestCase)
                 subprocess.Popen._execute_child(
                         self, args, executable, preexec_fn, close_fds,
                         cwd, env, universal_newlines,
-                        startupinfo, creationflags, shell,
+                        startupinfo, creationflags, shell, to_close,
                         p2cread, p2cwrite,
                         c2pread, c2pwrite,
                         errread, errwrite)
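The patch above fixes the error path by tracking every pipe end the constructor created in a shared to_close set and closing each one exactly once. A minimal sketch of that bookkeeping pattern, using illustrative names rather than the real subprocess internals:

    import os

    def make_stdin_pipe():
        # Track every fd we create so the error path knows exactly what it owns.
        to_close = set()
        p2cread, p2cwrite = os.pipe()
        to_close.update((p2cread, p2cwrite))
        return (p2cread, p2cwrite), to_close

    def close_in_parent(fd, to_close):
        # Close once and forget the fd, so no later cleanup touches it again.
        os.close(fd)
        to_close.discard(fd)

    (p2cread, p2cwrite), to_close = make_stdin_pipe()
    close_in_parent(p2cread, to_close)       # normal path: parent gives up its end
    for fd in list(to_close):                # error path: close only what is still tracked
        try:
            os.close(fd)
        except EnvironmentError:
            pass
        to_close.discard(fd)

Without the shared set, the cleanup code could close descriptors that _execute_child had already closed; if the kernel had meanwhile handed the same numbers to another thread, those unrelated descriptors were lost. That is the race test_double_close_on_error provokes by opening pipes from a second thread.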
SOURCES/00197-add-missing-import-in-bdist_rpm.patch
New file
@@ -0,0 +1,20 @@
# HG changeset patch
# User Éric Araujo <merwok@netwok.org>
# Date 1394614885 14400
# Node ID 677327810121891704491bafa6209af5b60ebc91
# Parent  0f1237b61f58a77a159ab6e452782a8924ff2966
Fix missing import in bdist_rpm (#18045)
diff --git a/Lib/distutils/command/bdist_rpm.py b/Lib/distutils/command/bdist_rpm.py
--- a/Lib/distutils/command/bdist_rpm.py
+++ b/Lib/distutils/command/bdist_rpm.py
@@ -12,6 +12,7 @@ import string
 from distutils.core import Command
 from distutils.debug import DEBUG
 from distutils.file_util import write_file
+from distutils.sysconfig import get_python_version
 from distutils.errors import (DistutilsOptionError, DistutilsPlatformError,
                               DistutilsFileError, DistutilsExecError)
 from distutils import log
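This one-line fix matters because bdist_rpm calls get_python_version() when registering the built RPM; without the import that call fails with a NameError at run time. A small hedged illustration of the helper it pulls in (the printed value is only an example):

    from distutils.sysconfig import get_python_version

    print(get_python_version())    # e.g. '2.7' on this interpreter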
SOURCES/00198-fix-readline-erroneous-output.patch
New file
@@ -0,0 +1,166 @@
# HG changeset patch
# User Victor Stinner <victor.stinner@gmail.com>
# Date 1406197344 -7200
# Node ID 0177d8a4e82a613de0c64e747656c1d0b63e49b3
# Parent  e70ab72286b470b7209b91d3aa8a21953aafb78f
Issue #19884: readline: Disable the meta modifier key if stdout is not a
terminal to not write the ANSI sequence "\033[1034h" into stdout. This sequence
is used on some terminal (ex: TERM=xterm-256color") to enable support of 8 bit
characters.
diff --git a/Lib/test/test_readline.py b/Lib/test/test_readline.py
--- a/Lib/test/test_readline.py
+++ b/Lib/test/test_readline.py
@@ -1,17 +1,19 @@
 """
 Very minimal unittests for parts of the readline module.
-
-These tests were added to check that the libedit emulation on OSX and
-the "real" readline have the same interface for history manipulation. That's
-why the tests cover only a small subset of the interface.
 """
+import os
 import unittest
 from test.test_support import run_unittest, import_module
+from test.script_helper import assert_python_ok
 # Skip tests if there is no readline module
 readline = import_module('readline')
 class TestHistoryManipulation (unittest.TestCase):
+    """These tests were added to check that the libedit emulation on OSX and
+    the "real" readline have the same interface for history manipulation.
+    That's why the tests cover only a small subset of the interface.
+    """
     @unittest.skipIf(not hasattr(readline, 'clear_history'),
                      "The history update test cannot be run because the "
@@ -40,8 +42,18 @@ class TestHistoryManipulation (unittest.
         self.assertEqual(readline.get_current_history_length(), 1)
+class TestReadline(unittest.TestCase):
+    def test_init(self):
+        # Issue #19884: Ensure that the ANSI sequence "\033[1034h" is not
+        # written into stdout when the readline module is imported and stdout
+        # is redirected to a pipe.
+        rc, stdout, stderr = assert_python_ok('-c', 'import readline',
+                                              TERM='xterm-256color')
+        self.assertEqual(stdout, b'')
+
+
 def test_main():
-    run_unittest(TestHistoryManipulation)
+    run_unittest(TestHistoryManipulation, TestReadline)
 if __name__ == "__main__":
     test_main()
diff --git a/Modules/readline.c b/Modules/readline.c
--- a/Modules/readline.c
+++ b/Modules/readline.c
@@ -887,7 +887,7 @@ setup_readline(void)
 #endif
 #ifdef __APPLE__
-    /* the libedit readline emulation resets key bindings etc
+    /* the libedit readline emulation resets key bindings etc
      * when calling rl_initialize.  So call it upfront
      */
     if (using_libedit_emulation)
@@ -932,6 +932,17 @@ setup_readline(void)
     begidx = PyInt_FromLong(0L);
     endidx = PyInt_FromLong(0L);
+
+    if (!isatty(STDOUT_FILENO)) {
+        /* Issue #19884: stdout is no a terminal. Disable meta modifier
+           keys to not write the ANSI sequence "\033[1034h" into stdout. On
+           terminals supporting 8 bit characters like TERM=xterm-256color
+           (which is now the default Fedora since Fedora 18), the meta key is
+           used to enable support of 8 bit characters (ANSI sequence
+           "\033[1034h"). */
+        rl_variable_bind ("enable-meta-key", "off");
+    }
+
     /* Initialize (allows .inputrc to override)
      *
      * XXX: A bug in the readline-2.2 library causes a memory leak
@@ -943,7 +954,7 @@ setup_readline(void)
     else
 #endif /* __APPLE__ */
         rl_initialize();
-
+
     RESTORE_LOCALE(saved_locale)
 }
# HG changeset patch
# User Victor Stinner <victor.stinner@gmail.com>
# Date 1406232681 -7200
# Node ID f0ab6f9f06036dfacff09f22f86464840b50eb0a
# Parent  d422062d7d366386acdb81851b0f2ec3a6f6750c
Issue #19884, readline: calling rl_variable_bind ("enable-meta-key", "off")
does crash on Mac OS X which uses libedit instead of readline.
diff --git a/Modules/readline.c b/Modules/readline.c
--- a/Modules/readline.c
+++ b/Modules/readline.c
@@ -933,15 +933,19 @@ setup_readline(void)
     begidx = PyInt_FromLong(0L);
     endidx = PyInt_FromLong(0L);
+#ifndef __APPLE__
     if (!isatty(STDOUT_FILENO)) {
         /* Issue #19884: stdout is no a terminal. Disable meta modifier
            keys to not write the ANSI sequence "\033[1034h" into stdout. On
            terminals supporting 8 bit characters like TERM=xterm-256color
            (which is now the default Fedora since Fedora 18), the meta key is
            used to enable support of 8 bit characters (ANSI sequence
-           "\033[1034h"). */
+           "\033[1034h").
+
+           With libedit, this call makes readline() crash. */
         rl_variable_bind ("enable-meta-key", "off");
     }
+#endif
     /* Initialize (allows .inputrc to override)
      *
# HG changeset patch
# User Antoine Pitrou <solipsis@pitrou.net>
# Date 1415109130 -3600
# Node ID eba6e68e818c694e499dfc4b22dde095d2557ab1
# Parent  e54d0b197c8245bd29ea09f421e2f1da47370f41
Issue #22773: fix failing test with old readline versions due to issue #19884.
diff --git a/Lib/test/test_readline.py b/Lib/test/test_readline.py
--- a/Lib/test/test_readline.py
+++ b/Lib/test/test_readline.py
@@ -43,6 +43,10 @@ class TestHistoryManipulation (unittest.
 class TestReadline(unittest.TestCase):
+
+    @unittest.skipIf(readline._READLINE_VERSION < 0x0600
+                     and "libedit" not in readline.__doc__,
+                     "not supported in this library version")
     def test_init(self):
         # Issue #19884: Ensure that the ANSI sequence "\033[1034h" is not
         # written into stdout when the readline module is imported and stdout
diff --git a/Modules/readline.c b/Modules/readline.c
--- a/Modules/readline.c
+++ b/Modules/readline.c
@@ -1184,4 +1184,7 @@ initreadline(void)
     PyOS_ReadlineFunctionPointer = call_readline;
     setup_readline();
+
+    PyModule_AddIntConstant(m, "_READLINE_VERSION", RL_READLINE_VERSION);
+    PyModule_AddIntConstant(m, "_READLINE_RUNTIME_VERSION", rl_readline_version);
 }
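Taken together, the three changesets above make "import readline" stay quiet when stdout is redirected, and skip the workaround entirely on libedit. A quick self-contained check of that behaviour, sketched along the lines of the added test_init test and assuming a patched build:

    import os
    import subprocess
    import sys

    # Re-run the interpreter with stdout captured through a pipe; with the
    # patch applied, importing readline must not emit "\033[1034h".
    env = dict(os.environ, TERM='xterm-256color')
    out = subprocess.check_output([sys.executable, '-c', 'import readline'],
                                  env=env)
    assert out == '', 'unexpected escape sequence on stdout: %r' % out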
SOURCES/00199-CVE-2013-1753.patch
New file
@@ -0,0 +1,88 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1417828515 18000
# Node ID d50096708b2d701937e78f525446d729fc28db88
# Parent  923aac88a3cc76a95d5a04d9d3ece245147a8064
add a default limit for the amount of data xmlrpclib.gzip_decode will return (closes #16043)
diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py
--- a/Lib/test/test_xmlrpc.py
+++ b/Lib/test/test_xmlrpc.py
@@ -737,7 +737,7 @@ class GzipServerTestCase(BaseServerTestC
         with cm:
             p.pow(6, 8)
-    def test_gsip_response(self):
+    def test_gzip_response(self):
         t = self.Transport()
         p = xmlrpclib.ServerProxy(URL, transport=t)
         old = self.requestHandler.encode_threshold
@@ -750,6 +750,23 @@ class GzipServerTestCase(BaseServerTestC
         self.requestHandler.encode_threshold = old
         self.assertTrue(a>b)
+    def test_gzip_decode_limit(self):
+        max_gzip_decode = 20 * 1024 * 1024
+        data = '\0' * max_gzip_decode
+        encoded = xmlrpclib.gzip_encode(data)
+        decoded = xmlrpclib.gzip_decode(encoded)
+        self.assertEqual(len(decoded), max_gzip_decode)
+
+        data = '\0' * (max_gzip_decode + 1)
+        encoded = xmlrpclib.gzip_encode(data)
+
+        with self.assertRaisesRegexp(ValueError,
+                                     "max gzipped payload length exceeded"):
+            xmlrpclib.gzip_decode(encoded)
+
+        xmlrpclib.gzip_decode(encoded, max_decode=-1)
+
+
 #Test special attributes of the ServerProxy object
 class ServerProxyTestCase(unittest.TestCase):
     def setUp(self):
diff --git a/Lib/xmlrpclib.py b/Lib/xmlrpclib.py
--- a/Lib/xmlrpclib.py
+++ b/Lib/xmlrpclib.py
@@ -49,6 +49,7 @@
 # 2003-07-12 gp  Correct marshalling of Faults
 # 2003-10-31 mvl Add multicall support
 # 2004-08-20 mvl Bump minimum supported Python version to 2.1
+# 2014-12-02 ch/doko  Add workaround for gzip bomb vulnerability
 #
 # Copyright (c) 1999-2002 by Secret Labs AB.
 # Copyright (c) 1999-2002 by Fredrik Lundh.
@@ -1165,10 +1166,13 @@ def gzip_encode(data):
 # in the HTTP header, as described in RFC 1952
 #
 # @param data The encoded data
+# @keyparam max_decode Maximum bytes to decode (20MB default), use negative
+#    values for unlimited decoding
 # @return the unencoded data
 # @raises ValueError if data is not correctly coded.
+# @raises ValueError if max gzipped payload length exceeded
-def gzip_decode(data):
+def gzip_decode(data, max_decode=20971520):
     """gzip encoded data -> unencoded data
     Decode data using the gzip content encoding as described in RFC 1952
@@ -1178,11 +1182,16 @@ def gzip_decode(data):
     f = StringIO.StringIO(data)
     gzf = gzip.GzipFile(mode="rb", fileobj=f)
     try:
-        decoded = gzf.read()
+        if max_decode < 0: # no limit
+            decoded = gzf.read()
+        else:
+            decoded = gzf.read(max_decode + 1)
     except IOError:
         raise ValueError("invalid data")
     f.close()
     gzf.close()
+    if max_decode >= 0 and len(decoded) > max_decode:
+        raise ValueError("max gzipped payload length exceeded")
     return decoded
 ##
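The effect of this patch is that xmlrpclib.gzip_decode() refuses to expand more than 20 MB (20971520 bytes) unless the caller opts out. A short sketch of both sides of that limit, assuming the patched module:

    import xmlrpclib

    # Just over the 20 MB default cap once decompressed.
    payload = xmlrpclib.gzip_encode('\0' * (20 * 1024 * 1024 + 1))

    try:
        xmlrpclib.gzip_decode(payload)                  # default max_decode=20971520
    except ValueError as exc:
        print(exc)                                      # max gzipped payload length exceeded

    data = xmlrpclib.gzip_decode(payload, max_decode=-1)   # explicit opt-out
    print(len(data))                                        # 20971521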
SOURCES/00200-CVE-2014-4616.patch
New file
@@ -0,0 +1,52 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1397441438 14400
# Node ID 50c07ed1743da9cd4540d83de0c30bd17aeb41b0
# Parent  218e28a935ab4494d05215c243e2129625a71893
in scan_once, prevent the reading of arbitrary memory when passed a negative index
Bug reported by Guido Vranken.
diff --git a/Lib/json/tests/test_decode.py b/Lib/json/tests/test_decode.py
--- a/Lib/json/tests/test_decode.py
+++ b/Lib/json/tests/test_decode.py
@@ -60,5 +60,10 @@ class TestDecode(object):
         msg = 'escape'
         self.assertRaisesRegexp(ValueError, msg, self.loads, s)
+    def test_negative_index(self):
+        d = self.json.JSONDecoder()
+        self.assertRaises(ValueError, d.raw_decode, 'a'*42, -50000)
+        self.assertRaises(ValueError, d.raw_decode, u'a'*42, -50000)
+
 class TestPyDecode(TestDecode, PyTest): pass
 class TestCDecode(TestDecode, CTest): pass
diff --git a/Modules/_json.c b/Modules/_json.c
--- a/Modules/_json.c
+++ b/Modules/_json.c
@@ -1468,7 +1468,10 @@ scan_once_str(PyScannerObject *s, PyObje
     PyObject *res;
     char *str = PyString_AS_STRING(pystr);
     Py_ssize_t length = PyString_GET_SIZE(pystr);
-    if (idx >= length) {
+    if (idx < 0)
+        /* Compatibility with the Python version. */
+        idx += length;
+    if (idx < 0 || idx >= length) {
         PyErr_SetNone(PyExc_StopIteration);
         return NULL;
     }
@@ -1555,7 +1558,10 @@ scan_once_unicode(PyScannerObject *s, Py
     PyObject *res;
     Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr);
     Py_ssize_t length = PyUnicode_GET_SIZE(pystr);
-    if (idx >= length) {
+    if (idx < 0)
+        /* Compatibility with Python version. */
+        idx += length;
+    if (idx < 0 || idx >= length) {
         PyErr_SetNone(PyExc_StopIteration);
         return NULL;
     }
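With the bounds check in place, a negative start index handed to the C scanner behaves like the pure-Python decoder: it is added to the string length and, if still out of range, rejected with a normal ValueError instead of indexing arbitrary memory. A tiny sketch, assuming the patched _json module:

    import json

    decoder = json.JSONDecoder()
    try:
        decoder.raw_decode('a' * 42, -50000)    # far below -len(s)
    except ValueError as exc:
        print('rejected cleanly: %s' % exc)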
SOURCES/00201-CVE-2014-4650.patch
New file
@@ -0,0 +1,35 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1402796189 25200
# Node ID b4bab078876811c7d95231d08aa6fa7142fdda66
# Parent  bb8b0c7fefd0c5ed99b3f336178a4f9554a1d0ef
url unquote the path before checking if it refers to a CGI script (closes #21766)
diff --git a/Lib/CGIHTTPServer.py b/Lib/CGIHTTPServer.py
--- a/Lib/CGIHTTPServer.py
+++ b/Lib/CGIHTTPServer.py
@@ -84,7 +84,7 @@ class CGIHTTPRequestHandler(SimpleHTTPSe
         path begins with one of the strings in self.cgi_directories
         (and the next character is a '/' or the end of the string).
         """
-        collapsed_path = _url_collapse_path(self.path)
+        collapsed_path = _url_collapse_path(urllib.unquote(self.path))
         dir_sep = collapsed_path.find('/', 1)
         head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:]
         if head in self.cgi_directories:
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -510,6 +510,11 @@ class CGIHTTPServerTestCase(BaseTestCase
                 (res.read(), res.getheader('Content-type'), res.status))
         self.assertEqual(os.environ['SERVER_SOFTWARE'], signature)
+    def test_urlquote_decoding_in_cgi_check(self):
+        res = self.request('/cgi-bin%2ffile1.py')
+        self.assertEqual((b'Hello World\n', 'text/html', 200),
+                (res.read(), res.getheader('Content-type'), res.status))
+
 class SimpleHTTPRequestHandlerTestCase(unittest.TestCase):
     """ Test url parsing """
SOURCES/00202-CVE-2014-7185.patch
New file
@@ -0,0 +1,51 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1403579547 25200
# Node ID 8d963c7db507be561e26bbbb852e3a2be3327c3f
# Parent  8e0b7393e921fb5e05c40265f9272dec90512ef6
avoid overflow with large buffer sizes and/or offsets (closes #21831)
diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py
--- a/Lib/test/test_buffer.py
+++ b/Lib/test/test_buffer.py
@@ -4,6 +4,7 @@ For now, tests just new or changed funct
 """
+import sys
 import unittest
 from test import test_support
@@ -29,6 +30,11 @@ class BufferTests(unittest.TestCase):
         m = memoryview(b) # Should not raise an exception
         self.assertEqual(m.tobytes(), s)
+    def test_large_buffer_size_and_offset(self):
+        data = bytearray('hola mundo')
+        buf = buffer(data, sys.maxsize, sys.maxsize)
+        self.assertEqual(buf[:4096], "")
+
 def test_main():
     with test_support.check_py3k_warnings(("buffer.. not supported",
diff --git a/Objects/bufferobject.c b/Objects/bufferobject.c
--- a/Objects/bufferobject.c
+++ b/Objects/bufferobject.c
@@ -88,7 +88,7 @@ get_buf(PyBufferObject *self, void **ptr
             *size = count;
         else
             *size = self->b_size;
-        if (offset + *size > count)
+        if (*size > count - offset)
             *size = count - offset;
     }
     return 1;
@@ -875,4 +875,4 @@ PyTypeObject PyBuffer_Type = {
     0,                                          /* tp_init */
     0,                                          /* tp_alloc */
     buffer_new,                                 /* tp_new */
-};
\ No newline at end of file
+};
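The rewritten check computes the remaining room as count - offset instead of testing offset + *size, so enormous offsets and sizes can no longer overflow the signed arithmetic; the buffer simply reports no readable bytes. A sketch of the now-safe behaviour (Python 2 only, since buffer() does not exist in Python 3):

    import sys

    data = bytearray('hola mundo')
    huge = buffer(data, sys.maxsize, sys.maxsize)   # offset and size far past the end
    print(repr(huge[:4096]))                        # '' -- empty slice, no crash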
SOURCES/00203-CVE-2013-1752-nntplib.patch
New file
@@ -0,0 +1,108 @@
# HG changeset patch
# User Barry Warsaw <barry@python.org>
# Date 1380582569 14400
# Node ID 36680a7c0e22686df9c338a9ca3cdb2c60e05b27
# Parent  0f5611bca5a284c0b5f978e83a05818f0907bda8
# Parent  731abf7834c43efb321231e65e7dd76ad9e8e661

- Issue #16040: CVE-2013-1752: nntplib: Limit maximum line lengths to 2048 to
  prevent readline() calls from consuming too much memory.  Patch by Jyrki
  Pulliainen.
diff --git a/Lib/nntplib.py b/Lib/nntplib.py
--- a/Lib/nntplib.py
+++ b/Lib/nntplib.py
@@ -37,6 +37,13 @@ import socket
            "error_reply","error_temp","error_perm","error_proto",
            "error_data",]
+# maximal line length when calling readline(). This is to prevent
+# reading arbitrary lenght lines. RFC 3977 limits NNTP line length to
+# 512 characters, including CRLF. We have selected 2048 just to be on
+# the safe side.
+_MAXLINE = 2048
+
+
 # Exceptions raised when an error or invalid response is received
 class NNTPError(Exception):
     """Base class for all nntplib exceptions"""
@@ -200,7 +207,9 @@ class NNTP:
     def getline(self):
         """Internal: return one line from the server, stripping CRLF.
         Raise EOFError if the connection is closed."""
-        line = self.file.readline()
+        line = self.file.readline(_MAXLINE + 1)
+        if len(line) > _MAXLINE:
+            raise NNTPDataError('line too long')
         if self.debugging > 1:
             print '*get*', repr(line)
         if not line: raise EOFError
diff --git a/Lib/test/test_nntplib.py b/Lib/test/test_nntplib.py
new file mode 100644
--- /dev/null
+++ b/Lib/test/test_nntplib.py
@@ -0,0 +1,65 @@
+import socket
+import threading
+import nntplib
+import time
+
+from unittest import TestCase
+from test import test_support
+
+HOST = test_support.HOST
+
+
+def server(evt, serv, evil=False):
+    serv.listen(5)
+    try:
+        conn, addr = serv.accept()
+    except socket.timeout:
+        pass
+    else:
+        if evil:
+            conn.send("1 I'm too long response" * 3000 + "\n")
+        else:
+            conn.send("1 I'm OK response\n")
+        conn.close()
+    finally:
+        serv.close()
+        evt.set()
+
+
+class BaseServerTest(TestCase):
+    def setUp(self):
+        self.evt = threading.Event()
+        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        self.sock.settimeout(3)
+        self.port = test_support.bind_port(self.sock)
+        threading.Thread(
+            target=server,
+            args=(self.evt, self.sock, self.evil)).start()
+        time.sleep(.1)
+
+    def tearDown(self):
+        self.evt.wait()
+
+
+class ServerTests(BaseServerTest):
+    evil = False
+
+    def test_basic_connect(self):
+        nntp = nntplib.NNTP('localhost', self.port)
+        nntp.sock.close()
+
+
+class EvilServerTests(BaseServerTest):
+    evil = True
+
+    def test_too_long_line(self):
+        self.assertRaises(nntplib.NNTPDataError,
+                          nntplib.NNTP, 'localhost', self.port)
+
+
+def test_main(verbose=None):
+    test_support.run_unittest(EvilServerTests)
+    test_support.run_unittest(ServerTests)
+
+if __name__ == '__main__':
+    test_main()
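The same defensive pattern recurs across the CVE-2013-1752 patches for nntplib, ftplib, poplib, smtplib and imaplib: ask readline() for at most one byte more than the cap, and treat anything longer as a protocol error instead of buffering it indefinitely. A generic sketch of that idiom (the names are illustrative; each module raises its own exception type):

    _MAXLINE = 2048   # nntplib/poplib use 2048; ftplib/smtplib use 8192, imaplib 10000

    def bounded_readline(fileobj):
        # Reading _MAXLINE + 1 bytes distinguishes "exactly at the cap"
        # from "over the cap" without ever consuming an unbounded line.
        line = fileobj.readline(_MAXLINE + 1)
        if len(line) > _MAXLINE:
            raise ValueError('line too long')
        return line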
SOURCES/00204-CVE-2013-1752-ftplib.patch
New file
@@ -0,0 +1,149 @@
# HG changeset patch
# User Serhiy Storchaka <storchaka@gmail.com>
# Date 1382277427 -10800
# Node ID 44ac81e6d584758ee56a865a7c18d82505be0643
# Parent  625ece68d79a27d376889579c414ed4b2d8a2649
Issue #16038: CVE-2013-1752: ftplib: Limit amount of data read by
limiting the call to readline().  Original patch by Michał
Jastrzębski and Giampaolo Rodola.
diff --git a/Lib/ftplib.py b/Lib/ftplib.py
--- a/Lib/ftplib.py
+++ b/Lib/ftplib.py
@@ -55,6 +55,8 @@ MSG_OOB = 0x1
 # The standard FTP server control port
 FTP_PORT = 21
+# The sizehint parameter passed to readline() calls
+MAXLINE = 8192
 # Exception raised when an error or invalid response is received
@@ -101,6 +103,7 @@ class FTP:
     debugging = 0
     host = ''
     port = FTP_PORT
+    maxline = MAXLINE
     sock = None
     file = None
     welcome = None
@@ -180,7 +183,9 @@ class FTP:
     # Internal: return one line from the server, stripping CRLF.
     # Raise EOFError if the connection is closed
     def getline(self):
-        line = self.file.readline()
+        line = self.file.readline(self.maxline + 1)
+        if len(line) > self.maxline:
+            raise Error("got more than %d bytes" % self.maxline)
         if self.debugging > 1:
             print '*get*', self.sanitize(line)
         if not line: raise EOFError
@@ -432,7 +437,9 @@ class FTP:
         conn = self.transfercmd(cmd)
         fp = conn.makefile('rb')
         while 1:
-            line = fp.readline()
+            line = fp.readline(self.maxline + 1)
+            if len(line) > self.maxline:
+                raise Error("got more than %d bytes" % self.maxline)
             if self.debugging > 2: print '*retr*', repr(line)
             if not line:
                 break
@@ -485,7 +492,9 @@ class FTP:
         self.voidcmd('TYPE A')
         conn = self.transfercmd(cmd)
         while 1:
-            buf = fp.readline()
+            buf = fp.readline(self.maxline + 1)
+            if len(buf) > self.maxline:
+                raise Error("got more than %d bytes" % self.maxline)
             if not buf: break
             if buf[-2:] != CRLF:
                 if buf[-1] in CRLF: buf = buf[:-1]
@@ -710,7 +719,9 @@ else:
             fp = conn.makefile('rb')
             try:
                 while 1:
-                    line = fp.readline()
+                    line = fp.readline(self.maxline + 1)
+                    if len(line) > self.maxline:
+                        raise Error("got more than %d bytes" % self.maxline)
                     if self.debugging > 2: print '*retr*', repr(line)
                     if not line:
                         break
@@ -748,7 +759,9 @@ else:
             conn = self.transfercmd(cmd)
             try:
                 while 1:
-                    buf = fp.readline()
+                    buf = fp.readline(self.maxline + 1)
+                    if len(buf) > self.maxline:
+                        raise Error("got more than %d bytes" % self.maxline)
                     if not buf: break
                     if buf[-2:] != CRLF:
                         if buf[-1] in CRLF: buf = buf[:-1]
@@ -905,7 +918,9 @@ class Netrc:
         fp = open(filename, "r")
         in_macro = 0
         while 1:
-            line = fp.readline()
+            line = fp.readline(self.maxline + 1)
+            if len(line) > self.maxline:
+                raise Error("got more than %d bytes" % self.maxline)
             if not line: break
             if in_macro and line.strip():
                 macro_lines.append(line)
diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py
--- a/Lib/test/test_ftplib.py
+++ b/Lib/test/test_ftplib.py
@@ -65,6 +65,7 @@ class DummyFTPHandler(asynchat.async_cha
         self.last_received_data = ''
         self.next_response = ''
         self.rest = None
+        self.next_retr_data = RETR_DATA
         self.push('220 welcome')
     def collect_incoming_data(self, data):
@@ -189,7 +190,7 @@ class DummyFTPHandler(asynchat.async_cha
             offset = int(self.rest)
         else:
             offset = 0
-        self.dtp.push(RETR_DATA[offset:])
+        self.dtp.push(self.next_retr_data[offset:])
         self.dtp.close_when_done()
         self.rest = None
@@ -203,6 +204,11 @@ class DummyFTPHandler(asynchat.async_cha
         self.dtp.push(NLST_DATA)
         self.dtp.close_when_done()
+    def cmd_setlongretr(self, arg):
+        # For testing. Next RETR will return long line.
+        self.next_retr_data = 'x' * int(arg)
+        self.push('125 setlongretr ok')
+
 class DummyFTPServer(asyncore.dispatcher, threading.Thread):
@@ -558,6 +564,20 @@ class TestFTPClass(TestCase):
         # IPv4 is in use, just make sure send_epsv has not been used
         self.assertEqual(self.server.handler.last_received_cmd, 'pasv')
+    def test_line_too_long(self):
+        self.assertRaises(ftplib.Error, self.client.sendcmd,
+                          'x' * self.client.maxline * 2)
+
+    def test_retrlines_too_long(self):
+        self.client.sendcmd('SETLONGRETR %d' % (self.client.maxline * 2))
+        received = []
+        self.assertRaises(ftplib.Error,
+                          self.client.retrlines, 'retr', received.append)
+
+    def test_storlines_too_long(self):
+        f = StringIO.StringIO('x' * self.client.maxline * 2)
+        self.assertRaises(ftplib.Error, self.client.storlines, 'stor', f)
+
 class TestIPv6Environment(TestCase):
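Unlike the other modules, ftplib exposes its cap as a maxline attribute on the FTP class (default 8192), so a caller that legitimately needs longer control-channel or ASCII-transfer lines can raise it per connection. A hedged usage sketch; the host name is purely hypothetical:

    import ftplib

    ftp = ftplib.FTP('ftp.example.org')   # hypothetical server
    ftp.maxline = 16384                   # lift the 8192-byte default for this session
    ftp.login()
    ftp.retrlines('LIST')
    ftp.quit()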
SOURCES/00205-CVE-2013-1752-httplib-headers.patch
New file
@@ -0,0 +1,51 @@
# HG changeset patch
# User Berker Peksag <berker.peksag@gmail.com>
# Date 1407212157 -10800
# Node ID 5e310c6a8520603bca8bc4b40eaf4f074db47c0d
# Parent  46c7a724b487295257423a69478392cb01ce74e6
Issue #16037: HTTPMessage.readheaders() raises an HTTPException when more
than 100 headers are read.
Patch by Jyrki Pulliainen and Daniel Eriksson.
diff --git a/Lib/httplib.py b/Lib/httplib.py
--- a/Lib/httplib.py
+++ b/Lib/httplib.py
@@ -215,6 +215,10 @@ MAXAMOUNT = 1048576
 # maximal line length when calling readline().
 _MAXLINE = 65536
+# maximum amount of headers accepted
+_MAXHEADERS = 100
+
+
 class HTTPMessage(mimetools.Message):
     def addheader(self, key, value):
@@ -271,6 +275,8 @@ class HTTPMessage(mimetools.Message):
         elif self.seekable:
             tell = self.fp.tell
         while True:
+            if len(hlist) > _MAXHEADERS:
+                raise HTTPException("got more than %d headers" % _MAXHEADERS)
             if tell:
                 try:
                     startofline = tell()
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -262,6 +262,13 @@ class BasicTest(TestCase):
         if resp.read() != "":
             self.fail("Did not expect response from HEAD request")
+    def test_too_many_headers(self):
+        headers = '\r\n'.join('Header%d: foo' % i for i in xrange(200)) + '\r\n'
+        text = ('HTTP/1.1 200 OK\r\n' + headers)
+        s = FakeSocket(text)
+        r = httplib.HTTPResponse(s)
+        self.assertRaises(httplib.HTTPException, r.begin)
+
     def test_send_file(self):
         expected = 'GET /foo HTTP/1.1\r\nHost: example.com\r\n' \
                    'Accept-Encoding: identity\r\nContent-Length:'
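Here the guard is a count rather than a length: readheaders() now gives up with an HTTPException once more than _MAXHEADERS (100) header lines have been collected. The sketch below mirrors the added test with a throwaway fake socket (the FakeSocket class is defined here only for illustration; it is not part of httplib):

    import StringIO
    import httplib

    class FakeSocket(object):
        def __init__(self, text):
            self._text = text
        def makefile(self, mode, bufsize=None):
            return StringIO.StringIO(self._text)

    headers = ''.join('Header%d: foo\r\n' % i for i in xrange(200))
    response = httplib.HTTPResponse(
        FakeSocket('HTTP/1.1 200 OK\r\n' + headers + '\r\n'))
    try:
        response.begin()
    except httplib.HTTPException as exc:
        print(exc)    # got more than 100 headers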
SOURCES/00206-CVE-2013-1752-poplib.patch
New file
@@ -0,0 +1,60 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1417827758 18000
# Node ID 339f877cca115c1901f5dd93d7bc066031d2a669
# Parent  54af094087953f4997a4ead63e949d845c4b4412
in poplib, limit maximum line length that we read from the network (closes #16041)
Patch from Berker Peksag.
diff --git a/Lib/poplib.py b/Lib/poplib.py
--- a/Lib/poplib.py
+++ b/Lib/poplib.py
@@ -32,6 +32,12 @@ CR = '\r'
 LF = '\n'
 CRLF = CR+LF
+# maximal line length when calling readline(). This is to prevent
+# reading arbitrary length lines. RFC 1939 limits POP3 line length to
+# 512 characters, including CRLF. We have selected 2048 just to be on
+# the safe side.
+_MAXLINE = 2048
+
 class POP3:
@@ -103,7 +109,9 @@ class POP3:
     # Raise error_proto('-ERR EOF') if the connection is closed.
     def _getline(self):
-        line = self.file.readline()
+        line = self.file.readline(_MAXLINE + 1)
+        if len(line) > _MAXLINE:
+            raise error_proto('line too long')
         if self._debugging > 1: print '*get*', repr(line)
         if not line: raise error_proto('-ERR EOF')
         octets = len(line)
@@ -365,6 +373,8 @@ else:
             match = renewline.match(self.buffer)
             while not match:
                 self._fillBuffer()
+                if len(self.buffer) > _MAXLINE:
+                    raise error_proto('line too long')
                 match = renewline.match(self.buffer)
             line = match.group(0)
             self.buffer = renewline.sub('' ,self.buffer, 1)
diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py
--- a/Lib/test/test_poplib.py
+++ b/Lib/test/test_poplib.py
@@ -198,6 +198,10 @@ class TestPOP3Class(TestCase):
                     113)
         self.assertEqual(self.client.retr('foo'), expected)
+    def test_too_long_lines(self):
+        self.assertRaises(poplib.error_proto, self.client._shortcmd,
+                          'echo +%s' % ((poplib._MAXLINE + 10) * 'a'))
+
     def test_dele(self):
         self.assertOK(self.client.dele('foo'))
SOURCES/00207-CVE-2013-1752-smtplib.patch
New file
@@ -0,0 +1,100 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1417827918 18000
# Node ID 923aac88a3cc76a95d5a04d9d3ece245147a8064
# Parent  339f877cca115c1901f5dd93d7bc066031d2a669
smtplib: limit amount read from the network (closes #16042)
diff --git a/Lib/smtplib.py b/Lib/smtplib.py
--- a/Lib/smtplib.py
+++ b/Lib/smtplib.py
@@ -57,6 +57,7 @@ from sys import stderr
 SMTP_PORT = 25
 SMTP_SSL_PORT = 465
 CRLF = "\r\n"
+_MAXLINE = 8192 # more than 8 times larger than RFC 821, 4.5.3
 OLDSTYLE_AUTH = re.compile(r"auth=(.*)", re.I)
@@ -179,10 +180,14 @@ else:
         def __init__(self, sslobj):
             self.sslobj = sslobj
-        def readline(self):
+        def readline(self, size=-1):
+            if size < 0:
+                size = None
             str = ""
             chr = None
             while chr != "\n":
+                if size is not None and len(str) >= size:
+                    break
                 chr = self.sslobj.read(1)
                 if not chr:
                     break
@@ -353,7 +358,7 @@ class SMTP:
             self.file = self.sock.makefile('rb')
         while 1:
             try:
-                line = self.file.readline()
+                line = self.file.readline(_MAXLINE + 1)
             except socket.error as e:
                 self.close()
                 raise SMTPServerDisconnected("Connection unexpectedly closed: "
@@ -363,6 +368,8 @@ class SMTP:
                 raise SMTPServerDisconnected("Connection unexpectedly closed")
             if self.debuglevel > 0:
                 print>>stderr, 'reply:', repr(line)
+            if len(line) > _MAXLINE:
+                raise SMTPResponseException(500, "Line too long.")
             resp.append(line[4:].strip())
             code = line[:3]
             # Check that the error code is syntactically correct.
diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py
--- a/Lib/test/test_smtplib.py
+++ b/Lib/test/test_smtplib.py
@@ -292,6 +292,33 @@ class BadHELOServerTests(unittest.TestCa
                             HOST, self.port, 'localhost', 3)
+@unittest.skipUnless(threading, 'Threading required for this test.')
+class TooLongLineTests(unittest.TestCase):
+    respdata = '250 OK' + ('.' * smtplib._MAXLINE * 2) + '\n'
+
+    def setUp(self):
+        self.old_stdout = sys.stdout
+        self.output = StringIO.StringIO()
+        sys.stdout = self.output
+
+        self.evt = threading.Event()
+        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        self.sock.settimeout(15)
+        self.port = test_support.bind_port(self.sock)
+        servargs = (self.evt, self.respdata, self.sock)
+        threading.Thread(target=server, args=servargs).start()
+        self.evt.wait()
+        self.evt.clear()
+
+    def tearDown(self):
+        self.evt.wait()
+        sys.stdout = self.old_stdout
+
+    def testLineTooLong(self):
+        self.assertRaises(smtplib.SMTPResponseException, smtplib.SMTP,
+                          HOST, self.port, 'localhost', 3)
+
+
 sim_users = {'Mr.A@somewhere.com':'John A',
              'Ms.B@somewhere.com':'Sally B',
              'Mrs.C@somewhereesle.com':'Ruth C',
@@ -526,7 +553,8 @@ class SMTPSimTests(unittest.TestCase):
 def test_main(verbose=None):
     test_support.run_unittest(GeneralTests, DebuggingServerTests,
                               NonConnectingTests,
-                              BadHELOServerTests, SMTPSimTests)
+                              BadHELOServerTests, SMTPSimTests,
+                              TooLongLineTests)
 if __name__ == '__main__':
     test_main()
SOURCES/00208-CVE-2013-1752-imaplib.patch
New file
@@ -0,0 +1,59 @@
# HG changeset patch
# User R David Murray <rdmurray@bitdance.com>
# Date 1388775562 18000
# Node ID dd906f4ab9237020a7a275c2d361fa288e553481
# Parent  69b5f692455306c98aa27ecea17e6290787ebd3f
closes 16039: CVE-2013-1752: limit line length in imaplib readline calls.
diff --git a/Lib/imaplib.py b/Lib/imaplib.py
--- a/Lib/imaplib.py
+++ b/Lib/imaplib.py
@@ -35,6 +35,15 @@ IMAP4_PORT = 143
 IMAP4_SSL_PORT = 993
 AllowedVersions = ('IMAP4REV1', 'IMAP4')        # Most recent first
+# Maximal line length when calling readline(). This is to prevent
+# reading arbitrary length lines. RFC 3501 and 2060 (IMAP 4rev1)
+# don't specify a line length. RFC 2683 however suggests limiting client
+# command lines to 1000 octets and server command lines to 8000 octets.
+# We have selected 10000 for some extra margin and since that is supposedly
+# also what UW and Panda IMAP does.
+_MAXLINE = 10000
+
+
 #       Commands
 Commands = {
@@ -237,7 +246,10 @@ class IMAP4:
     def readline(self):
         """Read line from remote."""
-        return self.file.readline()
+        line = self.file.readline(_MAXLINE + 1)
+        if len(line) > _MAXLINE:
+            raise self.error("got more than %d bytes" % _MAXLINE)
+        return line
     def send(self, data):
diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py
--- a/Lib/test/test_imaplib.py
+++ b/Lib/test/test_imaplib.py
@@ -165,6 +165,16 @@ class BaseThreadedNetworkedTests(unittes
                               self.imap_class, *server.server_address)
+    def test_linetoolong(self):
+        class TooLongHandler(SimpleIMAPHandler):
+            def handle(self):
+                # Send a very long response line
+                self.wfile.write('* OK ' + imaplib._MAXLINE*'x' + '\r\n')
+
+        with self.reaped_server(TooLongHandler) as server:
+            self.assertRaises(imaplib.IMAP4.error,
+                              self.imap_class, *server.server_address)
+
 class ThreadedNetworkedTests(BaseThreadedNetworkedTests):
     server_class = SocketServer.TCPServer
SOURCES/00209-pep466-backport-hmac.compare_digest.patch
New file
@@ -0,0 +1,353 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1399849904 25200
# Node ID b40f1a00b13460cc089450028280c4e52dd24a64
# Parent  951775c68b1b7782750c213b0fce1f61d46b2f51
backport hmac.compare_digest to partially implement PEP 466 (closes #21306)
Backport from Alex Gaynor.
diff --git a/Doc/library/hmac.rst b/Doc/library/hmac.rst
--- a/Doc/library/hmac.rst
+++ b/Doc/library/hmac.rst
@@ -38,6 +38,13 @@ An HMAC object has the following methods
    This string will be the same length as the *digest_size* of the digest given to
    the constructor.  It may contain non-ASCII characters, including NUL bytes.
+   .. warning::
+
+      When comparing the output of :meth:`digest` to an externally-supplied
+      digest during a verification routine, it is recommended to use the
+      :func:`compare_digest` function instead of the ``==`` operator
+      to reduce the vulnerability to timing attacks.
+
 .. method:: HMAC.hexdigest()
@@ -45,6 +52,13 @@ An HMAC object has the following methods
    containing only hexadecimal digits.  This may be used to exchange the value
    safely in email or other non-binary environments.
+   .. warning::
+
+      When comparing the output of :meth:`hexdigest` to an externally-supplied
+      digest during a verification routine, it is recommended to use the
+      :func:`compare_digest` function instead of the ``==`` operator
+      to reduce the vulnerability to timing attacks.
+
 .. method:: HMAC.copy()
@@ -52,6 +66,25 @@ An HMAC object has the following methods
    compute the digests of strings that share a common initial substring.
+This module also provides the following helper function:
+
+.. function:: compare_digest(a, b)
+
+   Return ``a == b``.  This function uses an approach designed to prevent
+   timing analysis by avoiding content-based short circuiting behaviour,
+   making it appropriate for cryptography.  *a* and *b* must both be of the
+   same type: either :class:`unicode` or a :term:`bytes-like object`.
+
+   .. note::
+
+      If *a* and *b* are of different lengths, or if an error occurs,
+      a timing attack could theoretically reveal information about the
+      types and lengths of *a* and *b*--but not their values.
+
+
+   .. versionadded:: 2.7.7
+
+
 .. seealso::
    Module :mod:`hashlib`
diff --git a/Lib/hmac.py b/Lib/hmac.py
--- a/Lib/hmac.py
+++ b/Lib/hmac.py
@@ -5,6 +5,9 @@ Implements the HMAC algorithm as describ
 import warnings as _warnings
+from operator import _compare_digest as compare_digest
+
+
 trans_5C = "".join ([chr (x ^ 0x5C) for x in xrange(256)])
 trans_36 = "".join ([chr (x ^ 0x36) for x in xrange(256)])
diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py
--- a/Lib/test/test_hmac.py
+++ b/Lib/test/test_hmac.py
@@ -302,12 +302,122 @@ class CopyTestCase(unittest.TestCase):
         self.assertTrue(h1.hexdigest() == h2.hexdigest(),
             "Hexdigest of copy doesn't match original hexdigest.")
+
+class CompareDigestTestCase(unittest.TestCase):
+
+    def test_compare_digest(self):
+        # Testing input type exception handling
+        a, b = 100, 200
+        self.assertRaises(TypeError, hmac.compare_digest, a, b)
+        a, b = 100, b"foobar"
+        self.assertRaises(TypeError, hmac.compare_digest, a, b)
+        a, b = b"foobar", 200
+        self.assertRaises(TypeError, hmac.compare_digest, a, b)
+        a, b = u"foobar", b"foobar"
+        self.assertRaises(TypeError, hmac.compare_digest, a, b)
+        a, b = b"foobar", u"foobar"
+        self.assertRaises(TypeError, hmac.compare_digest, a, b)
+
+        # Testing bytes of different lengths
+        a, b = b"foobar", b"foo"
+        self.assertFalse(hmac.compare_digest(a, b))
+        a, b = b"\xde\xad\xbe\xef", b"\xde\xad"
+        self.assertFalse(hmac.compare_digest(a, b))
+
+        # Testing bytes of same lengths, different values
+        a, b = b"foobar", b"foobaz"
+        self.assertFalse(hmac.compare_digest(a, b))
+        a, b = b"\xde\xad\xbe\xef", b"\xab\xad\x1d\xea"
+        self.assertFalse(hmac.compare_digest(a, b))
+
+        # Testing bytes of same lengths, same values
+        a, b = b"foobar", b"foobar"
+        self.assertTrue(hmac.compare_digest(a, b))
+        a, b = b"\xde\xad\xbe\xef", b"\xde\xad\xbe\xef"
+        self.assertTrue(hmac.compare_digest(a, b))
+
+        # Testing bytearrays of same lengths, same values
+        a, b = bytearray(b"foobar"), bytearray(b"foobar")
+        self.assertTrue(hmac.compare_digest(a, b))
+
+        # Testing bytearrays of diffeent lengths
+        a, b = bytearray(b"foobar"), bytearray(b"foo")
+        self.assertFalse(hmac.compare_digest(a, b))
+
+        # Testing bytearrays of same lengths, different values
+        a, b = bytearray(b"foobar"), bytearray(b"foobaz")
+        self.assertFalse(hmac.compare_digest(a, b))
+
+        # Testing byte and bytearray of same lengths, same values
+        a, b = bytearray(b"foobar"), b"foobar"
+        self.assertTrue(hmac.compare_digest(a, b))
+        self.assertTrue(hmac.compare_digest(b, a))
+
+        # Testing byte bytearray of diffeent lengths
+        a, b = bytearray(b"foobar"), b"foo"
+        self.assertFalse(hmac.compare_digest(a, b))
+        self.assertFalse(hmac.compare_digest(b, a))
+
+        # Testing byte and bytearray of same lengths, different values
+        a, b = bytearray(b"foobar"), b"foobaz"
+        self.assertFalse(hmac.compare_digest(a, b))
+        self.assertFalse(hmac.compare_digest(b, a))
+
+        # Testing str of same lengths
+        a, b = "foobar", "foobar"
+        self.assertTrue(hmac.compare_digest(a, b))
+
+        # Testing str of diffeent lengths
+        a, b = "foo", "foobar"
+        self.assertFalse(hmac.compare_digest(a, b))
+
+        # Testing bytes of same lengths, different values
+        a, b = "foobar", "foobaz"
+        self.assertFalse(hmac.compare_digest(a, b))
+
+        # Testing error cases
+        a, b = u"foobar", b"foobar"
+        self.assertRaises(TypeError, hmac.compare_digest, a, b)
+        a, b = b"foobar", u"foobar"
+        self.assertRaises(TypeError, hmac.compare_digest, a, b)
+        a, b = b"foobar", 1
+        self.assertRaises(TypeError, hmac.compare_digest, a, b)
+        a, b = 100, 200
+        self.assertRaises(TypeError, hmac.compare_digest, a, b)
+        a, b = "fooä", "fooä"
+        self.assertTrue(hmac.compare_digest(a, b))
+
+        # subclasses are supported by ignore __eq__
+        class mystr(str):
+            def __eq__(self, other):
+                return False
+
+        a, b = mystr("foobar"), mystr("foobar")
+        self.assertTrue(hmac.compare_digest(a, b))
+        a, b = mystr("foobar"), "foobar"
+        self.assertTrue(hmac.compare_digest(a, b))
+        a, b = mystr("foobar"), mystr("foobaz")
+        self.assertFalse(hmac.compare_digest(a, b))
+
+        class mybytes(bytes):
+            def __eq__(self, other):
+                return False
+
+        a, b = mybytes(b"foobar"), mybytes(b"foobar")
+        self.assertTrue(hmac.compare_digest(a, b))
+        a, b = mybytes(b"foobar"), b"foobar"
+        self.assertTrue(hmac.compare_digest(a, b))
+        a, b = mybytes(b"foobar"), mybytes(b"foobaz")
+        self.assertFalse(hmac.compare_digest(a, b))
+
+
 def test_main():
     test_support.run_unittest(
         TestVectorsTestCase,
         ConstructorTestCase,
         SanityTestCase,
-        CopyTestCase
+        CopyTestCase,
+        CompareDigestTestCase,
     )
 if __name__ == "__main__":
diff --git a/Modules/operator.c b/Modules/operator.c
--- a/Modules/operator.c
+++ b/Modules/operator.c
@@ -235,6 +235,132 @@ op_delslice(PyObject *s, PyObject *a)
 #define spam2o(OP,ALTOP,DOC) {#OP, op_##OP, METH_O, PyDoc_STR(DOC)}, \
                            {#ALTOP, op_##OP, METH_O, PyDoc_STR(DOC)},
+
+
+/* compare_digest **********************************************************/
+
+/*
+ * timing safe compare
+ *
+ * Returns 1 if the strings are equal.
+ * In case of len(a) != len(b) the function tries to keep the timing
+ * dependent on the length of b. CPU cache locality may still alter timing
+ * a bit.
+ */
+static int
+_tscmp(const unsigned char *a, const unsigned char *b,
+        Py_ssize_t len_a, Py_ssize_t len_b)
+{
+    /* The volatile type declarations make sure that the compiler has no
+     * chance to optimize and fold the code in any way that may change
+     * the timing.
+     */
+    volatile Py_ssize_t length;
+    volatile const unsigned char *left;
+    volatile const unsigned char *right;
+    Py_ssize_t i;
+    unsigned char result;
+
+    /* loop count depends on length of b */
+    length = len_b;
+    left = NULL;
+    right = b;
+
+    /* don't use else here to keep the amount of CPU instructions constant,
+     * volatile forces re-evaluation
+     *  */
+    if (len_a == length) {
+        left = *((volatile const unsigned char**)&a);
+        result = 0;
+    }
+    if (len_a != length) {
+        left = b;
+        result = 1;
+    }
+
+    for (i=0; i < length; i++) {
+        result |= *left++ ^ *right++;
+    }
+
+    return (result == 0);
+}
+
+PyDoc_STRVAR(compare_digest__doc__,
+"compare_digest(a, b) -> bool\n"
+"\n"
+"Return 'a == b'.  This function uses an approach designed to prevent\n"
+"timing analysis, making it appropriate for cryptography.\n"
+"a and b must both be of the same type: either str (ASCII only),\n"
+"or any type that supports the buffer protocol (e.g. bytes).\n"
+"\n"
+"Note: If a and b are of different lengths, or if an error occurs,\n"
+"a timing attack could theoretically reveal information about the\n"
+"types and lengths of a and b--but not their values.\n");
+
+static PyObject*
+compare_digest(PyObject *self, PyObject *args)
+{
+    PyObject *a, *b;
+    int rc;
+
+    if (!PyArg_ParseTuple(args, "OO:compare_digest", &a, &b)) {
+        return NULL;
+    }
+
+    /* Unicode string */
+    if (PyUnicode_Check(a) && PyUnicode_Check(b)) {
+        rc = _tscmp(PyUnicode_AS_DATA(a),
+                    PyUnicode_AS_DATA(b),
+                    PyUnicode_GET_DATA_SIZE(a),
+                    PyUnicode_GET_DATA_SIZE(b));
+    }
+    /* fallback to buffer interface for bytes, bytearray and others */
+    else {
+        Py_buffer view_a;
+        Py_buffer view_b;
+
+        if ((PyObject_CheckBuffer(a) == 0) & (PyObject_CheckBuffer(b) == 0)) {
+            PyErr_Format(PyExc_TypeError,
+                         "unsupported operand types(s) or combination of types: "
+                         "'%.100s' and '%.100s'",
+                         Py_TYPE(a)->tp_name, Py_TYPE(b)->tp_name);
+            return NULL;
+        }
+
+        if (PyObject_GetBuffer(a, &view_a, PyBUF_SIMPLE) == -1) {
+            return NULL;
+        }
+        if (view_a.ndim > 1) {
+            PyErr_SetString(PyExc_BufferError,
+                            "Buffer must be single dimension");
+            PyBuffer_Release(&view_a);
+            return NULL;
+        }
+
+        if (PyObject_GetBuffer(b, &view_b, PyBUF_SIMPLE) == -1) {
+            PyBuffer_Release(&view_a);
+            return NULL;
+        }
+        if (view_b.ndim > 1) {
+            PyErr_SetString(PyExc_BufferError,
+                            "Buffer must be single dimension");
+            PyBuffer_Release(&view_a);
+            PyBuffer_Release(&view_b);
+            return NULL;
+        }
+
+        rc = _tscmp((const unsigned char*)view_a.buf,
+                    (const unsigned char*)view_b.buf,
+                    view_a.len,
+                    view_b.len);
+
+        PyBuffer_Release(&view_a);
+        PyBuffer_Release(&view_b);
+    }
+
+    return PyBool_FromLong(rc);
+}
+
 static struct PyMethodDef operator_methods[] = {
 spam1o(isCallable,
@@ -318,6 +444,8 @@ spam2(ne,__ne__, "ne(a, b) -- Same as a!
 spam2(gt,__gt__, "gt(a, b) -- Same as a>b.")
 spam2(ge,__ge__, "ge(a, b) -- Same as a>=b.")
+    {"_compare_digest", (PyCFunction)compare_digest, METH_VARARGS,
+     compare_digest__doc__},
     {NULL,              NULL}           /* sentinel */
 };
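
A brief usage sketch may help readers of this patch set: the point of _compare_digest (exposed as hmac.compare_digest) is to verify a MAC received from an untrusted party without leaking, through timing, where the first mismatching byte sits. The sketch assumes this patched build; the key, message, and helper names are illustrative only.

    import hashlib
    import hmac

    def sign(key, message):
        # Hex-encoded HMAC-SHA256 tag for the message.
        return hmac.new(key, message, hashlib.sha256).hexdigest()

    def verify(key, message, received_tag):
        # Comparison time depends only on the length of received_tag (the
        # second argument), so a forger cannot recover the tag byte by byte
        # from timing differences.
        return hmac.compare_digest(sign(key, message), received_tag)

    key = b"server-side secret"
    tag = sign(key, b"amount=100")
    print(verify(key, b"amount=100", tag))   # True
    print(verify(key, b"amount=999", tag))   # False
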
SOURCES/00210-pep466-backport-hashlib.pbkdf2_hmac.patch
New file
@@ -0,0 +1,489 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1401567982 25200
# Node ID e4da3ba9dcac4374ca0ccc46a48c32be6f951038
# Parent  8fa8c290c165dccd613632b69a816623b51e801e
backport hashlib.pbkdf2_hmac per PEP 466 (closes #21304)
Backport by Alex Gaynor.
diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst
--- a/Doc/library/hashlib.rst
+++ b/Doc/library/hashlib.rst
@@ -135,6 +135,46 @@ A hash object has the following methods:
    compute the digests of strings that share a common initial substring.
+Key Derivation Function
+-----------------------
+
+Key derivation and key stretching algorithms are designed for secure password
+hashing. Naive algorithms such as ``sha1(password)`` are not resistant against
+brute-force attacks. A good password hashing function must be tunable, slow, and
+include a `salt <https://en.wikipedia.org/wiki/Salt_%28cryptography%29>`_.
+
+
+.. function:: pbkdf2_hmac(name, password, salt, rounds, dklen=None)
+
+   The function provides PKCS#5 password-based key derivation function 2. It
+   uses HMAC as the pseudorandom function.
+
+   The string *name* is the desired name of the hash digest algorithm for
+   HMAC, e.g. 'sha1' or 'sha256'. *password* and *salt* are interpreted as
+   buffers of bytes. Applications and libraries should limit *password* to
+   a sensible length (e.g. 1024). *salt* should be about 16 or more bytes from
+   a proper source, e.g. :func:`os.urandom`.
+
+   The number of *rounds* should be chosen based on the hash algorithm and
+   computing power. As of 2013, at least 100,000 rounds of SHA-256 is suggested.
+
+   *dklen* is the length of the derived key. If *dklen* is ``None`` then the
+   digest size of the hash algorithm *name* is used, e.g. 64 for SHA-512.
+
+   >>> import hashlib, binascii
+   >>> dk = hashlib.pbkdf2_hmac('sha256', b'password', b'salt', 100000)
+   >>> binascii.hexlify(dk)
+   b'0394a2ede332c9a13eb82e9b24631604c31df978b4e2f0fbd2c549944f9d79a5'
+
+   .. versionadded:: 2.7.8
+
+   .. note::
+
+      A fast implementation of *pbkdf2_hmac* is available with OpenSSL.  The
+      Python implementation uses an inline version of :mod:`hmac`. It is about
+      three times slower and doesn't release the GIL.
+
+
 .. seealso::
    Module :mod:`hmac`
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
--- a/Lib/hashlib.py
+++ b/Lib/hashlib.py
@@ -77,7 +77,7 @@ __always_supported = ('md5', 'sha1', 'sh
 algorithms = __always_supported
-__all__ = __always_supported + ('new', 'algorithms')
+__all__ = __always_supported + ('new', 'algorithms', 'pbkdf2_hmac')
 def __get_openssl_constructor(name):
@@ -123,6 +123,72 @@ for __func_name in __always_supported:
         import logging
         logging.exception('code for hash %s was not found.', __func_name)
+try:
+    # OpenSSL's PKCS5_PBKDF2_HMAC requires OpenSSL 1.0+ with HMAC and SHA
+    from _hashlib import pbkdf2_hmac
+except ImportError:
+    import binascii
+    import struct
+
+    _trans_5C = b"".join(chr(x ^ 0x5C) for x in range(256))
+    _trans_36 = b"".join(chr(x ^ 0x36) for x in range(256))
+
+    def pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None):
+        """Password based key derivation function 2 (PKCS #5 v2.0)
+
+        This Python implementation, based on the hmac module, is about as fast
+        as OpenSSL's PKCS5_PBKDF2_HMAC for short passwords and much faster
+        for long passwords.
+        """
+        if not isinstance(hash_name, str):
+            raise TypeError(hash_name)
+
+        if not isinstance(password, (bytes, bytearray)):
+            password = bytes(buffer(password))
+        if not isinstance(salt, (bytes, bytearray)):
+            salt = bytes(buffer(salt))
+
+        # Fast inline HMAC implementation
+        inner = new(hash_name)
+        outer = new(hash_name)
+        blocksize = getattr(inner, 'block_size', 64)
+        if len(password) > blocksize:
+            password = new(hash_name, password).digest()
+        password = password + b'\x00' * (blocksize - len(password))
+        inner.update(password.translate(_trans_36))
+        outer.update(password.translate(_trans_5C))
+
+        def prf(msg, inner=inner, outer=outer):
+            # PBKDF2_HMAC uses the password as key. We can re-use the same
+            # digest objects and just update copies to skip initialization.
+            icpy = inner.copy()
+            ocpy = outer.copy()
+            icpy.update(msg)
+            ocpy.update(icpy.digest())
+            return ocpy.digest()
+
+        if iterations < 1:
+            raise ValueError(iterations)
+        if dklen is None:
+            dklen = outer.digest_size
+        if dklen < 1:
+            raise ValueError(dklen)
+
+        hex_format_string = "%%0%ix" % (new(hash_name).digest_size * 2)
+
+        dkey = b''
+        loop = 1
+        while len(dkey) < dklen:
+            prev = prf(salt + struct.pack(b'>I', loop))
+            rkey = int(binascii.hexlify(prev), 16)
+            for i in xrange(iterations - 1):
+                prev = prf(prev)
+                rkey ^= int(binascii.hexlify(prev), 16)
+            loop += 1
+            dkey += binascii.unhexlify(hex_format_string % rkey)
+
+        return dkey[:dklen]
+
 # Cleanup locals()
 del __always_supported, __func_name, __get_hash
 del __hash_new, __get_openssl_constructor
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -16,6 +16,8 @@ except ImportError:
     threading = None
 import unittest
 import warnings
+from binascii import unhexlify
+
 from test import test_support
 from test.test_support import _4G, precisionbigmemtest
@@ -436,8 +438,72 @@ class HashLibTestCase(unittest.TestCase)
+class KDFTests(unittest.TestCase):
+    pbkdf2_test_vectors = [
+        (b'password', b'salt', 1, None),
+        (b'password', b'salt', 2, None),
+        (b'password', b'salt', 4096, None),
+        # too slow, it takes over a minute on a fast CPU.
+        #(b'password', b'salt', 16777216, None),
+        (b'passwordPASSWORDpassword', b'saltSALTsaltSALTsaltSALTsaltSALTsalt',
+         4096, -1),
+        (b'pass\0word', b'sa\0lt', 4096, 16),
+    ]
+
+    pbkdf2_results = {
+        "sha1": [
+            # official test vectors from RFC 6070
+            (unhexlify('0c60c80f961f0e71f3a9b524af6012062fe037a6'), None),
+            (unhexlify('ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957'), None),
+            (unhexlify('4b007901b765489abead49d926f721d065a429c1'), None),
+            #(unhexlify('eefe3d61cd4da4e4e9945b3d6ba2158c2634e984'), None),
+            (unhexlify('3d2eec4fe41c849b80c8d83662c0e44a8b291a964c'
+                           'f2f07038'), 25),
+            (unhexlify('56fa6aa75548099dcc37d7f03425e0c3'), None),],
+        "sha256": [
+            (unhexlify('120fb6cffcf8b32c43e7225256c4f837'
+                           'a86548c92ccc35480805987cb70be17b'), None),
+            (unhexlify('ae4d0c95af6b46d32d0adff928f06dd0'
+                           '2a303f8ef3c251dfd6e2d85a95474c43'), None),
+            (unhexlify('c5e478d59288c841aa530db6845c4c8d'
+                           '962893a001ce4e11a4963873aa98134a'), None),
+            #(unhexlify('cf81c66fe8cfc04d1f31ecb65dab4089'
+            #               'f7f179e89b3b0bcb17ad10e3ac6eba46'), None),
+            (unhexlify('348c89dbcbd32b2f32d814b8116e84cf2b17'
+                           '347ebc1800181c4e2a1fb8dd53e1c635518c7dac47e9'), 40),
+            (unhexlify('89b69d0516f829893c696226650a8687'), None),],
+        "sha512": [
+            (unhexlify('867f70cf1ade02cff3752599a3a53dc4af34c7a669815ae5'
+                           'd513554e1c8cf252c02d470a285a0501bad999bfe943c08f'
+                           '050235d7d68b1da55e63f73b60a57fce'), None),
+            (unhexlify('e1d9c16aa681708a45f5c7c4e215ceb66e011a2e9f004071'
+                           '3f18aefdb866d53cf76cab2868a39b9f7840edce4fef5a82'
+                           'be67335c77a6068e04112754f27ccf4e'), None),
+            (unhexlify('d197b1b33db0143e018b12f3d1d1479e6cdebdcc97c5c0f8'
+                           '7f6902e072f457b5143f30602641b3d55cd335988cb36b84'
+                           '376060ecd532e039b742a239434af2d5'), None),
+            (unhexlify('8c0511f4c6e597c6ac6315d8f0362e225f3c501495ba23b8'
+                           '68c005174dc4ee71115b59f9e60cd9532fa33e0f75aefe30'
+                           '225c583a186cd82bd4daea9724a3d3b8'), 64),
+            (unhexlify('9d9e9c4cd21fe4be24d5b8244c759665'), None),],
+    }
+
+    def test_pbkdf2_hmac(self):
+        for digest_name, results in self.pbkdf2_results.items():
+            for i, vector in enumerate(self.pbkdf2_test_vectors):
+                password, salt, rounds, dklen = vector
+                expected, overwrite_dklen = results[i]
+                if overwrite_dklen:
+                    dklen = overwrite_dklen
+                out = hashlib.pbkdf2_hmac(
+                    digest_name, password, salt, rounds, dklen)
+                self.assertEqual(out, expected,
+                                 (digest_name, password, salt, rounds, dklen))
+
+
+
 def test_main():
-    test_support.run_unittest(HashLibTestCase)
+    test_support.run_unittest(HashLibTestCase, KDFTests)
 if __name__ == "__main__":
     test_main()
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
--- a/Modules/_hashopenssl.c
+++ b/Modules/_hashopenssl.c
@@ -39,6 +39,7 @@
 #include <openssl/ssl.h>
 #include <openssl/err.h>
 #include <openssl/evp.h>
+#include <openssl/hmac.h>
 #define MUNCH_SIZE INT_MAX
@@ -563,6 +564,226 @@ EVP_new(PyObject *self, PyObject *args,
     return ret_obj;
 }
+
+
+#if (OPENSSL_VERSION_NUMBER >= 0x10000000 && !defined(OPENSSL_NO_HMAC) \
+     && !defined(OPENSSL_NO_SHA))
+
+#define PY_PBKDF2_HMAC 1
+
+/* Improved implementation of PKCS5_PBKDF2_HMAC()
+ *
+ * PKCS5_PBKDF2_HMAC_fast() hashes the password exactly one time instead of
+ * `iter` times. Today (2013) the iteration count is typically 100,000 or
+ * more. The improved algorithm is not subject to a Denial-of-Service
+ * vulnerability with overly large passwords.
+ *
+ * Also, OpenSSL < 1.0 doesn't provide PKCS5_PBKDF2_HMAC(), only
+ * PKCS5_PBKDF2_SHA1.
+ */
+static int
+PKCS5_PBKDF2_HMAC_fast(const char *pass, int passlen,
+                       const unsigned char *salt, int saltlen,
+                       int iter, const EVP_MD *digest,
+                       int keylen, unsigned char *out)
+{
+    unsigned char digtmp[EVP_MAX_MD_SIZE], *p, itmp[4];
+    int cplen, j, k, tkeylen, mdlen;
+    unsigned long i = 1;
+    HMAC_CTX hctx_tpl, hctx;
+
+    mdlen = EVP_MD_size(digest);
+    if (mdlen < 0)
+        return 0;
+
+    HMAC_CTX_init(&hctx_tpl);
+    HMAC_CTX_init(&hctx);
+    p = out;
+    tkeylen = keylen;
+    if (!HMAC_Init_ex(&hctx_tpl, pass, passlen, digest, NULL)) {
+        HMAC_CTX_cleanup(&hctx_tpl);
+        return 0;
+    }
+    while(tkeylen) {
+        if(tkeylen > mdlen)
+            cplen = mdlen;
+        else
+            cplen = tkeylen;
+        /* We are unlikely to ever use more than 256 blocks (5120 bits!)
+         * but just in case...
+         */
+        itmp[0] = (unsigned char)((i >> 24) & 0xff);
+        itmp[1] = (unsigned char)((i >> 16) & 0xff);
+        itmp[2] = (unsigned char)((i >> 8) & 0xff);
+        itmp[3] = (unsigned char)(i & 0xff);
+        if (!HMAC_CTX_copy(&hctx, &hctx_tpl)) {
+            HMAC_CTX_cleanup(&hctx_tpl);
+            return 0;
+        }
+        if (!HMAC_Update(&hctx, salt, saltlen)
+                || !HMAC_Update(&hctx, itmp, 4)
+                || !HMAC_Final(&hctx, digtmp, NULL)) {
+            HMAC_CTX_cleanup(&hctx_tpl);
+            HMAC_CTX_cleanup(&hctx);
+            return 0;
+        }
+        HMAC_CTX_cleanup(&hctx);
+        memcpy(p, digtmp, cplen);
+        for (j = 1; j < iter; j++) {
+            if (!HMAC_CTX_copy(&hctx, &hctx_tpl)) {
+                HMAC_CTX_cleanup(&hctx_tpl);
+                return 0;
+            }
+            if (!HMAC_Update(&hctx, digtmp, mdlen)
+                    || !HMAC_Final(&hctx, digtmp, NULL)) {
+                HMAC_CTX_cleanup(&hctx_tpl);
+                HMAC_CTX_cleanup(&hctx);
+                return 0;
+            }
+            HMAC_CTX_cleanup(&hctx);
+            for (k = 0; k < cplen; k++) {
+                p[k] ^= digtmp[k];
+            }
+        }
+        tkeylen-= cplen;
+        i++;
+        p+= cplen;
+    }
+    HMAC_CTX_cleanup(&hctx_tpl);
+    return 1;
+}
+
+/* LCOV_EXCL_START */
+static PyObject *
+_setException(PyObject *exc)
+{
+    unsigned long errcode;
+    const char *lib, *func, *reason;
+
+    errcode = ERR_peek_last_error();
+    if (!errcode) {
+        PyErr_SetString(exc, "unknown reasons");
+        return NULL;
+    }
+    ERR_clear_error();
+
+    lib = ERR_lib_error_string(errcode);
+    func = ERR_func_error_string(errcode);
+    reason = ERR_reason_error_string(errcode);
+
+    if (lib && func) {
+        PyErr_Format(exc, "[%s: %s] %s", lib, func, reason);
+    }
+    else if (lib) {
+        PyErr_Format(exc, "[%s] %s", lib, reason);
+    }
+    else {
+        PyErr_SetString(exc, reason);
+    }
+    return NULL;
+}
+/* LCOV_EXCL_STOP */
+
+PyDoc_STRVAR(pbkdf2_hmac__doc__,
+"pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None) -> key\n\
+\n\
+Password based key derivation function 2 (PKCS #5 v2.0) with HMAC as\n\
+pseudorandom function.");
+
+static PyObject *
+pbkdf2_hmac(PyObject *self, PyObject *args, PyObject *kwdict)
+{
+    static char *kwlist[] = {"hash_name", "password", "salt", "iterations",
+                             "dklen", NULL};
+    PyObject *key_obj = NULL, *dklen_obj = Py_None;
+    char *name, *key;
+    Py_buffer password, salt;
+    long iterations, dklen;
+    int retval;
+    const EVP_MD *digest;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwdict, "ss*s*l|O:pbkdf2_hmac",
+                                     kwlist, &name, &password, &salt,
+                                     &iterations, &dklen_obj)) {
+        return NULL;
+    }
+
+    digest = EVP_get_digestbyname(name);
+    if (digest == NULL) {
+        PyErr_SetString(PyExc_ValueError, "unsupported hash type");
+        goto end;
+    }
+
+    if (password.len > INT_MAX) {
+        PyErr_SetString(PyExc_OverflowError,
+                        "password is too long.");
+        goto end;
+    }
+
+    if (salt.len > INT_MAX) {
+        PyErr_SetString(PyExc_OverflowError,
+                        "salt is too long.");
+        goto end;
+    }
+
+    if (iterations < 1) {
+        PyErr_SetString(PyExc_ValueError,
+                        "iteration value must be greater than 0.");
+        goto end;
+    }
+    if (iterations > INT_MAX) {
+        PyErr_SetString(PyExc_OverflowError,
+                        "iteration value is too great.");
+        goto end;
+    }
+
+    if (dklen_obj == Py_None) {
+        dklen = EVP_MD_size(digest);
+    } else {
+        dklen = PyLong_AsLong(dklen_obj);
+        if ((dklen == -1) && PyErr_Occurred()) {
+            goto end;
+        }
+    }
+    if (dklen < 1) {
+        PyErr_SetString(PyExc_ValueError,
+                        "key length must be greater than 0.");
+        goto end;
+    }
+    if (dklen > INT_MAX) {
+        /* INT_MAX is always smaller than dkLen max (2^32 - 1) * hLen */
+        PyErr_SetString(PyExc_OverflowError,
+                        "key length is too great.");
+        goto end;
+    }
+
+    key_obj = PyBytes_FromStringAndSize(NULL, dklen);
+    if (key_obj == NULL) {
+        goto end;
+    }
+    key = PyBytes_AS_STRING(key_obj);
+
+    Py_BEGIN_ALLOW_THREADS
+    retval = PKCS5_PBKDF2_HMAC_fast((char*)password.buf, (int)password.len,
+                                    (unsigned char *)salt.buf, (int)salt.len,
+                                    iterations, digest, dklen,
+                                    (unsigned char *)key);
+    Py_END_ALLOW_THREADS
+
+    if (!retval) {
+        Py_CLEAR(key_obj);
+        _setException(PyExc_ValueError);
+        goto end;
+    }
+
+  end:
+    PyBuffer_Release(&password);
+    PyBuffer_Release(&salt);
+    return key_obj;
+}
+
+#endif
+
 /*
  *  This macro and function generates a family of constructor function
  *  definitions for specific hash algorithms.  These constructors are much
@@ -690,6 +911,10 @@ static struct PyMethodDef EVP_functions[
     CONSTRUCTOR_METH_DEF(sha384),
     CONSTRUCTOR_METH_DEF(sha512),
 #endif
+#ifdef PY_PBKDF2_HMAC
+    {"pbkdf2_hmac", (PyCFunction)pbkdf2_hmac, METH_VARARGS|METH_KEYWORDS,
+     pbkdf2_hmac__doc__},
+#endif
     {NULL,      NULL}            /* Sentinel */
 };
diff -up Python-2.7.5/Lib/test/test_hmac.py.cod Python-2.7.5/Lib/test/test_hmac.py
--- Python-2.7.5/Lib/test/test_hmac.py.cod    2015-02-23 10:37:13.448594606 +0100
+++ Python-2.7.5/Lib/test/test_hmac.py    2015-02-23 10:37:27.581717509 +0100
@@ -1,3 +1,5 @@
+# coding: utf-8
+
 import hmac
 import hashlib
 import unittest
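
A hedged sketch of how the backported KDF is typically combined with the constant-time comparison backported earlier in this series; the salt size and iteration count follow the guidance in the documentation hunk above, and all other names and values are illustrative.

    import hashlib
    import hmac
    import os

    def hash_password(password, salt=None, rounds=100000):
        # 16 random bytes of salt from os.urandom, per the documentation above.
        if salt is None:
            salt = os.urandom(16)
        dk = hashlib.pbkdf2_hmac('sha256', password, salt, rounds)
        return salt, rounds, dk

    def check_password(password, salt, rounds, expected_dk):
        dk = hashlib.pbkdf2_hmac('sha256', password, salt, rounds)
        # Constant-time comparison from the hmac.compare_digest backport.
        return hmac.compare_digest(dk, expected_dk)

    salt, rounds, dk = hash_password(b'correct horse battery staple')
    print(check_password(b'correct horse battery staple', salt, rounds, dk))  # True
    print(check_password(b'tr0ub4dor&3', salt, rounds, dk))                   # False
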
SOURCES/00211-pep466-UTF-7-decoder-fix-illegal-unicode.patch
New file
@@ -0,0 +1,67 @@
# HG changeset patch
# User Serhiy Storchaka <storchaka@gmail.com>
# Date 1382204269 -10800
# Node ID 214c0aac7540947d88a38ff0061734547ef86710
# Parent  c207ac413457a1b834e4b7dcf1a6836cd6e036e3
Issue #19279: UTF-7 decoder no longer produces illegal unicode strings.
diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -611,6 +611,35 @@ class UTF7Test(ReadTest):
             ]
         )
+    def test_errors(self):
+        tests = [
+            ('a\xffb', u'a\ufffdb'),
+            ('a+IK', u'a\ufffd'),
+            ('a+IK-b', u'a\ufffdb'),
+            ('a+IK,b', u'a\ufffdb'),
+            ('a+IKx', u'a\u20ac\ufffd'),
+            ('a+IKx-b', u'a\u20ac\ufffdb'),
+            ('a+IKwgr', u'a\u20ac\ufffd'),
+            ('a+IKwgr-b', u'a\u20ac\ufffdb'),
+            ('a+IKwgr,', u'a\u20ac\ufffd'),
+            ('a+IKwgr,-b', u'a\u20ac\ufffd-b'),
+            ('a+IKwgrB', u'a\u20ac\u20ac\ufffd'),
+            ('a+IKwgrB-b', u'a\u20ac\u20ac\ufffdb'),
+            ('a+/,+IKw-b', u'a\ufffd\u20acb'),
+            ('a+//,+IKw-b', u'a\ufffd\u20acb'),
+            ('a+///,+IKw-b', u'a\uffff\ufffd\u20acb'),
+            ('a+////,+IKw-b', u'a\uffff\ufffd\u20acb'),
+        ]
+        for raw, expected in tests:
+            self.assertRaises(UnicodeDecodeError, codecs.utf_7_decode,
+                              raw, 'strict', True)
+            self.assertEqual(raw.decode('utf-7', 'replace'), expected)
+
+    def test_nonbmp(self):
+        self.assertEqual(u'\U000104A0'.encode(self.encoding), '+2AHcoA-')
+        self.assertEqual(u'\ud801\udca0'.encode(self.encoding), '+2AHcoA-')
+        self.assertEqual('+2AHcoA-'.decode(self.encoding), u'\U000104A0')
+
 class UTF16ExTest(unittest.TestCase):
     def test_errors(self):
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -1671,6 +1671,7 @@ PyObject *PyUnicode_DecodeUTF7Stateful(c
                                        (base64buffer >> (base64bits-16));
                     base64bits -= 16;
                     base64buffer &= (1 << base64bits) - 1; /* clear high bits */
+                    assert(outCh <= 0xffff);
                     if (surrogate) {
                         /* expecting a second surrogate */
                         if (outCh >= 0xDC00 && outCh <= 0xDFFF) {
@@ -1737,6 +1738,7 @@ PyObject *PyUnicode_DecodeUTF7Stateful(c
                 inShift = 1;
                 shiftOutStart = p;
                 base64bits = 0;
+                base64buffer = 0;
             }
         }
         else if (DECODE_DIRECT(ch)) { /* character decodes as itself */
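
To illustrate the behaviour these tests pin down (a sketch in Python 2 syntax, matching the patched interpreter): a +base64 shift sequence cut off mid-character now raises under the strict error handler and is substituted with U+FFFD under 'replace', instead of yielding an illegal code unit.

    data = 'a+IK'                    # shift sequence truncated mid-character
    try:
        data.decode('utf-7')         # strict handler: now an error
    except UnicodeDecodeError as exc:
        print(exc)
    print(repr(data.decode('utf-7', 'replace')))   # u'a\ufffd'
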
SOURCES/00212-pep466-pyunicode_fromformat-raise-overflow.patch
New file
@@ -0,0 +1,40 @@
# HG changeset patch
# User Serhiy Storchaka <storchaka@gmail.com>
# Date 1372008129 -10800
# Node ID 2f1e8b7fa534b147280fdc9b92e44a7c7305338a
# Parent  8f0adcb66633ee97e4f7bdeee2104268113b86c3
Issue #18184: PyUnicode_FromFormat() and PyUnicode_FromFormatV() now raise
OverflowError when an argument of %c format is out of range.
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -740,8 +740,25 @@ PyUnicode_FromFormatV(const char *format
             switch (*f) {
             case 'c':
-                (void)va_arg(count, int);
+            {
+                int ordinal = va_arg(count, int);
+#ifdef Py_UNICODE_WIDE
+                if (ordinal < 0 || ordinal > 0x10ffff) {
+                    PyErr_SetString(PyExc_OverflowError,
+                                    "%c arg not in range(0x110000) "
+                                    "(wide Python build)");
+                    goto fail;
+                }
+#else
+                if (ordinal < 0 || ordinal > 0xffff) {
+                    PyErr_SetString(PyExc_OverflowError,
+                                    "%c arg not in range(0x10000) "
+                                    "(narrow Python build)");
+                    goto fail;
+                }
+#endif
                 /* fall through... */
+            }
             case '%':
                 n++;
                 break;
SOURCES/00213-pep466-pyunicode_fromformat-fix-formats.patch
New file
@@ -0,0 +1,176 @@
# HG changeset patch
# User Victor Stinner <victor.stinner@gmail.com>
# Date 1406673545 -7200
# Node ID 263701e0b77e3160bc6a835087f838bd6b24092a
# Parent  6c47c6d2033e20e9b35f1d22e0e797961d6e680f
Issue #22023: Fix %S, %R and %V formats of PyUnicode_FromFormat().
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -690,7 +690,12 @@ makefmt(char *fmt, int longflag, int siz
     *fmt = '\0';
 }
-#define appendstring(string) {for (copy = string;*copy;) *s++ = *copy++;}
+#define appendstring(string) \
+    do { \
+        for (copy = string;*copy; copy++) { \
+            *s++ = (unsigned char)*copy; \
+        } \
+    } while (0)
 PyObject *
 PyUnicode_FromFormatV(const char *format, va_list vargs)
@@ -845,7 +850,7 @@ PyUnicode_FromFormatV(const char *format
                 str = PyObject_Str(obj);
                 if (!str)
                     goto fail;
-                n += PyUnicode_GET_SIZE(str);
+                n += PyString_GET_SIZE(str);
                 /* Remember the str and switch to the next slot */
                 *callresult++ = str;
                 break;
@@ -1006,15 +1011,10 @@ PyUnicode_FromFormatV(const char *format
             case 'S':
             case 'R':
             {
-                Py_UNICODE *ucopy;
-                Py_ssize_t usize;
-                Py_ssize_t upos;
+                const char *str = PyString_AS_STRING(*callresult);
                 /* unused, since we already have the result */
                 (void) va_arg(vargs, PyObject *);
-                ucopy = PyUnicode_AS_UNICODE(*callresult);
-                usize = PyUnicode_GET_SIZE(*callresult);
-                for (upos = 0; upos<usize;)
-                    *s++ = ucopy[upos++];
+                appendstring(str);
                 /* We're done with the unicode()/repr() => forget it */
                 Py_DECREF(*callresult);
                 /* switch to next unicode()/repr() result */
diff -up Python-2.7.5/Lib/test/test_unicode.py.uni Python-2.7.5/Lib/test/test_unicode.py
--- Python-2.7.5/Lib/test/test_unicode.py.uni    2015-02-24 13:37:01.704739438 +0100
+++ Python-2.7.5/Lib/test/test_unicode.py    2015-02-24 13:38:38.439482167 +0100
@@ -1633,6 +1633,119 @@ class UnicodeTest(
         self.assertEqual("%s" % u, u'__unicode__ overridden')
         self.assertEqual("{}".format(u), '__unicode__ overridden')
+    # Test PyUnicode_FromFormat()
+    def test_from_format(self):
+        test_support.import_module('ctypes')
+        from ctypes import (
+            pythonapi, py_object, sizeof,
+            c_int, c_long, c_longlong, c_ssize_t,
+            c_uint, c_ulong, c_ulonglong, c_size_t, c_void_p)
+        if sys.maxunicode == 0xffff:
+            name = "PyUnicodeUCS2_FromFormat"
+        else:
+            name = "PyUnicodeUCS4_FromFormat"
+        _PyUnicode_FromFormat = getattr(pythonapi, name)
+        _PyUnicode_FromFormat.restype = py_object
+
+        def PyUnicode_FromFormat(format, *args):
+            cargs = tuple(
+                py_object(arg) if isinstance(arg, unicode) else arg
+                for arg in args)
+            return _PyUnicode_FromFormat(format, *cargs)
+
+        def check_format(expected, format, *args):
+            text = PyUnicode_FromFormat(format, *args)
+            self.assertEqual(expected, text)
+
+        # ascii format, non-ascii argument
+        check_format(u'ascii\x7f=unicode\xe9',
+                     b'ascii\x7f=%U', u'unicode\xe9')
+
+        # non-ascii format, ascii argument: ensure that PyUnicode_FromFormatV()
+        # raises an error
+        #self.assertRaisesRegex(ValueError,
+        #    '^PyUnicode_FromFormatV\(\) expects an ASCII-encoded format '
+        #    'string, got a non-ASCII byte: 0xe9$',
+        #    PyUnicode_FromFormat, b'unicode\xe9=%s', u'ascii')
+
+        # test "%c"
+        check_format(u'\uabcd',
+                     b'%c', c_int(0xabcd))
+        if sys.maxunicode > 0xffff:
+            check_format(u'\U0010ffff',
+                         b'%c', c_int(0x10ffff))
+        with self.assertRaises(OverflowError):
+            PyUnicode_FromFormat(b'%c', c_int(0x110000))
+        # Issue #18183
+        if sys.maxunicode > 0xffff:
+            check_format(u'\U00010000\U00100000',
+                         b'%c%c', c_int(0x10000), c_int(0x100000))
+
+        # test "%"
+        check_format(u'%',
+                     b'%')
+        check_format(u'%',
+                     b'%%')
+        check_format(u'%s',
+                     b'%%s')
+        check_format(u'[%]',
+                     b'[%%]')
+        check_format(u'%abc',
+                     b'%%%s', b'abc')
+
+        # test %S
+        check_format(u"repr=abc",
+                     b'repr=%S', u'abc')
+
+        # test %R
+        check_format(u"repr=u'abc'",
+                     b'repr=%R', u'abc')
+
+        # test integer formats (%i, %d, %u)
+        check_format(u'010',
+                     b'%03i', c_int(10))
+        check_format(u'0010',
+                     b'%0.4i', c_int(10))
+        check_format(u'-123',
+                     b'%i', c_int(-123))
+
+        check_format(u'-123',
+                     b'%d', c_int(-123))
+        check_format(u'-123',
+                     b'%ld', c_long(-123))
+        check_format(u'-123',
+                     b'%zd', c_ssize_t(-123))
+
+        check_format(u'123',
+                     b'%u', c_uint(123))
+        check_format(u'123',
+                     b'%lu', c_ulong(123))
+        check_format(u'123',
+                     b'%zu', c_size_t(123))
+
+        # test long output
+        PyUnicode_FromFormat(b'%p', c_void_p(-1))
+
+        # test %V
+        check_format(u'repr=abc',
+                     b'repr=%V', u'abc', b'xyz')
+        check_format(u'repr=\xe4\xba\xba\xe6\xb0\x91',
+                     b'repr=%V', None, b'\xe4\xba\xba\xe6\xb0\x91')
+        check_format(u'repr=abc\xff',
+                     b'repr=%V', None, b'abc\xff')
+
+        # not supported: copy the raw format string. these tests are just here
+        # to check for crashes and should not be considered as specifications
+        check_format(u'%s',
+                     b'%1%s', b'abc')
+        check_format(u'%1abc',
+                     b'%1abc')
+        check_format(u'%+i',
+                     b'%+i', c_int(10))
+        check_format(u'%s',
+                     b'%.%s', b'abc')
+
+
     def test_encode_decimal(self):
         from _testcapi import unicode_encodedecimal
         self.assertEqual(unicode_encodedecimal(u'123'),
SOURCES/00214-pep466-backport-py3-ssl-changes.patch
New file
Diff too large
SOURCES/00215-pep466-reflect-openssl-settings-ssltests.patch
New file
@@ -0,0 +1,26 @@
diff -up Python-2.7.5/Lib/test/test_ssl.py.ssl2 Python-2.7.5/Lib/test/test_ssl.py
--- Python-2.7.5/Lib/test/test_ssl.py.ssl2    2015-03-04 12:19:26.345387741 +0100
+++ Python-2.7.5/Lib/test/test_ssl.py    2015-03-04 12:32:43.485702679 +0100
@@ -689,7 +689,8 @@ class ContextTests(unittest.TestCase):
     @skip_if_broken_ubuntu_ssl
     def test_options(self):
         ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
-        # OP_ALL | OP_NO_SSLv2 is the default value
+        self.assertEqual(ssl.OP_ALL, ctx.options)
+        ctx.options |= ssl.OP_NO_SSLv2
         self.assertEqual(ssl.OP_ALL | ssl.OP_NO_SSLv2,
                          ctx.options)
         ctx.options |= ssl.OP_NO_SSLv3
@@ -2142,9 +2143,9 @@ else:
                 # No SSLv2 => client will use an SSLv3 hello on recent OpenSSLs
                 try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False,
                                    client_options=ssl.OP_NO_SSLv2)
-            try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False,
+            try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, True,
                                client_options=ssl.OP_NO_SSLv3)
-            try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False,
+            try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, True,
                                client_options=ssl.OP_NO_TLSv1)
         @skip_if_broken_ubuntu_ssl
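
The adjusted assertions reflect that, with the OpenSSL configuration this build links against, a freshly created context may start out with only OP_ALL set. A hedged sketch of what that means for callers, assuming the backported ssl module from this patch series:

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    # Legacy protocols are opted out of explicitly rather than assumed off.
    ctx.options |= ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3
    print(bool(ctx.options & ssl.OP_NO_SSLv2))   # True
    print(bool(ctx.options & ssl.OP_NO_SSLv3))   # True
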
SOURCES/00216-pep466-fix-load-verify-locs-unicode.patch
New file
@@ -0,0 +1,72 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1409232801 14400
# Node ID 97081a80f487841d81aeed55d398a1dba1faca00
# Parent  3ae399c6ecf685086ebf07e17717955f21e14cb8
fix load_verify_locations on unicode paths (closes #22244)
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -850,11 +850,14 @@ class ContextTests(unittest.TestCase):
         ctx.load_verify_locations(cafile=CERTFILE, capath=None)
         ctx.load_verify_locations(BYTES_CERTFILE)
         ctx.load_verify_locations(cafile=BYTES_CERTFILE, capath=None)
+        ctx.load_verify_locations(cafile=BYTES_CERTFILE.decode('utf-8'))
         self.assertRaises(TypeError, ctx.load_verify_locations)
         self.assertRaises(TypeError, ctx.load_verify_locations, None, None, None)
         with self.assertRaises(IOError) as cm:
             ctx.load_verify_locations(WRONGCERT)
         self.assertEqual(cm.exception.errno, errno.ENOENT)
+        with self.assertRaises(IOError):
+            ctx.load_verify_locations(u'')
         with self.assertRaisesRegexp(ssl.SSLError, "PEM lib"):
             ctx.load_verify_locations(BADCERT)
         ctx.load_verify_locations(CERTFILE, CAPATH)
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -2628,17 +2628,33 @@ load_verify_locations(PySSLContext *self
     }
     if (cafile) {
-        cafile_bytes = PyString_AsEncodedObject(
-            cafile, Py_FileSystemDefaultEncoding, "strict");
-        if (!cafile_bytes) {
-            goto error;
+        if (PyString_Check(cafile)) {
+            Py_INCREF(cafile);
+            cafile_bytes = cafile;
+        } else {
+            PyObject *u = PyUnicode_FromObject(cafile);
+            if (!u)
+                goto error;
+            cafile_bytes = PyUnicode_AsEncodedString(
+                u, Py_FileSystemDefaultEncoding, NULL);
+            Py_DECREF(u);
+            if (!cafile_bytes)
+                goto error;
         }
     }
     if (capath) {
-        capath_bytes = PyString_AsEncodedObject(
-            capath, Py_FileSystemDefaultEncoding, "strict");
-        if (!capath_bytes) {
-            goto error;
+        if (PyString_Check(capath)) {
+            Py_INCREF(capath);
+            capath_bytes = capath;
+        } else {
+            PyObject *u = PyUnicode_FromObject(capath);
+            if (!u)
+                goto error;
+            capath_bytes = PyUnicode_AsEncodedString(
+                u, Py_FileSystemDefaultEncoding, NULL);
+            Py_DECREF(u);
+            if (!capath_bytes)
+                goto error;
         }
     }
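
In user code the fix simply means both spellings of a CA path now behave the same; the bundle path below is illustrative only.

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    # Byte and unicode paths are both accepted; the unicode one is encoded
    # with the filesystem encoding internally before being handed to OpenSSL.
    ctx.load_verify_locations(cafile=b'/etc/pki/tls/certs/ca-bundle.crt')
    ctx.load_verify_locations(cafile=u'/etc/pki/tls/certs/ca-bundle.crt')
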
SOURCES/00217-pep466-backport-hashlib-algorithm-consts.patch
New file
@@ -0,0 +1,189 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1409233289 14400
# Node ID 3f73c44b1fd1d442d6841493328e9756fb5e7ef5
# Parent  97081a80f487841d81aeed55d398a1dba1faca00
PEP 466: backport hashlib algorithm constants (closes #21307)
diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst
--- a/Doc/library/hashlib.rst
+++ b/Doc/library/hashlib.rst
@@ -88,6 +88,24 @@ This module provides the following const
    .. versionadded:: 2.7
+.. data:: algorithms_guaranteed
+
+   A set containing the names of the hash algorithms guaranteed to be supported
+   by this module on all platforms.
+
+   .. versionadded:: 2.7.9
+
+.. data:: algorithms_available
+
+   A set containing the names of the hash algorithms that are available in the
+   running Python interpreter.  These names will be recognized when passed to
+   :func:`new`.  :attr:`algorithms_guaranteed` will always be a subset.  The
+   same algorithm may appear multiple times in this set under different names
+   (thanks to OpenSSL).
+
+   .. versionadded:: 2.7.9
+
+
 The following values are provided as constant attributes of the hash objects
 returned by the constructors:
diff -up Python-2.7.5/Lib/hashlib.py.hash Python-2.7.5/Lib/hashlib.py
--- Python-2.7.5/Lib/hashlib.py.hash    2015-03-04 17:05:57.496598686 +0100
+++ Python-2.7.5/Lib/hashlib.py    2015-03-04 17:11:34.872739103 +0100
@@ -18,8 +18,9 @@ than using new():
 md5(), sha1(), sha224(), sha256(), sha384(), and sha512()
-More algorithms may be available on your platform but the above are
-guaranteed to exist.
+More algorithms may be available on your platform but the above are guaranteed
+to exist.  See the algorithms_guaranteed and algorithms_available attributes
+to find out what algorithm names can be passed to new().
 NOTE: If you want the adler32 or crc32 hash functions they are available in
 the zlib module.
@@ -75,9 +76,14 @@ More condensed:
 # always available algorithm is added.
 __always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
+algorithms_guaranteed = set(__always_supported)
+algorithms_available = set(__always_supported)
+
 algorithms = __always_supported
-__all__ = __always_supported + ('new', 'algorithms', 'pbkdf2_hmac')
+__all__ = __always_supported + ('new', 'algorithms_guaranteed',
+                                'algorithms_available', 'algorithms',
+                                'pbkdf2_hmac')
 def __get_openssl_constructor(name):
@@ -110,6 +116,8 @@ try:
     import _hashlib
     new = __hash_new
     __get_hash = __get_openssl_constructor
+    algorithms_available = algorithms_available.union(
+        _hashlib.openssl_md_meth_names)
 except ImportError:
     # We don't build the legacy modules
     raise
diff -up Python-2.7.5/Modules/_hashopenssl.c.hash Python-2.7.5/Modules/_hashopenssl.c
--- Python-2.7.5/Modules/_hashopenssl.c.hash    2015-03-04 17:06:18.246791837 +0100
+++ Python-2.7.5/Modules/_hashopenssl.c    2015-03-04 17:16:17.696369000 +0100
@@ -784,6 +784,61 @@ pbkdf2_hmac(PyObject *self, PyObject *ar
 #endif
+/* State for our callback function so that it can accumulate a result. */
+typedef struct _internal_name_mapper_state {
+    PyObject *set;
+    int error;
+} _InternalNameMapperState;
+
+
+/* A callback function to pass to OpenSSL's OBJ_NAME_do_all(...) */
+static void
+_openssl_hash_name_mapper(const OBJ_NAME *openssl_obj_name, void *arg)
+{
+    _InternalNameMapperState *state = (_InternalNameMapperState *)arg;
+    PyObject *py_name;
+
+    assert(state != NULL);
+    if (openssl_obj_name == NULL)
+        return;
+    /* Ignore aliased names, they pollute the list and OpenSSL appears to
+     * have its own definition of alias, as the resulting list still
+     * contains duplicate and alternate names for several algorithms.     */
+    if (openssl_obj_name->alias)
+        return;
+
+    py_name = PyString_FromString(openssl_obj_name->name);
+    if (py_name == NULL) {
+        state->error = 1;
+    } else {
+        if (PySet_Add(state->set, py_name) != 0) {
+            state->error = 1;
+        }
+        Py_DECREF(py_name);
+    }
+}
+
+
+/* Ask OpenSSL for a list of supported message digests, filling in a Python set. */
+static PyObject*
+generate_hash_name_list(void)
+{
+    _InternalNameMapperState state;
+    state.set = PyFrozenSet_New(NULL);
+    if (state.set == NULL)
+        return NULL;
+    state.error = 0;
+
+    OBJ_NAME_do_all(OBJ_NAME_TYPE_MD_METH, &_openssl_hash_name_mapper, &state);
+
+    if (state.error) {
+        Py_DECREF(state.set);
+        return NULL;
+    }
+    return state.set;
+}
+
+
 /*
  *  This macro and function generates a family of constructor function
  *  definitions for specific hash algorithms.  These constructors are much
@@ -924,11 +979,11 @@ static struct PyMethodDef EVP_functions[
 PyMODINIT_FUNC
 init_hashlib(void)
 {
-    PyObject *m;
+    PyObject *m, *openssl_md_meth_names;
     SSL_load_error_strings();
     SSL_library_init();
-    OpenSSL_add_all_digests();
+    ERR_load_crypto_strings();
     Py_TYPE(&EVPtype) = &PyType_Type;
     if (PyType_Ready(&EVPtype) < 0)
@@ -938,6 +993,14 @@ init_hashlib(void)
     if (m == NULL)
         return;
+    openssl_md_meth_names = generate_hash_name_list();
+    if (openssl_md_meth_names == NULL) {
+        return;
+    }
+    if (PyModule_AddObject(m, "openssl_md_meth_names", openssl_md_meth_names)) {
+        return;
+    }
+
 #if HASH_OBJ_CONSTRUCTOR
     Py_INCREF(&EVPtype);
     PyModule_AddObject(m, "HASH", (PyObject *)&EVPtype);
diff -up Python-2.7.5/Lib/test/test_hashlib.py.hash Python-2.7.5/Lib/test/test_hashlib.py
--- Python-2.7.5/Lib/test/test_hashlib.py.hash    2015-03-04 18:04:57.823553474 +0100
+++ Python-2.7.5/Lib/test/test_hashlib.py    2015-03-04 18:06:39.395499123 +0100
@@ -107,6 +107,15 @@ class HashLibTestCase(unittest.TestCase)
             tuple([_algo for _algo in self.supported_hash_names if
                                                 _algo.islower()]))
+    def test_algorithms_guaranteed(self):
+        self.assertEqual(hashlib.algorithms_guaranteed,
+            set(_algo for _algo in self.supported_hash_names
+                  if _algo.islower()))
+
+    def test_algorithms_available(self):
+        self.assertTrue(set(hashlib.algorithms_guaranteed).
+                            issubset(hashlib.algorithms_available))
+
     def test_unknown_hash(self):
         self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam')
         self.assertRaises(TypeError, hashlib.new, 1)
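
A short, hedged sketch of the two new constants on the patched build; the actual contents of the available set depend on the system OpenSSL.

    import hashlib

    # Fixed set of names that always work, and the larger set of names the
    # linked OpenSSL registers (possibly including aliases).
    print(sorted(hashlib.algorithms_guaranteed))
    print(hashlib.algorithms_guaranteed.issubset(hashlib.algorithms_available))  # True

    # Any name in algorithms_available is accepted by hashlib.new().
    name = sorted(hashlib.algorithms_available)[0]
    print(hashlib.new(name, b'data').hexdigest())
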
SOURCES/00218-pep466-backport-urandom-pers-fd.patch
New file
@@ -0,0 +1,165 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1409243400 14400
# Node ID 3e7f8855078855a9409bc2c1372de89cb021d6c8
# Parent  3f73c44b1fd1d442d6841493328e9756fb5e7ef5
PEP 466: backport persistent urandom fd (closes #21305)
Patch from Alex Gaynor.
diff --git a/Python/pythonrun.c b/Python/pythonrun.c
--- a/Python/pythonrun.c
+++ b/Python/pythonrun.c
@@ -536,6 +536,7 @@ Py_Finalize(void)
     PyInt_Fini();
     PyFloat_Fini();
     PyDict_Fini();
+    _PyRandom_Fini();
 #ifdef Py_USING_UNICODE
     /* Cleanup Unicode implementation */
diff -up Python-2.7.5/Include/pythonrun.h.urandom Python-2.7.5/Include/pythonrun.h
--- Python-2.7.5/Include/pythonrun.h.urandom    2015-03-06 08:16:47.638584015 +0100
+++ Python-2.7.5/Include/pythonrun.h    2015-03-06 08:21:48.009485462 +0100
@@ -145,6 +145,7 @@ PyAPI_FUNC(void) PyInt_Fini(void);
 PyAPI_FUNC(void) PyFloat_Fini(void);
 PyAPI_FUNC(void) PyOS_FiniInterrupts(void);
 PyAPI_FUNC(void) PyByteArray_Fini(void);
+PyAPI_FUNC(void) _PyRandom_Fini(void);
 /* Stuff with no proper home (yet) */
 PyAPI_FUNC(char *) PyOS_Readline(FILE *, FILE *, char *);
diff -up Python-2.7.5/Python/random.c.urandom Python-2.7.5/Python/random.c
--- Python-2.7.5/Python/random.c.urandom    2015-03-06 08:22:10.244699950 +0100
+++ Python-2.7.5/Python/random.c    2015-03-06 08:24:57.907317272 +0100
@@ -118,10 +118,16 @@ vms_urandom(unsigned char *buffer, Py_ss
 #if !defined(MS_WINDOWS) && !defined(__VMS)
+static struct {
+    int fd;
+    dev_t st_dev;
+    ino_t st_ino;
+} urandom_cache = { -1 };
+
 /* Read size bytes from /dev/urandom into buffer.
    Call Py_FatalError() on error. */
 static void
-dev_urandom_noraise(char *buffer, Py_ssize_t size)
+dev_urandom_noraise(unsigned char *buffer, Py_ssize_t size)
 {
     int fd;
     Py_ssize_t n;
@@ -156,18 +162,56 @@ dev_urandom_python(char *buffer, Py_ssiz
 {
     int fd;
     Py_ssize_t n;
+    struct stat st;
     if (size <= 0)
         return 0;
-    Py_BEGIN_ALLOW_THREADS
-    fd = open("/dev/urandom", O_RDONLY);
-    Py_END_ALLOW_THREADS
-    if (fd < 0)
-    {
-        PyErr_SetString(PyExc_NotImplementedError,
-                        "/dev/urandom (or equivalent) not found");
-        return -1;
+    if (urandom_cache.fd >= 0) {
+        /* Does the fd point to the same thing as before? (issue #21207) */
+        if (fstat(urandom_cache.fd, &st)
+            || st.st_dev != urandom_cache.st_dev
+            || st.st_ino != urandom_cache.st_ino) {
+            /* Something changed: forget the cached fd (but don't close it,
+               since it probably points to something important for some
+               third-party code). */
+            urandom_cache.fd = -1;
+        }
+    }
+    if (urandom_cache.fd >= 0)
+        fd = urandom_cache.fd;
+    else {
+        Py_BEGIN_ALLOW_THREADS
+        fd = open("/dev/urandom", O_RDONLY);
+        Py_END_ALLOW_THREADS
+        if (fd < 0)
+        {
+            if (errno == ENOENT || errno == ENXIO ||
+                errno == ENODEV || errno == EACCES)
+                PyErr_SetString(PyExc_NotImplementedError,
+                                "/dev/urandom (or equivalent) not found");
+            else
+                PyErr_SetFromErrno(PyExc_OSError);
+            return -1;
+        }
+        if (urandom_cache.fd >= 0) {
+            /* urandom_fd was initialized by another thread while we were
+               not holding the GIL, keep it. */
+            close(fd);
+            fd = urandom_cache.fd;
+        }
+        else {
+            if (fstat(fd, &st)) {
+                PyErr_SetFromErrno(PyExc_OSError);
+                close(fd);
+                return -1;
+            }
+            else {
+                urandom_cache.fd = fd;
+                urandom_cache.st_dev = st.st_dev;
+                urandom_cache.st_ino = st.st_ino;
+            }
+        }
     }
     Py_BEGIN_ALLOW_THREADS
@@ -191,12 +235,21 @@ dev_urandom_python(char *buffer, Py_ssiz
             PyErr_Format(PyExc_RuntimeError,
                          "Failed to read %zi bytes from /dev/urandom",
                          size);
-        close(fd);
         return -1;
     }
-    close(fd);
     return 0;
 }
+
+static void
+dev_urandom_close(void)
+{
+    if (urandom_cache.fd >= 0) {
+        close(urandom_cache.fd);
+        urandom_cache.fd = -1;
+    }
+}
+
+
 #endif /* !defined(MS_WINDOWS) && !defined(__VMS) */
 /* Fill buffer with pseudo-random bytes generated by a linear congruent
@@ -300,8 +353,21 @@ _PyRandom_Init(void)
 # ifdef __VMS
         vms_urandom((unsigned char *)secret, secret_size, 0);
 # else
-        dev_urandom_noraise((char*)secret, secret_size);
+        dev_urandom_noraise((unsigned char*)secret, secret_size);
 # endif
 #endif
     }
 }
+
+void
+_PyRandom_Fini(void)
+{
+#ifdef MS_WINDOWS
+    if (hCryptProv) {
+        CryptReleaseContext(hCryptProv, 0);
+        hCryptProv = 0;
+    }
+#else
+    dev_urandom_close();
+#endif
+}
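
The C change amounts to caching the /dev/urandom descriptor and re-validating it before each use; the strategy is sketched below in Python purely as a readability aid. The helper names are hypothetical and not part of the patch.

    import os

    _cache = {'fd': -1, 'dev': None, 'ino': None}

    def _urandom_fd():
        # Re-use the cached descriptor only while it still refers to the same
        # device/inode (issue #21207); otherwise forget it without closing it,
        # since the descriptor number may now belong to third-party code.
        if _cache['fd'] >= 0:
            try:
                st = os.fstat(_cache['fd'])
                same = (st.st_dev, st.st_ino) == (_cache['dev'], _cache['ino'])
            except OSError:
                same = False
            if not same:
                _cache['fd'] = -1
        if _cache['fd'] < 0:
            fd = os.open('/dev/urandom', os.O_RDONLY)
            st = os.fstat(fd)
            _cache.update(fd=fd, dev=st.st_dev, ino=st.st_ino)
        return _cache['fd']

    print(len(os.read(_urandom_fd(), 16)))   # 16 random bytes; fd stays open
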
SOURCES/00219-pep466-fix-referenced-sslwrap.patch
New file
@@ -0,0 +1,91 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1412221981 14400
# Node ID 1a36d4e8cf4edfdc4c7d59a40075b8cf00e3ad3c
# Parent  222e0faa5fa9567f657f13fc78a60069142e09ae
fix sslwrap_simple (closes #22523)
Thanks Alex Gaynor.
diff --git a/Lib/ssl.py b/Lib/ssl.py
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -969,16 +969,16 @@ def get_protocol_name(protocol_code):
 # a replacement for the old socket.ssl function
 def sslwrap_simple(sock, keyfile=None, certfile=None):
-
     """A replacement for the old socket.ssl function.  Designed
     for compatibility with Python 2.5 and earlier.  Will disappear in
     Python 3.0."""
-
     if hasattr(sock, "_sock"):
         sock = sock._sock
-    ssl_sock = _ssl.sslwrap(sock, 0, keyfile, certfile, CERT_NONE,
-                            PROTOCOL_SSLv23, None)
+    ctx = SSLContext(PROTOCOL_SSLv23)
+    if keyfile or certfile:
+        ctx.load_cert_chain(certfile, keyfile)
+    ssl_sock = ctx._wrap_socket(sock, server_side=False)
     try:
         sock.getpeername()
     except socket_error:
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -94,6 +94,8 @@ class BasicTests(unittest.TestCase):
                 pass
             else:
                 raise
+
+
 def can_clear_options():
     # 0.9.8m or higher
     return ssl._OPENSSL_API_VERSION >= (0, 9, 8, 13, 15)
@@ -2944,7 +2946,7 @@ def test_main(verbose=False):
         if not os.path.exists(filename):
             raise support.TestFailed("Can't read certificate file %r" % filename)
-    tests = [ContextTests, BasicSocketTests, SSLErrorTests]
+    tests = [ContextTests, BasicTests, BasicSocketTests, SSLErrorTests]
     if support.is_resource_enabled('network'):
         tests.append(NetworkedTests)
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -517,10 +517,12 @@ newPySSLSocket(PySSLContext *sslctx, PyS
     self->socket_type = socket_type;
     self->Socket = sock;
     Py_INCREF(self->Socket);
-    self->ssl_sock = PyWeakref_NewRef(ssl_sock, NULL);
-    if (self->ssl_sock == NULL) {
-        Py_DECREF(self);
-        return NULL;
+    if (ssl_sock != Py_None) {
+        self->ssl_sock = PyWeakref_NewRef(ssl_sock, NULL);
+        if (self->ssl_sock == NULL) {
+            Py_DECREF(self);
+            return NULL;
+        }
     }
     return self;
 }
@@ -2931,8 +2933,12 @@ static int
     ssl = SSL_get_app_data(s);
     assert(PySSLSocket_Check(ssl));
-    ssl_socket = PyWeakref_GetObject(ssl->ssl_sock);
-    Py_INCREF(ssl_socket);
+    if (ssl->ssl_sock == NULL) {
+        ssl_socket = Py_None;
+    } else {
+        ssl_socket = PyWeakref_GetObject(ssl->ssl_sock);
+        Py_INCREF(ssl_socket);
+    }
     if (ssl_socket == Py_None) {
         goto error;
     }
SOURCES/00220-pep466-allow-passing-ssl-urrlib-httplib.patch
New file
@@ -0,0 +1,673 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1416764565 21600
# Node ID 1882157b298a164291d2b3a8b9525eb0902895f6
# Parent  588ebc8fd3daf7307961cd614c4da9525bb67313
allow passing cert/ssl information to urllib2.urlopen and httplib.HTTPSConnection
This is basically a backport of issues #9003 and #22366.
diff --git a/Doc/library/httplib.rst b/Doc/library/httplib.rst
--- a/Doc/library/httplib.rst
+++ b/Doc/library/httplib.rst
@@ -70,12 +70,25 @@ The module provides the following classe
       *source_address* was added.
-.. class:: HTTPSConnection(host[, port[, key_file[, cert_file[, strict[, timeout[, source_address]]]]]])
+.. class:: HTTPSConnection(host[, port[, key_file[, cert_file[, strict[, timeout[, source_address, context, check_hostname]]]]]])
    A subclass of :class:`HTTPConnection` that uses SSL for communication with
-   secure servers.  Default port is ``443``. *key_file* is the name of a PEM
-   formatted file that contains your private key. *cert_file* is a PEM formatted
-   certificate chain file.
+   secure servers.  Default port is ``443``.  If *context* is specified, it must
+   be a :class:`ssl.SSLContext` instance describing the various SSL options.
+
+   *key_file* and *cert_file* are deprecated, please use
+   :meth:`ssl.SSLContext.load_cert_chain` instead, or let
+   :func:`ssl.create_default_context` select the system's trusted CA
+   certificates for you.
+
+   Please read :ref:`ssl-security` for more information on best practices.
+
+   .. note::
+      If *context* is specified and has a :attr:`~ssl.SSLContext.verify_mode`
+      of either :data:`~ssl.CERT_OPTIONAL` or :data:`~ssl.CERT_REQUIRED`, then
+      by default *host* is matched against the host name(s) allowed by the
+      server's certificate.  If you want to change that behaviour, you can
+      explicitly set *check_hostname* to False.
    .. warning::
       This does not do any verification of the server's certificate.
@@ -88,6 +101,9 @@ The module provides the following classe
    .. versionchanged:: 2.7
       *source_address* was added.
+   .. versionchanged:: 2.7.9
+      *context* and *check_hostname* were added.
+
 .. class:: HTTPResponse(sock, debuglevel=0, strict=0)
diff --git a/Lib/test/keycert2.pem b/Lib/test/keycert2.pem
new file mode 100644
--- /dev/null
+++ b/Lib/test/keycert2.pem
@@ -0,0 +1,31 @@
+-----BEGIN PRIVATE KEY-----
+MIICdQIBADANBgkqhkiG9w0BAQEFAASCAl8wggJbAgEAAoGBANcLaMB7T/Wi9DBc
+PltGzgt8cxsv55m7PQPHMZvn6Ke8xmNqcmEzib8opRwKGrCV6TltKeFlNSg8dwQK
+Tl4ktyTkGCVweRQJ37AkBayvEBml5s+QD4vlhqkJPsL/Nsd+fnqngOGc5+59+C6r
+s3XpiLlF5ah/z8q92Mnw54nypw1JAgMBAAECgYBE3t2Mj7GbDLZB6rj5yKJioVfI
+BD6bSJEQ7bGgqdQkLFwpKMU7BiN+ekjuwvmrRkesYZ7BFgXBPiQrwhU5J28Tpj5B
+EOMYSIOHfzdalhxDGM1q2oK9LDFiCotTaSdEzMYadel5rmKXJ0zcK2Jho0PCuECf
+tf/ghRxK+h1Hm0tKgQJBAO6MdGDSmGKYX6/5kPDje7we/lSLorSDkYmV0tmVShsc
+JxgaGaapazceA/sHL3Myx7Eenkip+yPYDXEDFvAKNDECQQDmxsT9NOp6mo7ISvky
+GFr2vVHsJ745BMWoma4rFjPBVnS8RkgK+b2EpDCdZSrQ9zw2r8sKTgrEyrDiGTEg
+wJyZAkA8OOc0flYMJg2aHnYR6kwVjPmGHI5h5gk648EMPx0rROs1sXkiUwkHLCOz
+HvhCq+Iv+9vX2lnVjbiu/CmxRdIxAkA1YEfzoKeTD+hyXxTgB04Sv5sRGegfXAEz
+i8gC4zG5R/vcCA1lrHmvEiLEZL/QcT6WD3bQvVg0SAU9ZkI8pxARAkA7yqMSvP1l
+gJXy44R+rzpLYb1/PtiLkIkaKG3x9TUfPnfD2jY09fPkZlfsRU3/uS09IkhSwimV
+d5rWoljEfdou
+-----END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIICXTCCAcagAwIBAgIJALVQzebTtrXFMA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV
+BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u
+IFNvZnR3YXJlIEZvdW5kYXRpb24xFTATBgNVBAMMDGZha2Vob3N0bmFtZTAeFw0x
+NDExMjMxNzAwMDdaFw0yNDExMjAxNzAwMDdaMGIxCzAJBgNVBAYTAlhZMRcwFQYD
+VQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9uIFNvZnR3YXJlIEZv
+dW5kYXRpb24xFTATBgNVBAMMDGZha2Vob3N0bmFtZTCBnzANBgkqhkiG9w0BAQEF
+AAOBjQAwgYkCgYEA1wtowHtP9aL0MFw+W0bOC3xzGy/nmbs9A8cxm+fop7zGY2py
+YTOJvyilHAoasJXpOW0p4WU1KDx3BApOXiS3JOQYJXB5FAnfsCQFrK8QGaXmz5AP
+i+WGqQk+wv82x35+eqeA4Zzn7n34LquzdemIuUXlqH/Pyr3YyfDnifKnDUkCAwEA
+AaMbMBkwFwYDVR0RBBAwDoIMZmFrZWhvc3RuYW1lMA0GCSqGSIb3DQEBBQUAA4GB
+AKuay3vDKfWzt5+ch/HHBsert84ISot4fUjzXDA/oOgTOEjVcSShHxqNShMOW1oA
+QYBpBB/5Kx5RkD/w6imhucxt2WQPRgjX4x4bwMipVH/HvFDp03mG51/Cpi1TyZ74
+El7qa/Pd4lHhOLzMKBA6503fpeYSFUIBxZbGLqylqRK7
+-----END CERTIFICATE-----
diff --git a/Lib/test/selfsigned_pythontestdotnet.pem b/Lib/test/selfsigned_pythontestdotnet.pem
new file mode 100644
--- /dev/null
+++ b/Lib/test/selfsigned_pythontestdotnet.pem
@@ -0,0 +1,16 @@
+-----BEGIN CERTIFICATE-----
+MIIChzCCAfCgAwIBAgIJAKGU95wKR8pSMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV
+BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u
+IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv
+bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG
+A1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMwIQYDVQQKDBpQeXRo
+b24gU29mdHdhcmUgRm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0
+aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ
+Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm
+Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv
+EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjKTAnMCUGA1UdEQQeMByCGnNl
+bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MA0GCSqGSIb3DQEBBQUAA4GBAIOXmdtM
+eG9qzP9TiXW/Gc/zI4cBfdCpC+Y4gOfC9bQUC7hefix4iO3+iZjgy3X/FaRxUUoV
+HKiXcXIaWqTSUWp45cSh0MbwZXudp6JIAptzdAhvvCrPKeC9i9GvxsPD4LtDAL97
+vSaxQBezA7hdxZd90/EeyMgVZgAnTCnvAWX9
+-----END CERTIFICATE-----
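
A hedged sketch of the call patterns this patch enables; the URL and CA-file path are illustrative, and the first two calls need network access.

    import ssl
    import urllib2

    # Verify the server against an explicitly configured context...
    context = ssl.create_default_context()
    urllib2.urlopen("https://www.python.org/", context=context)

    # ...or let urlopen build a verifying context from a CA file.
    urllib2.urlopen("https://www.python.org/",
                    cafile="/etc/pki/tls/certs/ca-bundle.crt")

    # Passing both forms at once is rejected, as the new test asserts.
    try:
        urllib2.urlopen("https://www.python.org/", cafile="x", context=context)
    except ValueError as exc:
        print(exc)
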
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -8,6 +8,11 @@ import StringIO
 import urllib2
 from urllib2 import Request, OpenerDirector
+try:
+    import ssl
+except ImportError:
+    ssl = None
+
 # XXX
 # Request
 # CacheFTPHandler (hard to write)
@@ -47,6 +52,14 @@ class TrivialTests(unittest.TestCase):
         for string, list in tests:
             self.assertEqual(urllib2.parse_http_list(string), list)
+    @unittest.skipUnless(ssl, "ssl module required")
+    def test_cafile_and_context(self):
+        context = ssl.create_default_context()
+        with self.assertRaises(ValueError):
+            urllib2.urlopen(
+                "https://localhost", cafile="/nonexistent/path", context=context
+            )
+
 def test_request_headers_dict():
     """
diff --git a/Lib/urllib2.py b/Lib/urllib2.py
--- a/Lib/urllib2.py
+++ b/Lib/urllib2.py
@@ -109,6 +109,14 @@ try:
 except ImportError:
     from StringIO import StringIO
+# check for SSL
+try:
+    import ssl
+except ImportError:
+    _have_ssl = False
+else:
+    _have_ssl = True
+
 from urllib import (unwrap, unquote, splittype, splithost, quote,
      addinfourl, splitport, splittag, toBytes,
      splitattr, ftpwrapper, splituser, splitpasswd, splitvalue)
@@ -120,11 +128,30 @@ from urllib import localhost, url2pathna
 __version__ = sys.version[:3]
 _opener = None
-def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
+def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+            cafile=None, capath=None, cadefault=False, context=None):
     global _opener
-    if _opener is None:
-        _opener = build_opener()
-    return _opener.open(url, data, timeout)
+    if cafile or capath or cadefault:
+        if context is not None:
+            raise ValueError(
+                "You can't pass both context and any of cafile, capath, and "
+                "cadefault"
+            )
+        if not _have_ssl:
+            raise ValueError('SSL support not available')
+        context = ssl._create_stdlib_context(cert_reqs=ssl.CERT_REQUIRED,
+                                             cafile=cafile,
+                                             capath=capath)
+        https_handler = HTTPSHandler(context=context, check_hostname=True)
+        opener = build_opener(https_handler)
+    elif context:
+        https_handler = HTTPSHandler(context=context)
+        opener = build_opener(https_handler)
+    elif _opener is None:
+        _opener = opener = build_opener()
+    else:
+        opener = _opener
+    return opener.open(url, data, timeout)
 def install_opener(opener):
     global _opener
@@ -1121,7 +1148,7 @@ class AbstractHTTPHandler(BaseHandler):
         return request
-    def do_open(self, http_class, req):
+    def do_open(self, http_class, req, **http_conn_args):
         """Return an addinfourl object for the request, using http_class.
         http_class must implement the HTTPConnection API from httplib.
@@ -1135,7 +1162,8 @@ class AbstractHTTPHandler(BaseHandler):
         if not host:
             raise URLError('no host given')
-        h = http_class(host, timeout=req.timeout) # will parse host:port
+        # will parse host:port
+        h = http_class(host, timeout=req.timeout, **http_conn_args)
         h.set_debuglevel(self._debuglevel)
         headers = dict(req.unredirected_hdrs)
@@ -1203,8 +1231,14 @@ class HTTPHandler(AbstractHTTPHandler):
 if hasattr(httplib, 'HTTPS'):
     class HTTPSHandler(AbstractHTTPHandler):
+        def __init__(self, debuglevel=0, context=None, check_hostname=None):
+            AbstractHTTPHandler.__init__(self, debuglevel)
+            self._context = context
+            self._check_hostname = check_hostname
+
         def https_open(self, req):
-            return self.do_open(httplib.HTTPSConnection, req)
+            return self.do_open(httplib.HTTPSConnection, req,
+                context=self._context, check_hostname=self._check_hostname)
         https_request = AbstractHTTPHandler.do_request_
diff -up Python-2.7.5/Lib/test/test_urllib2_localnet.py.ctx Python-2.7.5/Lib/test/test_urllib2_localnet.py
--- Python-2.7.5/Lib/test/test_urllib2_localnet.py.ctx    2015-03-30 10:13:48.351310552 +0200
+++ Python-2.7.5/Lib/test/test_urllib2_localnet.py    2015-03-30 10:14:54.715713679 +0200
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
+import os
 import urlparse
 import urllib2
 import BaseHTTPServer
@@ -11,6 +12,17 @@ from test import test_support
 mimetools = test_support.import_module('mimetools', deprecated=True)
 threading = test_support.import_module('threading')
+try:
+    import ssl
+except ImportError:
+    ssl = None
+
+here = os.path.dirname(__file__)
+# Self-signed cert file for 'localhost'
+CERT_localhost = os.path.join(here, 'keycert.pem')
+# Self-signed cert file for 'fakehostname'
+CERT_fakehostname = os.path.join(here, 'keycert2.pem')
+
 # Loopback http server infrastructure
 class LoopbackHttpServer(BaseHTTPServer.HTTPServer):
@@ -25,7 +37,7 @@ class LoopbackHttpServer(BaseHTTPServer.
         # Set the timeout of our listening socket really low so
         # that we can stop the server easily.
-        self.socket.settimeout(1.0)
+        self.socket.settimeout(0.1)
     def get_request(self):
         """BaseHTTPServer method, overridden."""
@@ -354,6 +366,19 @@ class TestUrlopen(BaseTestCase):
         urllib2.install_opener(opener)
         super(TestUrlopen, self).setUp()
+    def urlopen(self, url, data=None, **kwargs):
+        l = []
+        f = urllib2.urlopen(url, data, **kwargs)
+        try:
+            # Exercise various methods
+            l.extend(f.readlines(200))
+            l.append(f.readline())
+            l.append(f.read(1024))
+            l.append(f.read())
+        finally:
+            f.close()
+        return b"".join(l)
+
     def start_server(self, responses):
         handler = GetRequestHandler(responses)
@@ -364,6 +389,16 @@ class TestUrlopen(BaseTestCase):
         handler.port = port
         return handler
+    def start_https_server(self, responses=None, **kwargs):
+        if not hasattr(urllib2, 'HTTPSHandler'):
+            self.skipTest('ssl support required')
+        from test.ssl_servers import make_https_server
+        if responses is None:
+            responses = [(200, [], b"we care a bit")]
+        handler = GetRequestHandler(responses)
+        server = make_https_server(self, handler_class=handler, **kwargs)
+        handler.port = server.port
+        return handler
     def test_redirection(self):
         expected_response = 'We got here...'
@@ -434,6 +469,28 @@ class TestUrlopen(BaseTestCase):
         finally:
             self.server.stop()
+    def test_https(self):
+        handler = self.start_https_server()
+        context = ssl.create_default_context(cafile=CERT_localhost)
+        data = self.urlopen("https://localhost:%s/bizarre" % handler.port, context=context)
+        self.assertEqual(data, b"we care a bit")
+
+    def test_https_with_cafile(self):
+        handler = self.start_https_server(certfile=CERT_localhost)
+        import ssl
+        # Good cert
+        data = self.urlopen("https://localhost:%s/bizarre" % handler.port,
+                            cafile=CERT_localhost)
+        self.assertEqual(data, b"we care a bit")
+        # Bad cert
+        with self.assertRaises(urllib2.URLError) as cm:
+            self.urlopen("https://localhost:%s/bizarre" % handler.port,
+                         cafile=CERT_fakehostname)
+        # Good cert, but mismatching hostname
+        handler = self.start_https_server(certfile=CERT_fakehostname)
+        with self.assertRaises(ssl.CertificateError) as cm:
+            self.urlopen("https://localhost:%s/bizarre" % handler.port,
+                         cafile=CERT_fakehostname)
     def test_sending_headers(self):
         handler = self.start_server([(200, [], "we don't care")])
diff -up Python-2.7.5/Doc/library/urllib2.rst.ctx Python-2.7.5/Doc/library/urllib2.rst
--- Python-2.7.5/Doc/library/urllib2.rst.ctx    2015-03-30 10:20:15.958747076 +0200
+++ Python-2.7.5/Doc/library/urllib2.rst    2015-03-30 10:30:46.172779366 +0200
@@ -22,13 +22,10 @@ redirections, cookies and more.
 The :mod:`urllib2` module defines the following functions:
-.. function:: urlopen(url[, data][, timeout])
+.. function:: urlopen(url[, data[, timeout[, cafile[, capath[, cadefault[, context]]]]]])
    Open the URL *url*, which can be either a string or a :class:`Request` object.
-   .. warning::
-      HTTPS requests do not do any verification of the server's certificate.
-
    *data* may be a string specifying additional data to send to the server, or
    ``None`` if no such data is needed.  Currently HTTP requests are the only ones
    that use *data*; the HTTP request will be a POST instead of a GET when the
@@ -41,7 +38,19 @@ The :mod:`urllib2` module defines the fo
    The optional *timeout* parameter specifies a timeout in seconds for blocking
    operations like the connection attempt (if not specified, the global default
    timeout setting will be used).  This actually only works for HTTP, HTTPS and
-   FTP connections.
+         FTP connections.
+
+   If *context* is specified, it must be a :class:`ssl.SSLContext` instance
+   describing the various SSL options. See :class:`~httplib.HTTPSConnection` for
+   more details.
+
+   The optional *cafile* and *capath* parameters specify a set of trusted CA
+   certificates for HTTPS requests.  *cafile* should point to a single file
+   containing a bundle of CA certificates, whereas *capath* should point to a
+   directory of hashed certificate files.  More information can be found in
+   :meth:`ssl.SSLContext.load_verify_locations`.
+
+   The *cadefault* parameter is ignored.
    This function returns a file-like object with two additional methods:
@@ -66,7 +75,10 @@ The :mod:`urllib2` module defines the fo
    handled through the proxy.
    .. versionchanged:: 2.6
-      *timeout* was added.
+     *timeout* was added.
+
+   .. versionchanged:: 2.7.9
+      *cafile*, *capath*, *cadefault*, and *context* were added.
 .. function:: install_opener(opener)
@@ -280,9 +292,13 @@ The following classes are provided:
    A class to handle opening of HTTP URLs.
-.. class:: HTTPSHandler()
+.. class:: HTTPSHandler([debuglevel[, context[, check_hostname]]])
+
+   A class to handle opening of HTTPS URLs. *context* and *check_hostname* have
+   the same meaning as for :class:`httplib.HTTPSConnection`.
-   A class to handle opening of HTTPS URLs.
+   .. versionchanged:: 2.7.9
+      *context* and *check_hostname* were added.
 .. class:: FileHandler()
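
The HTTPSHandler signature documented above can be wired into a custom opener once the patch is applied. A rough sketch, not part of the patch itself; the host name and CA bundle path are placeholders:

    import ssl
    import urllib2

    # Build a verifying context and hand it to the HTTPS handler explicitly.
    ctx = ssl.create_default_context(cafile='/path/to/ca-bundle.pem')
    opener = urllib2.build_opener(urllib2.HTTPSHandler(context=ctx))
    response = opener.open('https://example.org/')
    print response.getcode()
    response.close()
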
diff -up Python-2.7.5/Lib/httplib.py.ctx Python-2.7.5/Lib/httplib.py
--- Python-2.7.5/Lib/httplib.py.ctx    2015-03-30 10:19:52.551521393 +0200
+++ Python-2.7.5/Lib/httplib.py    2015-03-30 10:30:05.045386751 +0200
@@ -1159,21 +1159,44 @@ else:
         def __init__(self, host, port=None, key_file=None, cert_file=None,
                      strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
-                     source_address=None):
+                     source_address=None, context=None, check_hostname=None):
             HTTPConnection.__init__(self, host, port, strict, timeout,
                                     source_address)
             self.key_file = key_file
             self.cert_file = cert_file
+            if context is None:
+                context = ssl.create_default_context()
+            will_verify = context.verify_mode != ssl.CERT_NONE
+            if check_hostname is None:
+                check_hostname = will_verify
+            elif check_hostname and not will_verify:
+                raise ValueError("check_hostname needs a SSL context with "
+                                 "either CERT_OPTIONAL or CERT_REQUIRED")
+            if key_file or cert_file:
+                context.load_cert_chain(cert_file, key_file)
+            self._context = context
+            self._check_hostname = check_hostname
         def connect(self):
             "Connect to a host on a given (SSL) port."
-            sock = socket.create_connection((self.host, self.port),
-                                            self.timeout, self.source_address)
+            HTTPConnection.connect(self)
+
             if self._tunnel_host:
-                self.sock = sock
-                self._tunnel()
-            self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file)
+                server_hostname = self._tunnel_host
+            else:
+                server_hostname = self.host
+            sni_hostname = server_hostname if ssl.HAS_SNI else None
+
+            self.sock = self._context.wrap_socket(self.sock,
+                                                  server_hostname=sni_hostname)
+            if not self._context.check_hostname and self._check_hostname:
+                try:
+                    ssl.match_hostname(self.sock.getpeercert(), server_hostname)
+                except Exception:
+                    self.sock.shutdown(socket.SHUT_RDWR)
+                    self.sock.close()
+                    raise
     __all__.append("HTTPSConnection")
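
The httplib.py hunk above is what lets HTTPSConnection accept a *context* and perform hostname matching. A minimal usage sketch under the patched API; the host and certificate path are placeholders, and omitting *context* already gives a verifying default:

    import ssl
    import httplib

    # Explicit context: verify the peer against a specific CA bundle.
    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    ctx.verify_mode = ssl.CERT_REQUIRED
    ctx.load_verify_locations('/path/to/ca-bundle.pem')

    conn = httplib.HTTPSConnection('example.org', 443, context=ctx)
    conn.request('GET', '/')
    print conn.getresponse().status
    conn.close()
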
diff -up Python-2.7.5/Lib/test/test_httplib.py.ctx Python-2.7.5/Lib/test/test_httplib.py
--- Python-2.7.5/Lib/test/test_httplib.py.ctx    2015-03-30 10:19:12.905139139 +0200
+++ Python-2.7.5/Lib/test/test_httplib.py    2015-03-30 10:27:41.822017804 +0200
@@ -1,6 +1,7 @@
 import httplib
 import array
 import httplib
+import os
 import StringIO
 import socket
 import errno
@@ -10,6 +11,14 @@ TestCase = unittest.TestCase
 from test import test_support
+here = os.path.dirname(__file__)
+# Self-signed cert file for 'localhost'
+CERT_localhost = os.path.join(here, 'keycert.pem')
+# Self-signed cert file for 'fakehostname'
+CERT_fakehostname = os.path.join(here, 'keycert2.pem')
+# Self-signed cert file for self-signed.pythontest.net
+CERT_selfsigned_pythontestdotnet = os.path.join(here, 'selfsigned_pythontestdotnet.pem')
+
 HOST = test_support.HOST
 class FakeSocket:
@@ -493,40 +502,147 @@ class TimeoutTest(TestCase):
         httpConn.close()
-class HTTPSTimeoutTest(TestCase):
+class HTTPSTest(TestCase):
 # XXX Here should be tests for HTTPS, there isn't any right now!
+    def setUp(self):
+        if not hasattr(httplib, 'HTTPSConnection'):
+            self.skipTest('ssl support required')
+
+    def make_server(self, certfile):
+        from test.ssl_servers import make_https_server
+        return make_https_server(self, certfile=certfile)
     def test_attributes(self):
-        # simple test to check it's storing it
-        if hasattr(httplib, 'HTTPSConnection'):
-            h = httplib.HTTPSConnection(HOST, TimeoutTest.PORT, timeout=30)
-            self.assertEqual(h.timeout, 30)
+        # simple test to check it's storing the timeout
+        h = httplib.HTTPSConnection(HOST, TimeoutTest.PORT, timeout=30)
+        self.assertEqual(h.timeout, 30)
+
+    def test_networked(self):
+        # Default settings: requires a valid cert from a trusted CA
+        import ssl
+        test_support.requires('network')
+        with test_support.transient_internet('self-signed.pythontest.net'):
+            h = httplib.HTTPSConnection('self-signed.pythontest.net', 443)
+            with self.assertRaises(ssl.SSLError) as exc_info:
+                h.request('GET', '/')
+            self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED')
+
+    def test_networked_noverification(self):
+        # Switch off cert verification
+        import ssl
+        test_support.requires('network')
+        with test_support.transient_internet('self-signed.pythontest.net'):
+            context = ssl._create_stdlib_context()
+            h = httplib.HTTPSConnection('self-signed.pythontest.net', 443,
+                                        context=context)
+            h.request('GET', '/')
+            resp = h.getresponse()
+            self.assertIn('nginx', resp.getheader('server'))
+
+    def test_networked_trusted_by_default_cert(self):
+        # Default settings: requires a valid cert from a trusted CA
+        test_support.requires('network')
+        with test_support.transient_internet('www.python.org'):
+            h = httplib.HTTPSConnection('www.python.org', 443)
+            h.request('GET', '/')
+            resp = h.getresponse()
+            content_type = resp.getheader('content-type')
+            self.assertIn('text/html', content_type)
+
+    def test_networked_good_cert(self):
+        # We feed the server's cert as a validating cert
+        import ssl
+        test_support.requires('network')
+        with test_support.transient_internet('self-signed.pythontest.net'):
+            context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+            context.verify_mode = ssl.CERT_REQUIRED
+            context.load_verify_locations(CERT_selfsigned_pythontestdotnet)
+            h = httplib.HTTPSConnection('self-signed.pythontest.net', 443, context=context)
+            h.request('GET', '/')
+            resp = h.getresponse()
+            server_string = resp.getheader('server')
+            self.assertIn('nginx', server_string)
+
+    def test_networked_bad_cert(self):
+        # We feed a "CA" cert that is unrelated to the server's cert
+        import ssl
+        test_support.requires('network')
+        with test_support.transient_internet('self-signed.pythontest.net'):
+            context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+            context.verify_mode = ssl.CERT_REQUIRED
+            context.load_verify_locations(CERT_localhost)
+            h = httplib.HTTPSConnection('self-signed.pythontest.net', 443, context=context)
+            with self.assertRaises(ssl.SSLError) as exc_info:
+                h.request('GET', '/')
+            self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED')
+
+    def test_local_unknown_cert(self):
+        # The custom cert isn't known to the default trust bundle
+        import ssl
+        server = self.make_server(CERT_localhost)
+        h = httplib.HTTPSConnection('localhost', server.port)
+        with self.assertRaises(ssl.SSLError) as exc_info:
+            h.request('GET', '/')
+        self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED')
+
+    def test_local_good_hostname(self):
+        # The (valid) cert validates the HTTP hostname
+        import ssl
+        server = self.make_server(CERT_localhost)
+        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+        context.verify_mode = ssl.CERT_REQUIRED
+        context.load_verify_locations(CERT_localhost)
+        h = httplib.HTTPSConnection('localhost', server.port, context=context)
+        h.request('GET', '/nonexistent')
+        resp = h.getresponse()
+        self.assertEqual(resp.status, 404)
+
+    def test_local_bad_hostname(self):
+        # The (valid) cert doesn't validate the HTTP hostname
+        import ssl
+        server = self.make_server(CERT_fakehostname)
+        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+        context.verify_mode = ssl.CERT_REQUIRED
+        context.load_verify_locations(CERT_fakehostname)
+        h = httplib.HTTPSConnection('localhost', server.port, context=context)
+        with self.assertRaises(ssl.CertificateError):
+            h.request('GET', '/')
+        # Same with explicit check_hostname=True
+        h = httplib.HTTPSConnection('localhost', server.port, context=context,
+                                   check_hostname=True)
+        with self.assertRaises(ssl.CertificateError):
+            h.request('GET', '/')
+        # With check_hostname=False, the mismatching is ignored
+        h = httplib.HTTPSConnection('localhost', server.port, context=context,
+                                   check_hostname=False)
+        h.request('GET', '/nonexistent')
+        resp = h.getresponse()
+        self.assertEqual(resp.status, 404)
-    @unittest.skipIf(not hasattr(httplib, 'HTTPS'), 'httplib.HTTPS not available')
     def test_host_port(self):
         # Check invalid host_port
-        # Note that httplib does not accept user:password@ in the host-port.
         for hp in ("www.python.org:abc", "user:password@www.python.org"):
-            self.assertRaises(httplib.InvalidURL, httplib.HTTP, hp)
+            self.assertRaises(httplib.InvalidURL, httplib.HTTPSConnection, hp)
-        for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000", "fe80::207:e9ff:fe9b",
-                          8000),
-                         ("pypi.python.org:443", "pypi.python.org", 443),
-                         ("pypi.python.org", "pypi.python.org", 443),
-                         ("pypi.python.org:", "pypi.python.org", 443),
-                         ("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 443)):
-            http = httplib.HTTPS(hp)
-            c = http._conn
-            if h != c.host:
-                self.fail("Host incorrectly parsed: %s != %s" % (h, c.host))
-            if p != c.port:
-                self.fail("Port incorrectly parsed: %s != %s" % (p, c.host))
+        for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000",
+                          "fe80::207:e9ff:fe9b", 8000),
+                         ("www.python.org:443", "www.python.org", 443),
+                         ("www.python.org:", "www.python.org", 443),
+                         ("www.python.org", "www.python.org", 443),
+                         ("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 443),
+                         ("[fe80::207:e9ff:fe9b]:", "fe80::207:e9ff:fe9b",
+                             443)):
+            c = httplib.HTTPSConnection(hp)
+            self.assertEqual(h, c.host)
+            self.assertEqual(p, c.port)
+
+@test_support.reap_threads
 def test_main(verbose=None):
     test_support.run_unittest(HeaderTests, OfflineTest, BasicTest, TimeoutTest,
-                              HTTPSTimeoutTest, SourceAddressTest)
+                              HTTPSTest, SourceAddressTest)
 if __name__ == '__main__':
     test_main()
diff -up Python-2.7.5/Lib/test/test_ssl.py.ctx Python-2.7.5/Lib/test/test_ssl.py
--- Python-2.7.5/Lib/test/test_ssl.py.ctx    2015-03-30 10:18:55.677973042 +0200
+++ Python-2.7.5/Lib/test/test_ssl.py    2015-03-30 10:22:02.323772604 +0200
@@ -14,7 +14,7 @@ import os
 import errno
 import pprint
 import tempfile
-import urllib
+import urllib2
 import traceback
 import weakref
 import platform
@@ -2332,9 +2332,10 @@ else:
                 d1 = f.read()
             d2 = ''
             # now fetch the same data from the HTTPS server
-            url = 'https://%s:%d/%s' % (
-                HOST, server.port, os.path.split(CERTFILE)[1])
-            f = urllib.urlopen(url)
+            url = 'https://localhost:%d/%s' % (
+                server.port, os.path.split(CERTFILE)[1])
+            context = ssl.create_default_context(cafile=CERTFILE)
+            f = urllib2.urlopen(url, context=context)
             try:
                 dlen = f.info().getheader("content-length")
                 if dlen and (int(dlen) > 0):
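
End of the urllib2/httplib context patch. For reference, a hedged sketch of the two call styles it enables in urllib2.urlopen(); the URL and CA path are placeholders:

    import ssl
    import urllib2

    # Style 1: let urlopen() build a verifying context from a CA file.
    f = urllib2.urlopen('https://example.org/', cafile='/path/to/ca-bundle.pem')
    print f.getcode()
    f.close()

    # Style 2: pass a preconfigured SSLContext instead (mutually exclusive
    # with cafile/capath/cadefault, which raises ValueError).
    ctx = ssl.create_default_context(cafile='/path/to/ca-bundle.pem')
    f = urllib2.urlopen('https://example.org/', context=ctx)
    print f.read(100)
    f.close()
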
SOURCES/00221-pep466-backport-sslwrap-c-ssl.patch
New file
@@ -0,0 +1,384 @@
diff -up Python-2.7.5/Modules/_ssl.c.wrap Python-2.7.5/Modules/_ssl.c
--- Python-2.7.5/Modules/_ssl.c.wrap    2015-06-25 09:09:59.646325880 +0200
+++ Python-2.7.5/Modules/_ssl.c    2015-06-25 10:33:20.349273370 +0200
@@ -224,6 +224,19 @@ typedef struct {
     enum py_ssl_server_or_client socket_type;
 } PySSLSocket;
+typedef struct {
+    PyObject_HEAD
+    PySocketSockObject *Socket;         /* Socket on which we're layered */
+    PySSLContext*       ctx;
+    SSL*                ssl;
+    X509*               peer_cert;
+    char                server[X509_NAME_MAXLEN];
+    char                issuer[X509_NAME_MAXLEN];
+    int                 shutdown_seen_zero;
+
+} PySSLObject;
+
+static PyTypeObject PySSL_Type;
 static PyTypeObject PySSLContext_Type;
 static PyTypeObject PySSLSocket_Type;
@@ -455,6 +468,138 @@ _setSSLError (char *errstr, int errcode,
  * SSL objects
  */
+static PyObject *
+context_new_args(PyTypeObject *type, int proto_version);
+
+static PyObject *
+load_cert_chain_args(PySSLContext *self, char *certfile_bytes, char *keyfile_bytes, PyObject *password);
+
+static PyObject *
+set_ciphers_args(PySSLContext *self, const char *cipherlist);
+
+static PyObject *
+load_verify_locations_args(PySSLContext *self, PyObject *cafile, PyObject *capath, PyObject *cadata);
+
+static int
+set_verify_mode_args(PySSLContext *self, int n);
+
+static PySSLObject *
+newPySSLObject(PySocketSockObject *Sock, char *key_file, char *cert_file,
+               enum py_ssl_server_or_client socket_type,
+               enum py_ssl_cert_requirements certreq,
+               enum py_ssl_version proto_version,
+               PyObject *cacerts_file, const char *ciphers)
+{
+    PySSLObject *self;
+    char *errstr = NULL;
+    SSL_CTX *ctx = NULL;
+
+
+    self = PyObject_New(PySSLObject, &PySSL_Type); /* Create new object */
+    if (self == NULL)
+        return NULL;
+    memset(self->server, '\0', sizeof(char) * X509_NAME_MAXLEN);
+    memset(self->issuer, '\0', sizeof(char) * X509_NAME_MAXLEN);
+
+    self->peer_cert = NULL;
+    self->ssl = NULL;
+    self->ctx = NULL;
+    self->Socket = NULL;
+    self->shutdown_seen_zero = 0;
+
+    /* Make sure the SSL error state is initialized */
+    (void) ERR_get_state();
+    ERR_clear_error();
+
+    if ((socket_type == PY_SSL_SERVER) &&
+        ((key_file == NULL) || (cert_file == NULL))) {
+        errstr = ERRSTR("Both the key & certificate files "
+                        "must be specified for server-side operation");
+        goto fail;
+    }
+
+
+    if ((key_file && !cert_file) || (!key_file && cert_file)) {
+        errstr = ERRSTR("Both the key & certificate files "
+                        "must be specified");
+        goto fail;
+    }
+
+
+    PyObject *sslctx = context_new_args(&PySSLContext_Type, proto_version);
+    if ((sslctx != NULL) && (PyObject_TypeCheck(sslctx, &PySSLContext_Type))) {
+#if !HAVE_SNI
+        errstr = ERRSTR("setting a socket's "
+                        "context is not supported by your OpenSSL library");
+        goto fail;
+#else
+        self->ctx = (PySSLContext *) sslctx;
+        ctx = self->ctx->ctx;
+#endif
+    } else {
+        errstr = ERRSTR("The value must be a SSLContext");
+        goto fail;
+    }
+
+    if (key_file) {
+        load_cert_chain_args(self->ctx, cert_file, key_file, NULL);
+    }
+
+    if (ciphers != NULL) {
+        set_ciphers_args(self->ctx, ciphers);
+    }
+
+
+
+    if (certreq != PY_SSL_CERT_NONE) {
+        if (cacerts_file == Py_None) {
+            errstr = ERRSTR("No root certificates specified for "
+                            "verification of other-side certificates.");
+            goto fail;
+        } else {
+            load_verify_locations_args(self->ctx, cacerts_file, NULL, NULL);
+            }
+    }
+
+    /* ssl compatibility */
+    SSL_CTX_set_options(ctx,
+                        SSL_OP_ALL & ~SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS);
+
+    set_verify_mode_args(self->ctx, certreq);
+
+    PySSL_BEGIN_ALLOW_THREADS
+    self->ssl = SSL_new(ctx); /* New ssl struct */
+    PySSL_END_ALLOW_THREADS
+    SSL_set_fd(self->ssl, Sock->sock_fd);       /* Set the socket for SSL */
+#ifdef SSL_MODE_AUTO_RETRY
+    SSL_set_mode(self->ssl, SSL_MODE_AUTO_RETRY);
+#endif
+
+    /* If the socket is in non-blocking mode or timeout mode, set the BIO
+     * to non-blocking mode (blocking is the default)
+     */
+    if (Sock->sock_timeout >= 0.0) {
+        /* Set both the read and write BIO's to non-blocking mode */
+        BIO_set_nbio(SSL_get_rbio(self->ssl), 1);
+        BIO_set_nbio(SSL_get_wbio(self->ssl), 1);
+    }
+   PySSL_BEGIN_ALLOW_THREADS
+   if (socket_type == PY_SSL_CLIENT)
+       SSL_set_connect_state(self->ssl);
+   else
+       SSL_set_accept_state(self->ssl);
+   PySSL_END_ALLOW_THREADS
+
+   self->Socket = Sock;
+   Py_INCREF(self->Socket);
+   return self;
+ fail:
+    if (errstr)
+        PyErr_SetString(PySSLErrorObject, errstr);
+    Py_DECREF(self);
+    return NULL;
+}
+
 static PySSLSocket *
 newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock,
                enum py_ssl_server_or_client socket_type,
@@ -526,6 +671,47 @@ newPySSLSocket(PySSLContext *sslctx, PyS
 }
+
+
+static PyObject *
+PySSL_sslwrap(PyObject *self, PyObject *args)
+{
+    PySocketSockObject *Sock;
+    int server_side = 0;
+    int verification_mode = PY_SSL_CERT_NONE;
+    int protocol = PY_SSL_VERSION_SSL23;
+    char *key_file = NULL;
+    char *cert_file = NULL;
+    PyObject *cacerts_file;
+    const char *ciphers = NULL;
+
+    if (!PyArg_ParseTuple(args, "O!i|zziizz:sslwrap",
+                          PySocketModule.Sock_Type,
+                          &Sock,
+                          &server_side,
+                          &key_file, &cert_file,
+                          &verification_mode, &protocol,
+                          &cacerts_file, &ciphers))
+        return NULL;
+
+    /*
+    fprintf(stderr,
+        "server_side is %d, keyfile %p, certfile %p, verify_mode %d, "
+        "protocol %d, certs %p\n",
+        server_side, key_file, cert_file, verification_mode,
+        protocol, cacerts_file);
+     */
+
+    return (PyObject *) newPySSLObject(Sock, key_file, cert_file,
+                                       server_side, verification_mode,
+                                       protocol, cacerts_file,
+                                       ciphers);
+}
+
+PyDoc_STRVAR(ssl_doc,
+"sslwrap(socket, server_side, [keyfile, certfile, certs_mode, protocol,\n"
+"                              cacertsfile, ciphers]) -> sslobject");
+
 /* SSL object methods */
 static PyObject *PySSL_SSLdo_handshake(PySSLSocket *self)
@@ -1922,6 +2108,11 @@ static PyMethodDef PySSLMethods[] = {
     {NULL, NULL}
 };
+static PyObject *PySSL_getattr(PySSLObject *self, char *name)
+{
+    return Py_FindMethod(PySSLMethods, (PyObject *)self, name);
+}
+
 static PyTypeObject PySSLSocket_Type = {
     PyVarObject_HEAD_INIT(NULL, 0)
     "_ssl._SSLSocket",                  /*tp_name*/
@@ -1956,25 +2147,49 @@ static PyTypeObject PySSLSocket_Type = {
     ssl_getsetlist,                     /*tp_getset*/
 };
+static PyTypeObject PySSL_Type = {
+    PyVarObject_HEAD_INIT(NULL, 0)
+    "ssl.SSLContext",                   /*tp_name*/
+    sizeof(PySSLObject),                /*tp_basicsize*/
+    0,                                  /*tp_itemsize*/
+    /* methods */
+    (destructor)PySSL_dealloc,          /*tp_dealloc*/
+    0,                                  /*tp_print*/
+    (getattrfunc)PySSL_getattr,         /*tp_getattr*/
+    0,                                  /*tp_setattr*/
+    0,                                  /*tp_compare*/
+    0,                                  /*tp_repr*/
+    0,                                  /*tp_as_number*/
+    0,                                  /*tp_as_sequence*/
+    0,                                  /*tp_as_mapping*/
+    0,                                  /*tp_hash*/
+};
+
 /*
  * _SSLContext objects
  */
+
 static PyObject *
 context_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 {
     char *kwlist[] = {"protocol", NULL};
-    PySSLContext *self;
     int proto_version = PY_SSL_VERSION_SSL23;
-    long options;
-    SSL_CTX *ctx = NULL;
     if (!PyArg_ParseTupleAndKeywords(
         args, kwds, "i:_SSLContext", kwlist,
         &proto_version))
         return NULL;
+    return  context_new_args(type, proto_version);
+}
+static PyObject *
+context_new_args(PyTypeObject *type, int proto_version)
+{
+    SSL_CTX *ctx = NULL;
+    PySSLContext *self;
+    long options;
     PySSL_BEGIN_ALLOW_THREADS
     if (proto_version == PY_SSL_VERSION_TLS1)
         ctx = SSL_CTX_new(TLSv1_method());
@@ -2082,11 +2297,16 @@ context_dealloc(PySSLContext *self)
 static PyObject *
 set_ciphers(PySSLContext *self, PyObject *args)
 {
-    int ret;
     const char *cipherlist;
     if (!PyArg_ParseTuple(args, "s:set_ciphers", &cipherlist))
         return NULL;
+    return set_ciphers_args(self, cipherlist);
+    }
+
+static PyObject *
+set_ciphers_args(PySSLContext *self, const char *cipherlist){
+    int ret;
     ret = SSL_CTX_set_cipher_list(self->ctx, cipherlist);
     if (ret == 0) {
         /* Clearing the error queue is necessary on some OpenSSL versions,
@@ -2204,9 +2424,17 @@ get_verify_mode(PySSLContext *self, void
 static int
 set_verify_mode(PySSLContext *self, PyObject *arg, void *c)
 {
-    int n, mode;
+    int n;
     if (!PyArg_Parse(arg, "i", &n))
         return -1;
+    return set_verify_mode_args(self, n);
+    }
+
+static int
+set_verify_mode_args(PySSLContext *self, int n)
+{
+    int mode;
+
     if (n == PY_SSL_CERT_NONE)
         mode = SSL_VERIFY_NONE;
     else if (n == PY_SSL_CERT_OPTIONAL)
@@ -2434,10 +2662,6 @@ load_cert_chain(PySSLContext *self, PyOb
     char *kwlist[] = {"certfile", "keyfile", "password", NULL};
     PyObject *password = NULL;
     char *certfile_bytes = NULL, *keyfile_bytes = NULL;
-    pem_password_cb *orig_passwd_cb = self->ctx->default_passwd_callback;
-    void *orig_passwd_userdata = self->ctx->default_passwd_callback_userdata;
-    _PySSLPasswordInfo pw_info = { NULL, NULL, NULL, 0, 0 };
-    int r;
     errno = 0;
     ERR_clear_error();
@@ -2447,6 +2671,17 @@ load_cert_chain(PySSLContext *self, PyOb
             Py_FileSystemDefaultEncoding, &keyfile_bytes,
             &password))
         return NULL;
+    return load_cert_chain_args(self, certfile_bytes, keyfile_bytes, password);
+    }
+
+static PyObject *
+load_cert_chain_args(PySSLContext *self, char *certfile_bytes, char *keyfile_bytes ,PyObject *password){
+
+    pem_password_cb *orig_passwd_cb = self->ctx->default_passwd_callback;
+    void *orig_passwd_userdata = self->ctx->default_passwd_callback_userdata;
+    _PySSLPasswordInfo pw_info = { NULL, NULL, NULL, 0, 0 };
+    int r;
+
     if (password && password != Py_None) {
         if (PyCallable_Check(password)) {
             pw_info.callable = password;
@@ -2598,22 +2833,27 @@ _add_ca_certs(PySSLContext *self, void *
     return retval;
 }
-
 static PyObject *
 load_verify_locations(PySSLContext *self, PyObject *args, PyObject *kwds)
 {
     char *kwlist[] = {"cafile", "capath", "cadata", NULL};
     PyObject *cadata = NULL, *cafile = NULL, *capath = NULL;
-    PyObject *cafile_bytes = NULL, *capath_bytes = NULL;
-    const char *cafile_buf = NULL, *capath_buf = NULL;
-    int r = 0, ok = 1;
-
-    errno = 0;
     if (!PyArg_ParseTupleAndKeywords(args, kwds,
             "|OOO:load_verify_locations", kwlist,
             &cafile, &capath, &cadata))
         return NULL;
+    return load_verify_locations_args(self, cafile, capath, cadata);
+    }
+
+static PyObject *
+load_verify_locations_args(PySSLContext *self, PyObject *cafile, PyObject *capath, PyObject *cadata)
+{
+    PyObject *cafile_bytes = NULL, *capath_bytes = NULL;
+    const char *cafile_buf = NULL, *capath_buf = NULL;
+    int r = 0, ok = 1;
+
+    errno = 0;
     if (cafile == Py_None)
         cafile = NULL;
     if (capath == Py_None)
@@ -3688,6 +3928,8 @@ PySSL_enum_crls(PyObject *self, PyObject
 /* List of functions exported by this module. */
 static PyMethodDef PySSL_methods[] = {
+    {"sslwrap",             PySSL_sslwrap,
+     METH_VARARGS, ssl_doc},
     {"_test_decode_cert",       PySSL_test_decode_certificate,
      METH_VARARGS},
 #ifdef HAVE_OPENSSL_RAND
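
The hunk above re-adds the legacy _ssl.sslwrap entry point alongside the new SSLContext code so that callers of the old module-level wrapping style keep working. For orientation, the Python-level call it ultimately serves looks roughly like this; the host and CA path are placeholders:

    import socket
    import ssl

    sock = socket.create_connection(('example.org', 443))
    # Old-style API: no SSLContext object, options go straight to wrap_socket().
    ssl_sock = ssl.wrap_socket(sock,
                               cert_reqs=ssl.CERT_REQUIRED,
                               ca_certs='/path/to/ca-bundle.pem')
    ssl_sock.sendall('GET / HTTP/1.0\r\nHost: example.org\r\n\r\n')
    print ssl_sock.recv(200)
    ssl_sock.close()
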
SOURCES/00222-add-2014-bit-dh-key.patch
New file
@@ -0,0 +1,49 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1427947446 14400
# Node ID 4f2391e866434a94ca6d87dff5ea01fcab91d08a
# Parent  5d88f653187203d85f4cfd4877f093af3919035b
replace 512 bit dh key with a 2014 bit one (closes #23844)
Patch by Cédric Krier.
diff --git a/Lib/test/dh1024.pem b/Lib/test/dh1024.pem
new file mode 100644
--- /dev/null
+++ b/Lib/test/dh1024.pem
@@ -0,0 +1,7 @@
+-----BEGIN DH PARAMETERS-----
+MIGHAoGBAIbzw1s9CT8SV5yv6L7esdAdZYZjPi3qWFs61CYTFFQnf2s/d09NYaJt
+rrvJhIzWavqnue71qXCf83/J3nz3FEwUU/L0mGyheVbsSHiI64wUo3u50wK5Igo0
+RNs/LD0irs7m0icZ//hijafTU+JOBiuA8zMI+oZfU7BGuc9XrUprAgEC
+-----END DH PARAMETERS-----
+
+Generated with: openssl dhparam -out dh1024.pem  1024
diff --git a/Lib/test/dh512.pem b/Lib/test/dh512.pem
deleted file mode 100644
--- a/Lib/test/dh512.pem
+++ /dev/null
@@ -1,9 +0,0 @@
------BEGIN DH PARAMETERS-----
-MEYCQQD1Kv884bEpQBgRjXyEpwpy1obEAxnIByl6ypUM2Zafq9AKUJsCRtMIPWak
-XUGfnHy9iUsiGSa6q6Jew1XpKgVfAgEC
------END DH PARAMETERS-----
-
-These are the 512 bit DH parameters from "Assigned Number for SKIP Protocols"
-(http://www.skip-vpn.org/spec/numbers.html).
-See there for how they were generated.
-Note that g is not a generator, but this is not a problem since p is a safe prime.
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -66,7 +66,7 @@ BADKEY = data_file("badkey.pem")
 NOKIACERT = data_file("nokia.pem")
 NULLBYTECERT = data_file("nullbytecert.pem")
-DHFILE = data_file("dh512.pem")
+DHFILE = data_file("dh1024.pem")
 BYTES_DHFILE = DHFILE.encode(sys.getfilesystemencoding())
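
The swapped-in file is only a test fixture, but it feeds the server-side DH setup exposed by the PEP 466 SSLContext backport. A hedged sketch of that API; all paths are placeholders:

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    ctx.load_cert_chain('/path/to/server-cert.pem', '/path/to/server-key.pem')
    # Ephemeral Diffie-Hellman ciphers need DH parameters loaded on the server.
    ctx.load_dh_params('/path/to/dh1024.pem')
    ctx.set_ciphers('DHE-RSA-AES128-SHA')
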
SOURCES/00223-pep476-verify-certs-by-default.patch
New file
@@ -0,0 +1,153 @@
# HG changeset patch
# User Benjamin Peterson <benjamin@python.org>
# Date 1416798122 21600
# Node ID fb83916c3ea12899569e88a7505469a90ab1f674
# Parent  c84f36a5f556867c2ec50453dc879a500032d377
pep 476: verify certificates by default (#22417)
diff --git a/Doc/library/httplib.rst b/Doc/library/httplib.rst
--- a/Doc/library/httplib.rst
+++ b/Doc/library/httplib.rst
@@ -90,9 +90,6 @@ The module provides the following classe
       server's certificate.  If you want to change that behaviour, you can
       explicitly set *check_hostname* to False.
-   .. warning::
-      This does not do any verification of the server's certificate.
-
    .. versionadded:: 2.0
    .. versionchanged:: 2.6
@@ -104,6 +101,11 @@ The module provides the following classe
    .. versionchanged:: 2.7.9
       *context* and *check_hostname* were added.
+      This class now performs all the necessary certificate and hostname checks
+      by default. To revert to the previous, unverified, behavior
+      :func:`ssl._create_unverified_context` can be passed to the *context*
+      parameter.
+
 .. class:: HTTPResponse(sock, debuglevel=0, strict=0)
diff --git a/Lib/httplib.py b/Lib/httplib.py
--- a/Lib/httplib.py
+++ b/Lib/httplib.py
@@ -1193,7 +1193,7 @@ else:
             self.key_file = key_file
             self.cert_file = cert_file
             if context is None:
-                context = ssl.create_default_context()
+                context = ssl._create_default_https_context()
             will_verify = context.verify_mode != ssl.CERT_NONE
             if check_hostname is None:
                 check_hostname = will_verify
diff --git a/Lib/ssl.py b/Lib/ssl.py
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -427,8 +427,7 @@ def create_default_context(purpose=Purpo
         context.load_default_certs(purpose)
     return context
-
-def _create_stdlib_context(protocol=PROTOCOL_SSLv23, cert_reqs=None,
+def _create_unverified_context(protocol=PROTOCOL_SSLv23, cert_reqs=None,
                            check_hostname=False, purpose=Purpose.SERVER_AUTH,
                            certfile=None, keyfile=None,
                            cafile=None, capath=None, cadata=None):
@@ -469,6 +468,14 @@ def _create_stdlib_context(protocol=PROT
     return context
+# Used by http.client if no context is explicitly passed.
+_create_default_https_context = create_default_context
+
+
+# Backwards compatibility alias, even though it's not a public name.
+_create_stdlib_context = _create_unverified_context
+
+
 class SSLSocket(socket):
     """This class implements a subtype of socket.socket that wraps
     the underlying OS socket in an SSL context when necessary, and
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -1,10 +1,9 @@
 import httplib
 import array
-import httplib
-import os
 import StringIO
 import socket
 import errno
+import os
 import unittest
 TestCase = unittest.TestCase
diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py
--- a/Lib/test/test_urllib2_localnet.py
+++ b/Lib/test/test_urllib2_localnet.py
@@ -5,6 +5,7 @@ import urllib2
 import BaseHTTPServer
 import unittest
 import hashlib
+import ssl
 from test import test_support
@@ -562,15 +563,37 @@ class TestUrlopen(BaseTestCase):
                             cafile=CERT_localhost)
         self.assertEqual(data, b"we care a bit")
         # Bad cert
-        with self.assertRaises(urllib2.URLError) as cm:
+        with self.assertRaises(urllib2.URLError):
             self.urlopen("https://localhost:%s/bizarre" % handler.port,
                          cafile=CERT_fakehostname)
         # Good cert, but mismatching hostname
         handler = self.start_https_server(certfile=CERT_fakehostname)
-        with self.assertRaises(ssl.CertificateError) as cm:
+        with self.assertRaises(ssl.CertificateError):
             self.urlopen("https://localhost:%s/bizarre" % handler.port,
                          cafile=CERT_fakehostname)
+    def test_https_with_cadefault(self):
+        handler = self.start_https_server(certfile=CERT_localhost)
+        # Self-signed cert should fail verification with system certificate store
+        with self.assertRaises(urllib2.URLError):
+            self.urlopen("https://localhost:%s/bizarre" % handler.port,
+                         cadefault=True)
+
+    def test_https_sni(self):
+        if ssl is None:
+            self.skipTest("ssl module required")
+        if not ssl.HAS_SNI:
+            self.skipTest("SNI support required in OpenSSL")
+        sni_name = [None]
+        def cb_sni(ssl_sock, server_name, initial_context):
+            sni_name[0] = server_name
+        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
+        context.set_servername_callback(cb_sni)
+        handler = self.start_https_server(context=context, certfile=CERT_localhost)
+        context = ssl.create_default_context(cafile=CERT_localhost)
+        self.urlopen("https://localhost:%s" % handler.port, context=context)
+        self.assertEqual(sni_name[0], "localhost")
+
     def test_sending_headers(self):
         handler = self.start_server([(200, [], "we don't care")])
diff -up Python-2.7.5/Doc/library/xmlrpclib.rst.ver Python-2.7.5/Doc/library/xmlrpclib.rst
--- Python-2.7.5/Doc/library/xmlrpclib.rst.ver    2015-03-30 13:59:29.243493601 +0200
+++ Python-2.7.5/Doc/library/xmlrpclib.rst    2015-03-30 14:03:40.509532180 +0200
@@ -34,6 +34,10 @@ between conformable Python objects and X
    constructed data.  If you need to parse untrusted or unauthenticated data see
    :ref:`xml-vulnerabilities`.
+.. versionchanged:: 2.7.9
+
+   For https URIs, :mod:`xmlrpclib` now performs all the necessary certificate
+   and hostname checks by default
 .. class:: ServerProxy(uri[, transport[, encoding[, verbose[,  allow_none[, use_datetime]]]]])
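
Because verification is now on by default, code that must still reach hosts with self-signed or otherwise untrusted certificates needs an explicit opt-out. A sketch of the two escape hatches this backport leaves open; both rely on private ssl names and the URL is a placeholder:

    import ssl
    import urllib2

    # Per-call opt-out: build an unverified context and pass it explicitly.
    insecure = ssl._create_unverified_context()
    urllib2.urlopen('https://self-signed.example.org/', context=insecure)

    # Process-wide opt-out (discouraged): swap the default HTTPS context factory.
    ssl._create_default_https_context = ssl._create_unverified_context
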
SOURCES/00224-pep476-add-toggle-for-cert-verify.patch
New file
@@ -0,0 +1,103 @@
diff -up Python-2.7.5/Lib/ssl.py.cert Python-2.7.5/Lib/ssl.py
--- Python-2.7.5/Lib/ssl.py.cert    2015-03-30 14:52:12.172241615 +0200